nodes.h revision 217eb067308cf5aa43065377b66acbbee0f5b7c3
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18#define ART_COMPILER_OPTIMIZING_NODES_H_
19
20#include <algorithm>
21#include <array>
22#include <type_traits>
23
24#include "base/arena_bit_vector.h"
25#include "base/arena_containers.h"
26#include "base/arena_object.h"
27#include "base/array_ref.h"
28#include "base/iteration_range.h"
29#include "base/stl_util.h"
30#include "base/transform_array_ref.h"
31#include "data_type.h"
32#include "deoptimization_kind.h"
33#include "dex_file.h"
34#include "dex_file_types.h"
35#include "entrypoints/quick/quick_entrypoints_enum.h"
36#include "handle.h"
37#include "handle_scope.h"
38#include "intrinsics_enum.h"
39#include "invoke_type.h"
40#include "locations.h"
41#include "method_reference.h"
42#include "mirror/class.h"
43#include "offsets.h"
44#include "utils/intrusive_forward_list.h"
45
46namespace art {
47
48class ArenaStack;
49class GraphChecker;
50class HBasicBlock;
51class HConstructorFence;
52class HCurrentMethod;
53class HDoubleConstant;
54class HEnvironment;
55class HFloatConstant;
56class HGraphBuilder;
57class HGraphVisitor;
58class HInstruction;
59class HIntConstant;
60class HInvoke;
61class HLongConstant;
62class HNullConstant;
63class HParameterValue;
64class HPhi;
65class HSuspendCheck;
66class HTryBoundary;
67class LiveInterval;
68class LocationSummary;
69class SlowPathCode;
70class SsaBuilder;
71
72namespace mirror {
73class DexCache;
74}  // namespace mirror
75
// Default initial capacities for arena containers tracking CFG structure.
// These are reservation hints only (see e.g. blocks_.reserve() in HGraph and
// back_edges_.reserve() in HLoopInformation); the containers grow as needed.
static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;

// Sentinel values denoting a field access whose resolved index is not known.
static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

// Sentinel invoke type; used as the HGraph constructor default (invoke_type_
// only encodes how a callee is invoked when the graph is inlined).
static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

// Sentinel dex pc for instructions that do not map back to a dex instruction.
static constexpr uint32_t kNoDexPc = -1;
94
95inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
96  // For the purposes of the compiler, the dex files must actually be the same object
97  // if we want to safely treat them as the same. This is especially important for JIT
98  // as custom class loaders can open the same underlying file (or memory) multiple
99  // times and provide different class resolution but no two class loaders should ever
100  // use the same DexFile object - doing so is an unsupported hack that can lead to
101  // all sorts of weird failures.
102  return &lhs == &rhs;
103}
104
// Supported comparison conditions. The kCondFirst/kCondLast aliases allow
// iterating over all condition kinds.
enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
  // First and last aliases.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};
123
// Outcome of whole-graph analyses such as BuildDominatorTree() and
// AnalyzeLoops() (see HGraph below).
enum GraphAnalysisResult {
  kAnalysisSkipped,  // The analysis was not run.
  kAnalysisInvalidBytecode,  // Input bytecode could not be handled (presumably malformed).
  kAnalysisFailThrowCatchLoop,  // A loop header turned out to be a catch block.
  kAnalysisFailAmbiguousArrayOp,  // An array operation could not be disambiguated.
  kAnalysisSuccess,  // The analysis completed successfully.
};
131
// Reinterprets `x` as the unsigned integer type corresponding to T,
// e.g. int32_t -1 becomes uint32_t 0xffffffff.
template <typename T>
static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
  using UnsignedT = typename std::make_unsigned<T>::type;
  return static_cast<UnsignedT>(x);
}
136
// An ordered sequence of instructions, represented by its first and last
// elements. The individual prev/next links presumably live on HInstruction
// itself (note the HInstruction friend declaration); this class only keeps
// the endpoints. Mutating operations are implemented out of line.
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  // Add `instruction` to / remove `instruction` from this list.
  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise.  Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  // Forget all instructions. Only the endpoints are reset; the instructions
  // themselves are not unlinked or freed.
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  // Merge the instructions of `instruction_list` into this list, relative to
  // `cursor` for the positional variants.
  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HInstructionIteratorHandleChanges;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};
183
// Reference type information: a class handle plus an "exact" bit. An invalid
// instance (null handle) means no type information is available. Most queries
// require the mutator lock because they dereference the class handle.
class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  // Creates a validated instance; implemented out of line.
  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);

  // Convenience overload: a type that cannot be assigned from other types is
  // necessarily exact.
  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
  }

  // Creates an instance without validating the handle.
  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  // Creates the "no information" value (invalid handle, not exact).
  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  // Returns true only for an exact array type whose component type can hold
  // a value of type `rti`.
  bool CanArrayHold(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true only for an exact array type whose component type can hold
  // every element of an array of type `rti`.
  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  // Supertype here includes the type itself (IsAssignableFrom is reflexive);
  // see IsStrictSupertypeOf for the strict variant.
  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provide the same amount of details.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
      : type_handle_(type_handle), is_exact_(is_exact) { }

  // The class of the object. A null reference here marks the whole instance
  // as invalid, i.e. "no type information".
  TypeHandle type_handle_;
  // Whether `type_handle_` is the exact type of the object (true), or only
  // a superclass of the actual type (false).
  bool is_exact_;
};
303
304std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
305
306// Control-flow graph of a method. Contains a list of basic blocks.
class HGraph : public ArenaObject<kArenaAllocGraph> {
 public:
  // `invoke_type` is only meaningful when this graph is inlined into another
  // graph (see invoke_type_ below); `osr` requests on-stack-replacement
  // compilation; `start_instruction_id` seeds the instruction id counter.
  HGraph(ArenaAllocator* allocator,
         ArenaStack* arena_stack,
         const DexFile& dex_file,
         uint32_t method_idx,
         InstructionSet instruction_set,
         InvokeType invoke_type = kInvalidInvokeType,
         bool debuggable = false,
         bool osr = false,
         int start_instruction_id = 0)
      : allocator_(allocator),
        arena_stack_(arena_stack),
        blocks_(allocator->Adapter(kArenaAllocBlockList)),
        reverse_post_order_(allocator->Adapter(kArenaAllocReversePostOrder)),
        linear_order_(allocator->Adapter(kArenaAllocLinearOrder)),
        entry_block_(nullptr),
        exit_block_(nullptr),
        maximum_number_of_out_vregs_(0),
        number_of_vregs_(0),
        number_of_in_vregs_(0),
        temporaries_vreg_slots_(0),
        has_bounds_checks_(false),
        has_try_catch_(false),
        has_simd_(false),
        has_loops_(false),
        has_irreducible_loops_(false),
        debuggable_(debuggable),
        current_instruction_id_(start_instruction_id),
        dex_file_(dex_file),
        method_idx_(method_idx),
        invoke_type_(invoke_type),
        in_ssa_form_(false),
        number_of_cha_guards_(0),
        instruction_set_(instruction_set),
        cached_null_constant_(nullptr),
        cached_int_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_float_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_long_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_double_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_current_method_(nullptr),
        art_method_(nullptr),
        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
        osr_(osr),
        cha_single_implementation_list_(allocator->Adapter(kArenaAllocCHA)) {
    blocks_.reserve(kDefaultNumberOfBlocks);
  }

  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
  void InitializeInexactObjectRTI(VariableSizedHandleScope* handles);

  ArenaAllocator* GetAllocator() const { return allocator_; }
  ArenaStack* GetArenaStack() const { return arena_stack_; }
  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }

  bool IsInSsaForm() const { return in_ssa_form_; }
  void SetInSsaForm() { in_ssa_form_ = true; }

  HBasicBlock* GetEntryBlock() const { return entry_block_; }
  HBasicBlock* GetExitBlock() const { return exit_block_; }
  bool HasExitBlock() const { return exit_block_ != nullptr; }

  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }

  // Appends `block` to blocks_; implemented out of line.
  void AddBlock(HBasicBlock* block);

  // CFG analyses and transformations; all implemented out of line.
  void ComputeDominanceInformation();
  void ClearDominanceInformation();
  void ClearLoopInformation();
  void FindBackEdges(ArenaBitVector* visited);
  GraphAnalysisResult BuildDominatorTree();
  void SimplifyCFG();
  void SimplifyCatchBlocks();

  // Analyze all natural loops in this graph. Returns a code specifying that it
  // was successful or the reason for failure. The method will fail if a loop
  // is a throw-catch loop, i.e. the header is a catch block.
  GraphAnalysisResult AnalyzeLoops() const;

  // Iterate over blocks to compute try block membership. Needs reverse post
  // order and loop information.
  void ComputeTryBlockInformation();

  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
  // Returns the instruction to replace the invoke expression or null if the
  // invoke is for a void method. Note that the caller is responsible for replacing
  // and removing the invoke instruction.
  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);

  // Update the loop and try membership of `block`, which was spawned from `reference`.
  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
  // should be the new back edge.
  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
                                             HBasicBlock* reference,
                                             bool replace_if_back_edge);

  // Need to add a couple of blocks to test if the loop body is entered and
  // put deoptimization instructions, etc.
  void TransformLoopHeaderForBCE(HBasicBlock* header);

  // Adds a new loop directly after the loop with the given header and exit.
  // Returns the new preheader.
  HBasicBlock* TransformLoopForVectorization(HBasicBlock* header,
                                             HBasicBlock* body,
                                             HBasicBlock* exit);

  // Removes `block` from the graph. Assumes `block` has been disconnected from
  // other blocks and has no instructions or phis.
  void DeleteDeadEmptyBlock(HBasicBlock* block);

  // Splits the edge between `block` and `successor` while preserving the
  // indices in the predecessor/successor lists. If there are multiple edges
  // between the blocks, the lowest indices are used.
  // Returns the new block which is empty and has the same dex pc as `successor`.
  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);

  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
  void OrderLoopHeaderPredecessors(HBasicBlock* header);
  void SimplifyLoop(HBasicBlock* header);

  // Returns a fresh instruction id. Aborts (CHECK) on int32 exhaustion rather
  // than silently wrapping around.
  int32_t GetNextInstructionId() {
    CHECK_NE(current_instruction_id_, INT32_MAX);
    return current_instruction_id_++;
  }

  int32_t GetCurrentInstructionId() const {
    return current_instruction_id_;
  }

  // The id counter may only move forward; going backwards could hand out
  // duplicate instruction ids.
  void SetCurrentInstructionId(int32_t id) {
    CHECK_GE(id, current_instruction_id_);
    current_instruction_id_ = id;
  }

  uint16_t GetMaximumNumberOfOutVRegs() const {
    return maximum_number_of_out_vregs_;
  }

  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
    maximum_number_of_out_vregs_ = new_value;
  }

  // Raises the maximum to `other_value` if it is larger than the current one.
  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
  }

  // Raises the temporaries slot count to `slots` if it is larger.
  void UpdateTemporariesVRegSlots(size_t slots) {
    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
  }

  size_t GetTemporariesVRegSlots() const {
    DCHECK(!in_ssa_form_);
    return temporaries_vreg_slots_;
  }

  void SetNumberOfVRegs(uint16_t number_of_vregs) {
    number_of_vregs_ = number_of_vregs;
  }

  uint16_t GetNumberOfVRegs() const {
    return number_of_vregs_;
  }

  void SetNumberOfInVRegs(uint16_t value) {
    number_of_in_vregs_ = value;
  }

  uint16_t GetNumberOfInVRegs() const {
    return number_of_in_vregs_;
  }

  // Locals are all vregs minus the parameter (in) vregs. Only meaningful
  // before SSA conversion.
  uint16_t GetNumberOfLocalVRegs() const {
    DCHECK(!in_ssa_form_);
    return number_of_vregs_ - number_of_in_vregs_;
  }

  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
    return reverse_post_order_;
  }

  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() {
    DCHECK(GetReversePostOrder()[0] == entry_block_);
    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
  }

  // Post order is simply the reverse post order walked backwards.
  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
    return ReverseRange(GetReversePostOrder());
  }

  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
    return linear_order_;
  }

  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
    return ReverseRange(GetLinearOrder());
  }

  bool HasBoundsChecks() const {
    return has_bounds_checks_;
  }

  void SetHasBoundsChecks(bool value) {
    has_bounds_checks_ = value;
  }

  bool IsDebuggable() const { return debuggable_; }

  // Returns a constant of the given type and value. If it does not exist
  // already, it is created and inserted into the graph. This method is only for
  // integral types.
  HConstant* GetConstant(DataType::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);

  // TODO: This is problematic for the consistency of reference type propagation
  // because it can be created anytime after the pass and thus it will be left
  // with an invalid type.
  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);

  // Cached, deduplicated constant accessors. Floating-point values are keyed
  // by their raw bit pattern (bit_cast), so e.g. -0.0f and +0.0f are distinct.
  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_int_constants_, dex_pc);
  }
  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_long_constants_, dex_pc);
  }
  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
  }
  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
  }

  HCurrentMethod* GetCurrentMethod();

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  // Get the method name (without the signature), e.g. "<init>"
  const char* GetMethodName() const;

  // Get the pretty method name (class + name + optionally signature).
  std::string PrettyMethod(bool with_signature = true) const;

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  InstructionSet GetInstructionSet() const {
    return instruction_set_;
  }

  bool IsCompilingOsr() const { return osr_; }

  ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
    return cha_single_implementation_list_;
  }

  void AddCHASingleImplementationDependency(ArtMethod* method) {
    cha_single_implementation_list_.insert(method);
  }

  // The deoptimization flag is needed as soon as at least one CHA guard exists.
  bool HasShouldDeoptimizeFlag() const {
    return number_of_cha_guards_ != 0;
  }

  bool HasTryCatch() const { return has_try_catch_; }
  void SetHasTryCatch(bool value) { has_try_catch_ = value; }

  bool HasSIMD() const { return has_simd_; }
  void SetHasSIMD(bool value) { has_simd_ = value; }

  bool HasLoops() const { return has_loops_; }
  void SetHasLoops(bool value) { has_loops_ = value; }

  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }

  ArtMethod* GetArtMethod() const { return art_method_; }
  void SetArtMethod(ArtMethod* method) { art_method_ = method; }

  // Returns an instruction with the opposite Boolean value from 'cond'.
  // The instruction has been inserted into the graph, either as a constant, or
  // before cursor.
  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);

  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }

  // NOTE(review): could be const like the other getters.
  uint32_t GetNumberOfCHAGuards() { return number_of_cha_guards_; }
  void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
  void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }

 private:
  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
  void RemoveDeadBlocks(const ArenaBitVector& visited);

  // Shared lookup-or-create helper behind the Get*Constant() methods above.
  template <class InstructionType, typename ValueType>
  InstructionType* CreateConstant(ValueType value,
                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
                                  uint32_t dex_pc = kNoDexPc) {
    // Try to find an existing constant of the given value.
    InstructionType* constant = nullptr;
    auto cached_constant = cache->find(value);
    if (cached_constant != cache->end()) {
      constant = cached_constant->second;
    }

    // If not found or previously deleted, create and cache a new instruction.
    // Don't bother reviving a previously deleted instruction, for simplicity.
    if (constant == nullptr || constant->GetBlock() == nullptr) {
      constant = new (allocator_) InstructionType(value, dex_pc);
      cache->Overwrite(value, constant);
      InsertConstant(constant);
    }
    return constant;
  }

  void InsertConstant(HConstant* instruction);

  // Cache a float constant into the graph. This method should only be
  // called by the SsaBuilder when creating "equivalent" instructions.
  void CacheFloatConstant(HFloatConstant* constant);

  // See CacheFloatConstant comment.
  void CacheDoubleConstant(HDoubleConstant* constant);

  ArenaAllocator* const allocator_;
  ArenaStack* const arena_stack_;

  // List of blocks in insertion order.
  ArenaVector<HBasicBlock*> blocks_;

  // List of blocks to perform a reverse post order tree traversal.
  ArenaVector<HBasicBlock*> reverse_post_order_;

  // List of blocks to perform a linear order tree traversal. Unlike the reverse
  // post order, this order is not incrementally kept up-to-date.
  ArenaVector<HBasicBlock*> linear_order_;

  HBasicBlock* entry_block_;
  HBasicBlock* exit_block_;

  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
  uint16_t maximum_number_of_out_vregs_;

  // The number of virtual registers in this method. Contains the parameters.
  uint16_t number_of_vregs_;

  // The number of virtual registers used by parameters of this method.
  uint16_t number_of_in_vregs_;

  // Number of vreg size slots that the temporaries use (used in baseline compiler).
  size_t temporaries_vreg_slots_;

  // Flag whether there are bounds checks in the graph. We can skip
  // BCE if it's false. It's only best effort to keep it up to date in
  // the presence of code elimination so there might be false positives.
  bool has_bounds_checks_;

  // Flag whether there are try/catch blocks in the graph. We will skip
  // try/catch-related passes if it's false. It's only best effort to keep
  // it up to date in the presence of code elimination so there might be
  // false positives.
  bool has_try_catch_;

  // Flag whether SIMD instructions appear in the graph. If true, the
  // code generators may have to be more careful spilling the wider
  // contents of SIMD registers.
  bool has_simd_;

  // Flag whether there are any loops in the graph. We can skip loop
  // optimization if it's false. It's only best effort to keep it up
  // to date in the presence of code elimination so there might be false
  // positives.
  bool has_loops_;

  // Flag whether there are any irreducible loops in the graph. It's only
  // best effort to keep it up to date in the presence of code elimination
  // so there might be false positives.
  bool has_irreducible_loops_;

  // Indicates whether the graph should be compiled in a way that
  // ensures full debuggability. If false, we can apply more
  // aggressive optimizations that may limit the level of debugging.
  const bool debuggable_;

  // The current id to assign to a newly added instruction. See HInstruction.id_.
  int32_t current_instruction_id_;

  // The dex file from which the method is from.
  const DexFile& dex_file_;

  // The method index in the dex file.
  const uint32_t method_idx_;

  // If inlined, this encodes how the callee is being invoked.
  const InvokeType invoke_type_;

  // Whether the graph has been transformed to SSA form. Only used
  // in debug mode to ensure we are not using properties only valid
  // for non-SSA form (like the number of temporaries).
  bool in_ssa_form_;

  // Number of CHA guards in the graph. Used to short-circuit the
  // CHA guard optimization pass when there is no CHA guard left.
  uint32_t number_of_cha_guards_;

  const InstructionSet instruction_set_;

  // Cached constants.
  HNullConstant* cached_null_constant_;
  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;

  HCurrentMethod* cached_current_method_;

  // The ArtMethod this graph is for. Note that for AOT, it may be null,
  // for example for methods whose declaring class could not be resolved
  // (such as when the superclass could not be found).
  ArtMethod* art_method_;

  // Keep the RTI of inexact Object to avoid having to pass stack handle
  // collection pointer to passes which may create NullConstant.
  ReferenceTypeInfo inexact_object_rti_;

  // Whether we are compiling this graph for on stack replacement: this will
  // make all loops seen as irreducible and emit special stack maps to mark
  // compiled code entries which the interpreter can directly jump to.
  const bool osr_;

  // List of methods that are assumed to have single implementation.
  ArenaSet<ArtMethod*> cha_single_implementation_list_;

  friend class SsaBuilder;           // For caching constants.
  friend class SsaLivenessAnalysis;  // For the linear order.
  friend class HInliner;             // For the reverse post order.
  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
  DISALLOW_COPY_AND_ASSIGN(HGraph);
};
751
// Per-loop metadata: the header block, the suspend check, the back edges and
// the set of member blocks (kept as a growable bit vector indexed by block id).
// The block set is empty until Populate() is called.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        contains_irreducible_loop_(false),
        back_edges_(graph->GetAllocator()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        // NOTE(review): the bit vector reuses the back-edges allocation kind;
        // confirm this is intended rather than a dedicated kind.
        blocks_(graph->GetAllocator(),
                graph->GetBlocks().size(),
                true,
                kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  bool IsIrreducible() const { return irreducible_; }
  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  // Populated means at least one bit (the header's, presumably) is set.
  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

  bool HasExitEdge() const;

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  bool contains_irreducible_loop_;
  ArenaVector<HBasicBlock*> back_edges_;
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};
862
863// Stores try/catch information for basic blocks.
864// Note that HGraph is constructed so that catch blocks cannot simultaneously
865// be try blocks.
866class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
867 public:
868  // Try block information constructor.
869  explicit TryCatchInformation(const HTryBoundary& try_entry)
870      : try_entry_(&try_entry),
871        catch_dex_file_(nullptr),
872        catch_type_index_(DexFile::kDexNoIndex16) {
873    DCHECK(try_entry_ != nullptr);
874  }
875
876  // Catch block information constructor.
877  TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
878      : try_entry_(nullptr),
879        catch_dex_file_(&dex_file),
880        catch_type_index_(catch_type_index) {}
881
882  bool IsTryBlock() const { return try_entry_ != nullptr; }
883
884  const HTryBoundary& GetTryEntry() const {
885    DCHECK(IsTryBlock());
886    return *try_entry_;
887  }
888
889  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }
890
891  bool IsCatchAllTypeIndex() const {
892    DCHECK(IsCatchBlock());
893    return !catch_type_index_.IsValid();
894  }
895
896  dex::TypeIndex GetCatchTypeIndex() const {
897    DCHECK(IsCatchBlock());
898    return catch_type_index_;
899  }
900
901  const DexFile& GetCatchDexFile() const {
902    DCHECK(IsCatchBlock());
903    return *catch_dex_file_;
904  }
905
906 private:
907  // One of possibly several TryBoundary instructions entering the block's try.
908  // Only set for try blocks.
909  const HTryBoundary* try_entry_;
910
911  // Exception type information. Only set for catch blocks.
912  const DexFile* catch_dex_file_;
913  const dex::TypeIndex catch_type_index_;
914};
915
// Sentinel lifetime position: set until liveness analysis assigns a real one.
static constexpr size_t kNoLifetime = -1;
// Sentinel block id used before HGraph assigns a real id.
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
918
919// A block in a method. Contains the list of instructions represented
920// as a double linked list. Each block knows its predecessors and
921// successors.
922
923class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
924 public:
925  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
926      : graph_(graph),
927        predecessors_(graph->GetAllocator()->Adapter(kArenaAllocPredecessors)),
928        successors_(graph->GetAllocator()->Adapter(kArenaAllocSuccessors)),
929        loop_information_(nullptr),
930        dominator_(nullptr),
931        dominated_blocks_(graph->GetAllocator()->Adapter(kArenaAllocDominated)),
932        block_id_(kInvalidBlockId),
933        dex_pc_(dex_pc),
934        lifetime_start_(kNoLifetime),
935        lifetime_end_(kNoLifetime),
936        try_catch_information_(nullptr) {
937    predecessors_.reserve(kDefaultNumberOfPredecessors);
938    successors_.reserve(kDefaultNumberOfSuccessors);
939    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
940  }
941
942  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
943    return predecessors_;
944  }
945
946  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
947    return successors_;
948  }
949
950  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
951  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;
952
953  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
954    return ContainsElement(successors_, block, start_from);
955  }
956
957  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
958    return dominated_blocks_;
959  }
960
961  bool IsEntryBlock() const {
962    return graph_->GetEntryBlock() == this;
963  }
964
965  bool IsExitBlock() const {
966    return graph_->GetExitBlock() == this;
967  }
968
969  bool IsSingleGoto() const;
970  bool IsSingleReturn() const;
971  bool IsSingleReturnOrReturnVoidAllowingPhis() const;
972  bool IsSingleTryBoundary() const;
973
974  // Returns true if this block emits nothing but a jump.
975  bool IsSingleJump() const {
976    HLoopInformation* loop_info = GetLoopInformation();
977    return (IsSingleGoto() || IsSingleTryBoundary())
978           // Back edges generate a suspend check.
979           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
980  }
981
982  void AddBackEdge(HBasicBlock* back_edge) {
983    if (loop_information_ == nullptr) {
984      loop_information_ = new (graph_->GetAllocator()) HLoopInformation(this, graph_);
985    }
986    DCHECK_EQ(loop_information_->GetHeader(), this);
987    loop_information_->AddBackEdge(back_edge);
988  }
989
990  HGraph* GetGraph() const { return graph_; }
991  void SetGraph(HGraph* graph) { graph_ = graph; }
992
993  uint32_t GetBlockId() const { return block_id_; }
994  void SetBlockId(int id) { block_id_ = id; }
995  uint32_t GetDexPc() const { return dex_pc_; }
996
997  HBasicBlock* GetDominator() const { return dominator_; }
998  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
999  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }
1000
1001  void RemoveDominatedBlock(HBasicBlock* block) {
1002    RemoveElement(dominated_blocks_, block);
1003  }
1004
1005  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
1006    ReplaceElement(dominated_blocks_, existing, new_block);
1007  }
1008
1009  void ClearDominanceInformation();
1010
1011  int NumberOfBackEdges() const {
1012    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
1013  }
1014
1015  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
1016  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
1017  const HInstructionList& GetInstructions() const { return instructions_; }
1018  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
1019  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
1020  const HInstructionList& GetPhis() const { return phis_; }
1021
1022  HInstruction* GetFirstInstructionDisregardMoves() const;
1023
1024  void AddSuccessor(HBasicBlock* block) {
1025    successors_.push_back(block);
1026    block->predecessors_.push_back(this);
1027  }
1028
1029  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
1030    size_t successor_index = GetSuccessorIndexOf(existing);
1031    existing->RemovePredecessor(this);
1032    new_block->predecessors_.push_back(this);
1033    successors_[successor_index] = new_block;
1034  }
1035
1036  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
1037    size_t predecessor_index = GetPredecessorIndexOf(existing);
1038    existing->RemoveSuccessor(this);
1039    new_block->successors_.push_back(this);
1040    predecessors_[predecessor_index] = new_block;
1041  }
1042
1043  // Insert `this` between `predecessor` and `successor. This method
1044  // preserves the indicies, and will update the first edge found between
1045  // `predecessor` and `successor`.
1046  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
1047    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
1048    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
1049    successor->predecessors_[predecessor_index] = this;
1050    predecessor->successors_[successor_index] = this;
1051    successors_.push_back(successor);
1052    predecessors_.push_back(predecessor);
1053  }
1054
1055  void RemovePredecessor(HBasicBlock* block) {
1056    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
1057  }
1058
1059  void RemoveSuccessor(HBasicBlock* block) {
1060    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
1061  }
1062
1063  void ClearAllPredecessors() {
1064    predecessors_.clear();
1065  }
1066
1067  void AddPredecessor(HBasicBlock* block) {
1068    predecessors_.push_back(block);
1069    block->successors_.push_back(this);
1070  }
1071
1072  void SwapPredecessors() {
1073    DCHECK_EQ(predecessors_.size(), 2u);
1074    std::swap(predecessors_[0], predecessors_[1]);
1075  }
1076
1077  void SwapSuccessors() {
1078    DCHECK_EQ(successors_.size(), 2u);
1079    std::swap(successors_[0], successors_[1]);
1080  }
1081
1082  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
1083    return IndexOfElement(predecessors_, predecessor);
1084  }
1085
1086  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
1087    return IndexOfElement(successors_, successor);
1088  }
1089
1090  HBasicBlock* GetSinglePredecessor() const {
1091    DCHECK_EQ(GetPredecessors().size(), 1u);
1092    return GetPredecessors()[0];
1093  }
1094
1095  HBasicBlock* GetSingleSuccessor() const {
1096    DCHECK_EQ(GetSuccessors().size(), 1u);
1097    return GetSuccessors()[0];
1098  }
1099
1100  // Returns whether the first occurrence of `predecessor` in the list of
1101  // predecessors is at index `idx`.
1102  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
1103    DCHECK_EQ(GetPredecessors()[idx], predecessor);
1104    return GetPredecessorIndexOf(predecessor) == idx;
1105  }
1106
1107  // Create a new block between this block and its predecessors. The new block
1108  // is added to the graph, all predecessor edges are relinked to it and an edge
1109  // is created to `this`. Returns the new empty block. Reverse post order or
1110  // loop and try/catch information are not updated.
1111  HBasicBlock* CreateImmediateDominator();
1112
1113  // Split the block into two blocks just before `cursor`. Returns the newly
1114  // created, latter block. Note that this method will add the block to the
1115  // graph, create a Goto at the end of the former block and will create an edge
1116  // between the blocks. It will not, however, update the reverse post order or
1117  // loop and try/catch information.
1118  HBasicBlock* SplitBefore(HInstruction* cursor);
1119
1120  // Split the block into two blocks just before `cursor`. Returns the newly
1121  // created block. Note that this method just updates raw block information,
1122  // like predecessors, successors, dominators, and instruction list. It does not
1123  // update the graph, reverse post order, loop information, nor make sure the
1124  // blocks are consistent (for example ending with a control flow instruction).
1125  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);
1126
1127  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
1128  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);
1129
1130  // Merge `other` at the end of `this`. Successors and dominated blocks of
1131  // `other` are changed to be successors and dominated blocks of `this`. Note
1132  // that this method does not update the graph, reverse post order, loop
1133  // information, nor make sure the blocks are consistent (for example ending
1134  // with a control flow instruction).
1135  void MergeWithInlined(HBasicBlock* other);
1136
1137  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
1138  // of `this` are moved to `other`.
1139  // Note that this method does not update the graph, reverse post order, loop
1140  // information, nor make sure the blocks are consistent (for example ending
1141  // with a control flow instruction).
1142  void ReplaceWith(HBasicBlock* other);
1143
1144  // Merges the instructions of `other` at the end of `this`.
1145  void MergeInstructionsWith(HBasicBlock* other);
1146
1147  // Merge `other` at the end of `this`. This method updates loops, reverse post
1148  // order, links to predecessors, successors, dominators and deletes the block
1149  // from the graph. The two blocks must be successive, i.e. `this` the only
1150  // predecessor of `other` and vice versa.
1151  void MergeWith(HBasicBlock* other);
1152
1153  // Disconnects `this` from all its predecessors, successors and dominator,
1154  // removes it from all loops it is included in and eventually from the graph.
1155  // The block must not dominate any other block. Predecessors and successors
1156  // are safely updated.
1157  void DisconnectAndDelete();
1158
1159  void AddInstruction(HInstruction* instruction);
1160  // Insert `instruction` before/after an existing instruction `cursor`.
1161  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
1162  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
1163  // Replace phi `initial` with `replacement` within this block.
1164  void ReplaceAndRemovePhiWith(HPhi* initial, HPhi* replacement);
1165  // Replace instruction `initial` with `replacement` within this block.
1166  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
1167                                       HInstruction* replacement);
1168  void AddPhi(HPhi* phi);
1169  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
1170  // RemoveInstruction and RemovePhi delete a given instruction from the respective
1171  // instruction list. With 'ensure_safety' set to true, it verifies that the
1172  // instruction is not in use and removes it from the use lists of its inputs.
1173  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
1174  void RemovePhi(HPhi* phi, bool ensure_safety = true);
1175  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);
1176
1177  bool IsLoopHeader() const {
1178    return IsInLoop() && (loop_information_->GetHeader() == this);
1179  }
1180
1181  bool IsLoopPreHeaderFirstPredecessor() const {
1182    DCHECK(IsLoopHeader());
1183    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
1184  }
1185
1186  bool IsFirstPredecessorBackEdge() const {
1187    DCHECK(IsLoopHeader());
1188    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
1189  }
1190
1191  HLoopInformation* GetLoopInformation() const {
1192    return loop_information_;
1193  }
1194
1195  // Set the loop_information_ on this block. Overrides the current
1196  // loop_information if it is an outer loop of the passed loop information.
1197  // Note that this method is called while creating the loop information.
1198  void SetInLoop(HLoopInformation* info) {
1199    if (IsLoopHeader()) {
1200      // Nothing to do. This just means `info` is an outer loop.
1201    } else if (!IsInLoop()) {
1202      loop_information_ = info;
1203    } else if (loop_information_->Contains(*info->GetHeader())) {
1204      // Block is currently part of an outer loop. Make it part of this inner loop.
1205      // Note that a non loop header having a loop information means this loop information
1206      // has already been populated
1207      loop_information_ = info;
1208    } else {
1209      // Block is part of an inner loop. Do not update the loop information.
1210      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
1211      // at this point, because this method is being called while populating `info`.
1212    }
1213  }
1214
1215  // Raw update of the loop information.
1216  void SetLoopInformation(HLoopInformation* info) {
1217    loop_information_ = info;
1218  }
1219
1220  bool IsInLoop() const { return loop_information_ != nullptr; }
1221
1222  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }
1223
1224  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
1225    try_catch_information_ = try_catch_information;
1226  }
1227
1228  bool IsTryBlock() const {
1229    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
1230  }
1231
1232  bool IsCatchBlock() const {
1233    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
1234  }
1235
1236  // Returns the try entry that this block's successors should have. They will
1237  // be in the same try, unless the block ends in a try boundary. In that case,
1238  // the appropriate try entry will be returned.
1239  const HTryBoundary* ComputeTryEntryOfSuccessors() const;
1240
1241  bool HasThrowingInstructions() const;
1242
1243  // Returns whether this block dominates the blocked passed as parameter.
1244  bool Dominates(HBasicBlock* block) const;
1245
1246  size_t GetLifetimeStart() const { return lifetime_start_; }
1247  size_t GetLifetimeEnd() const { return lifetime_end_; }
1248
1249  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
1250  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }
1251
1252  bool EndsWithControlFlowInstruction() const;
1253  bool EndsWithIf() const;
1254  bool EndsWithTryBoundary() const;
1255  bool HasSinglePhi() const;
1256
1257 private:
1258  HGraph* graph_;
1259  ArenaVector<HBasicBlock*> predecessors_;
1260  ArenaVector<HBasicBlock*> successors_;
1261  HInstructionList instructions_;
1262  HInstructionList phis_;
1263  HLoopInformation* loop_information_;
1264  HBasicBlock* dominator_;
1265  ArenaVector<HBasicBlock*> dominated_blocks_;
1266  uint32_t block_id_;
1267  // The dex program counter of the first instruction of this block.
1268  const uint32_t dex_pc_;
1269  size_t lifetime_start_;
1270  size_t lifetime_end_;
1271  TryCatchInformation* try_catch_information_;
1272
1273  friend class HGraph;
1274  friend class HInstruction;
1275
1276  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
1277};
1278
1279// Iterates over the LoopInformation of all loops which contain 'block'
1280// from the innermost to the outermost.
1281class HLoopInformationOutwardIterator : public ValueObject {
1282 public:
1283  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1284      : current_(block.GetLoopInformation()) {}
1285
1286  bool Done() const { return current_ == nullptr; }
1287
1288  void Advance() {
1289    DCHECK(!Done());
1290    current_ = current_->GetPreHeader()->GetLoopInformation();
1291  }
1292
1293  HLoopInformation* Current() const {
1294    DCHECK(!Done());
1295    return current_;
1296  }
1297
1298 private:
1299  HLoopInformation* current_;
1300
1301  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1302};
1303
// Lists every concrete (instantiable) HInstruction subclass shared by all
// target architectures, as M(InstructionName, SuperClassName) pairs. The M
// macro is supplied by each expansion site (forward declarations, visitor
// methods, ...). NOTE(review): the expansion order presumably determines the
// InstructionKind enumerator order - confirm before reordering entries.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClassTableGet, Instruction)                                         \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(ConstructorFence, Instruction)                                      \
  M(CurrentMethod, Instruction)                                         \
  M(ShouldDeoptimizeFlag, Instruction)                                  \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(IntermediateAddress, Instruction)                                   \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(InvokePolymorphic, Invoke)                                          \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadString, Instruction)                                            \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(NativeDebugInfo, Instruction)                                       \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Ror, BinaryOperation)                                               \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(Select, Instruction)                                                \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \
  M(VecReplicateScalar, VecUnaryOperation)                              \
  M(VecExtractScalar, VecUnaryOperation)                                \
  M(VecReduce, VecUnaryOperation)                                       \
  M(VecCnv, VecUnaryOperation)                                          \
  M(VecNeg, VecUnaryOperation)                                          \
  M(VecAbs, VecUnaryOperation)                                          \
  M(VecNot, VecUnaryOperation)                                          \
  M(VecAdd, VecBinaryOperation)                                         \
  M(VecHalvingAdd, VecBinaryOperation)                                  \
  M(VecSub, VecBinaryOperation)                                         \
  M(VecMul, VecBinaryOperation)                                         \
  M(VecDiv, VecBinaryOperation)                                         \
  M(VecMin, VecBinaryOperation)                                         \
  M(VecMax, VecBinaryOperation)                                         \
  M(VecAnd, VecBinaryOperation)                                         \
  M(VecAndNot, VecBinaryOperation)                                      \
  M(VecOr, VecBinaryOperation)                                          \
  M(VecXor, VecBinaryOperation)                                         \
  M(VecShl, VecBinaryOperation)                                         \
  M(VecShr, VecBinaryOperation)                                         \
  M(VecUShr, VecBinaryOperation)                                        \
  M(VecSetScalars, VecOperation)                                        \
  M(VecMultiplyAccumulate, VecOperation)                                \
  M(VecSADAccumulate, VecOperation)                                     \
  M(VecLoad, VecMemoryOperation)                                        \
  M(VecStore, VecMemoryOperation)                                       \

1414
/*
 * Instructions, shared across several (not all) architectures.
 */
// Expands to nothing unless at least one of the arm/arm64 backends is built.
#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
  M(BitwiseNegatedRight, Instruction)                                   \
  M(DataProcWithShifterOp, Instruction)                                 \
  M(MultiplyAccumulate, Instruction)                                    \
  M(IntermediateAddressIndex, Instruction)
#endif

// Per-architecture instruction lists. Empty for backends that currently
// define no architecture-specific instructions.
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)

#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)

#ifndef ART_ENABLE_CODEGEN_mips
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                           \
  M(MipsComputeBaseMethodAddress, Instruction)                          \
  M(MipsPackedSwitch, Instruction)                                      \
  M(IntermediateArrayAddressIndex, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86FPNeg, Instruction)                                              \
  M(X86PackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1454
// All concrete instructions: the common list plus whatever the enabled
// codegen targets contribute.
#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// Abstract (non-instantiable) instruction base classes.
#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
  M(Condition, BinaryOperation)                                         \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)                                                \
  M(VecOperation, Instruction)                                          \
  M(VecUnaryOperation, VecOperation)                                    \
  M(VecBinaryOperation, VecOperation)                                   \
  M(VecMemoryOperation, VecOperation)

#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

// Forward-declare every H<Instruction> class listed above.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION
1483
1484#define DECLARE_INSTRUCTION(type)                                         \
1485  private:                                                                \
1486  H##type& operator=(const H##type&) = delete;                            \
1487  public:                                                                 \
1488  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }    \
1489  const char* DebugName() const OVERRIDE { return #type; }                \
1490  bool InstructionTypeEquals(const HInstruction* other) const OVERRIDE {  \
1491    return other->Is##type();                                             \
1492  }                                                                       \
1493  HInstruction* Clone(ArenaAllocator* arena) const OVERRIDE {             \
1494    DCHECK(IsClonable());                                                 \
1495    return new (arena) H##type(*this->As##type());                        \
1496  }                                                                       \
1497  void Accept(HGraphVisitor* visitor) OVERRIDE
1498
// Boilerplate for an abstract instruction class H<type>: deletes assignment
// and provides non-virtual Is<type>()/As<type>() helpers that shadow the
// virtual ones declared in HInstruction and trivially succeed.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  private:                                                              \
  H##type& operator=(const H##type&) = delete;                          \
  public:                                                               \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }
1506
// Declares a defaulted (member-wise) copy constructor for H<type>, used by
// the Clone() generated in DECLARE_INSTRUCTION. Marked explicit so that
// copies cannot be created implicitly.
#define DEFAULT_COPY_CONSTRUCTOR(type)                                  \
  explicit H##type(const H##type& other) = default;
1509
// A node in an instruction's intrusive singly-linked list of uses. T is the
// user type: HInstruction* for regular uses, HEnvironment* for environment
// uses. Nodes are arena-allocated and constructed only by HInstruction
// (the constructor is private; see the friend declaration below).
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode>,
                     public IntrusiveForwardListNode<HUseListNode<T>> {
 public:
  // Get the instruction which has this use as one of the inputs.
  T GetUser() const { return user_; }
  // Get the position of the input record that this use corresponds to.
  size_t GetIndex() const { return index_; }
  // Set the position of the input record that this use corresponds to.
  void SetIndex(size_t index) { index_ = index; }

 private:
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  // The user is fixed at construction; only the input index may change.
  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};
1532
// Intrusive singly-linked list of use nodes, kept by the used instruction.
template <typename T>
using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1535
1536// This class is used by HEnvironment and HInstruction classes to record the
1537// instructions they use and pointers to the corresponding HUseListNodes kept
1538// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), before_use_node_() {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}

  // Re-creates a record for the same instruction with an updated position in
  // that instruction's use list.
  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
      : HUserRecord(old_record.instruction_, before_use_node) {}
  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
      : instruction_(instruction), before_use_node_(before_use_node) {
    DCHECK(instruction_ != nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
  // Note: the stored iterator points *before* the use node; this increments a
  // copy of it (GetBeforeUseNode() returns by value) to reach the node itself.
  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
  typename HUseList<T>::iterator before_use_node_;
};
1563
1564// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
1565// This is used for HInstruction::GetInputs() to return a container wrapper providing
1566// HInstruction* values even though the underlying container has HUserRecord<>s.
1567struct HInputExtractor {
1568  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
1569    return record.GetInstruction();
1570  }
1571  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
1572    return record.GetInstruction();
1573  }
1574};
1575
// Input views yielding (const) HInstruction* over HUserRecord<> storage.
using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
1578
1579/**
1580 * Side-effects representation.
1581 *
1582 * For write/read dependences on fields/arrays, the dependence analysis uses
1583 * type disambiguation (e.g. a float field write cannot modify the value of an
 * integer field read) and the access type (e.g. a reference array write cannot
1585 * modify the value of a reference field read [although it may modify the
1586 * reference fetch prior to reading the field, which is represented by its own
1587 * write/read dependence]). The analysis makes conservative points-to
1588 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1589 * the same, and any reference read depends on any reference read without
1590 * further regard of its type).
1591 *
 * The internal representation uses 38 bits and is described in the table below.
1593 * The first line indicates the side effect, and for field/array accesses the
1594 * second line indicates the type of the access (in the order of the
1595 * DataType::Type enum).
1596 * The two numbered lines below indicate the bit position in the bitfield (read
1597 * vertically).
1598 *
1599 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1600 *   +-------------+---------+---------+--------------+---------+---------+
1601 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1602 *   |      3      |333333322|222222221|       1      |111111110|000000000|
1603 *   |      7      |654321098|765432109|       8      |765432109|876543210|
1604 *
1605 * Note that, to ease the implementation, 'changes' bits are least significant
1606 * bits, while 'dependency' bits are most significant bits.
1607 */
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  // No changes and no dependencies.
  static SideEffects None() {
    return SideEffects(0);
  }

  // Every 'change' and every 'depend on' bit set.
  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  // All writes plus 'can trigger GC', but no dependencies.
  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  // All reads plus 'depends on GC', but no changes.
  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  // Everything except the 'depends on GC' bit.
  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  // All field/array writes and reads, without either GC bit.
  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // Volatile accesses are conservatively modeled as reading and writing
  // everything, regardless of the accessed type.
  static SideEffects FieldWriteOfType(DataType::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(DataType::Type type) {
    return SideEffects(TypeFlag(type, kArrayWriteOffset));
  }

  // See FieldWriteOfType() for the treatment of volatile accesses.
  static SideEffects FieldReadOfType(DataType::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(DataType::Type type) {
    return SideEffects(TypeFlag(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Returns this side-effect set with all of the other's bits removed.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Returns true if every bit set in `other` is also set in this.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  // Returns true if any 'change' bit is set.
  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  // Returns true if any 'depend on' bit is set.
  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  bool MayDependOn(SideEffects other) const {
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  // Bits are printed from the most significant (kLastBit) downwards.
  std::string ToString() const {
    std::string flags = "|";
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // Number of type-disambiguation bits per field/array access group; one bit
  // per type handled by TypeFlag() below.
  static constexpr int kFieldArrayAnalysisBits = 9;

  // Layout of the 'change' half (least significant bits).
  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  // Layout of the 'depend on' half (most significant bits); mirrors the
  // 'change' half shifted up by kChangeBits (see MayDependOn()).
  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Translates type to bit flag. The type must correspond to a Java type.
  static uint64_t TypeFlag(DataType::Type type, int offset) {
    int shift;
    switch (type) {
      case DataType::Type::kReference: shift = 0; break;
      case DataType::Type::kBool:      shift = 1; break;
      case DataType::Type::kInt8:      shift = 2; break;
      case DataType::Type::kUint16:    shift = 3; break;
      case DataType::Type::kInt16:     shift = 4; break;
      case DataType::Type::kInt32:     shift = 5; break;
      case DataType::Type::kInt64:     shift = 6; break;
      case DataType::Type::kFloat32:   shift = 7; break;
      case DataType::Type::kFloat64:   shift = 8; break;
      default:
        LOG(FATAL) << "Unexpected data type " << type;
        UNREACHABLE();
    }
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return UINT64_C(1) << (shift + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};
1815
// A HEnvironment object contains the values of virtual registers at a given location.
// Environments form a chain through parent_: an environment with a parent
// belongs to an inlined invocation (see IsFromInlinedInvoke()).
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  ALWAYS_INLINE HEnvironment(ArenaAllocator* allocator,
                             size_t number_of_vregs,
                             ArtMethod* method,
                             uint32_t dex_pc,
                             HInstruction* holder)
     : vregs_(number_of_vregs, allocator->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(allocator->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       method_(method),
       dex_pc_(dex_pc),
       holder_(holder) {
  }

  // Creates an environment with the same shape (size, method, dex pc) as
  // `to_copy`, without copying the vreg values; use CopyFrom() for that.
  ALWAYS_INLINE HEnvironment(ArenaAllocator* allocator,
                             const HEnvironment& to_copy,
                             HInstruction* holder)
      : HEnvironment(allocator,
                     to_copy.Size(),
                     to_copy.GetMethod(),
                     to_copy.GetDexPc(),
                     holder) {}

  // Allocates one Location slot per vreg. Must only be called once.
  void AllocateLocations() {
    DCHECK(locations_.empty());
    locations_.resize(vregs_.size());
  }

  // Appends a copy of `parent` and its whole parent chain at the end of this
  // environment's parent chain. All copies use this environment's holder.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      // Recurse until we reach the tail of the existing chain.
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(ArrayRef<HInstruction* const> locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Records `instruction` as the value of vreg `index`. Raw because the
  // instruction's environment-use list is not updated.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  ArtMethod* GetMethod() const {
    return method_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }


  // An environment with a parent belongs to an inlined method.
  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  // Environment of the enclosing (inlining) frame, if any.
  HEnvironment* parent_;
  ArtMethod* method_;
  const uint32_t dex_pc_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
1919
1920class HInstruction : public ArenaObject<kArenaAllocInstruction> {
1921 public:
  // Creates a detached instruction: no block, no environment, no locations,
  // id and ssa index of -1, and invalid (inexact) reference type info.
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(0u),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
  }
1938
1939  virtual ~HInstruction() {}
1940
1941#define DECLARE_KIND(type, super) k##type,
1942  enum InstructionKind {
1943    FOR_EACH_INSTRUCTION(DECLARE_KIND)
1944  };
1945#undef DECLARE_KIND
1946
1947  HInstruction* GetNext() const { return next_; }
1948  HInstruction* GetPrevious() const { return previous_; }
1949
1950  HInstruction* GetNextDisregardingMoves() const;
1951  HInstruction* GetPreviousDisregardingMoves() const;
1952
1953  HBasicBlock* GetBlock() const { return block_; }
1954  ArenaAllocator* GetAllocator() const { return block_->GetGraph()->GetAllocator(); }
1955  void SetBlock(HBasicBlock* block) { block_ = block; }
1956  bool IsInBlock() const { return block_ != nullptr; }
1957  bool IsInLoop() const { return block_->IsInLoop(); }
1958  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
1959  bool IsIrreducibleLoopHeaderPhi() const {
1960    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
1961  }
1962
1963  virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;
1964
1965  ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
1966    // One virtual method is enough, just const_cast<> and then re-add the const.
1967    return ArrayRef<const HUserRecord<HInstruction*>>(
1968        const_cast<HInstruction*>(this)->GetInputRecords());
1969  }
1970
1971  HInputsRef GetInputs() {
1972    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
1973  }
1974
1975  HConstInputsRef GetInputs() const {
1976    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
1977  }
1978
1979  size_t InputCount() const { return GetInputRecords().size(); }
1980  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }
1981
1982  bool HasInput(HInstruction* input) const {
1983    for (const HInstruction* i : GetInputs()) {
1984      if (i == input) {
1985        return true;
1986      }
1987    }
1988    return false;
1989  }
1990
1991  void SetRawInputAt(size_t index, HInstruction* input) {
1992    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
1993  }
1994
1995  virtual void Accept(HGraphVisitor* visitor) = 0;
1996  virtual const char* DebugName() const = 0;
1997
1998  virtual DataType::Type GetType() const { return DataType::Type::kVoid; }
1999
2000  virtual bool NeedsEnvironment() const { return false; }
2001
2002  uint32_t GetDexPc() const { return dex_pc_; }
2003
2004  virtual bool IsControlFlow() const { return false; }
2005
2006  // Can the instruction throw?
2007  // TODO: We should rename to CanVisiblyThrow, as some instructions (like HNewInstance),
2008  // could throw OOME, but it is still OK to remove them if they are unused.
2009  virtual bool CanThrow() const { return false; }
2010  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }
2011
2012  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
2013  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }
2014
2015  // Does not apply for all instructions, but having this at top level greatly
2016  // simplifies the null check elimination.
2017  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
2018  virtual bool CanBeNull() const {
2019    DCHECK_EQ(GetType(), DataType::Type::kReference) << "CanBeNull only applies to reference types";
2020    return true;
2021  }
2022
2023  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
2024    return false;
2025  }
2026
2027  virtual bool IsActualObject() const {
2028    return GetType() == DataType::Type::kReference;
2029  }
2030
2031  void SetReferenceTypeInfo(ReferenceTypeInfo rti);
2032
2033  ReferenceTypeInfo GetReferenceTypeInfo() const {
2034    DCHECK_EQ(GetType(), DataType::Type::kReference);
2035    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
2036                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
2037  }
2038
  // Records that `user` uses `this` as its input at position `index`: a new
  // node is pushed at the front of the use list and the affected user records
  // are fixed up afterwards.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    HUseListNode<HInstruction*>* new_node =
        new (GetBlock()->GetGraph()->GetAllocator()) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }
2048
  // Environment counterpart of AddUseAt(): records that environment `user`
  // refers to `this` at vreg position `index`.
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetAllocator()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }
2058
  // Removes `this` from the use list of its input at position `input`.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }
2065
  // Removes `this` from the use lists of all of its inputs.
  void RemoveAsUserOfAllInputs() {
    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
      input_use.GetInstruction()->uses_.erase_after(before_use_node);
      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
    }
  }
2073
2074  const HUseList<HInstruction*>& GetUses() const { return uses_; }
2075  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }
2076
2077  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
2078  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
2079  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
2080  bool HasOnlyOneNonEnvironmentUse() const {
2081    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
2082  }
2083
  // Returns true if the instruction may be eliminated when unused: it writes
  // nothing, cannot throw, and is not one of the kinds that must be kept
  // regardless of uses (suspend checks, control flow, debug info, parameters,
  // explicit barriers/fences).
  bool IsRemovable() const {
    return
        !DoesAnyWrite() &&
        !CanThrow() &&
        !IsSuspendCheck() &&
        !IsControlFlow() &&
        !IsNativeDebugInfo() &&
        !IsParameterValue() &&
        // If we added an explicit barrier then we should keep it.
        !IsMemoryBarrier() &&
        !IsConstructorFence();
  }
2096
2097  bool IsDeadAndRemovable() const {
2098    return IsRemovable() && !HasUses();
2099  }
2100
2101  // Does this instruction strictly dominate `other_instruction`?
2102  // Returns false if this instruction and `other_instruction` are the same.
2103  // Aborts if this instruction and `other_instruction` are both phis.
2104  bool StrictlyDominates(HInstruction* other_instruction) const;
2105
2106  int GetId() const { return id_; }
2107  void SetId(int id) { id_ = id; }
2108
2109  int GetSsaIndex() const { return ssa_index_; }
2110  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
2111  bool HasSsaIndex() const { return ssa_index_ != -1; }
2112
2113  bool HasEnvironment() const { return environment_ != nullptr; }
2114  HEnvironment* GetEnvironment() const { return environment_; }
2115  // Set the `environment_` field. Raw because this method does not
2116  // update the uses lists.
2117  void SetRawEnvironment(HEnvironment* environment) {
2118    DCHECK(environment_ == nullptr);
2119    DCHECK_EQ(environment->GetHolder(), this);
2120    environment_ = environment;
2121  }
2122
  // Makes `environment` the new head of this instruction's environment chain,
  // with the previous head as its parent. Raw because the uses lists are not
  // updated.
  void InsertRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ != nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    DCHECK(environment->GetParent() == nullptr);
    environment->parent_ = environment_;
    environment_ = environment;
  }
2130
2131  void RemoveEnvironment();
2132
2133  // Set the environment of this instruction, copying it from `environment`. While
2134  // copying, the uses lists are being updated.
2135  void CopyEnvironmentFrom(HEnvironment* environment) {
2136    DCHECK(environment_ == nullptr);
2137    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
2138    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
2139    environment_->CopyFrom(environment);
2140    if (environment->GetParent() != nullptr) {
2141      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
2142    }
2143  }
2144
  // Like CopyEnvironmentFrom(), but copies via CopyFromWithLoopPhiAdjustment():
  // loop phis of loop header `block` are replaced by their first input (see the
  // comment on HEnvironment::CopyFromWithLoopPhiAdjustment()).
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }
2155
2156  // Returns the number of entries in the environment. Typically, that is the
2157  // number of dex registers in a method. It could be more in case of inlining.
2158  size_t EnvironmentSize() const;
2159
2160  LocationSummary* GetLocations() const { return locations_; }
2161  void SetLocations(LocationSummary* locations) { locations_ = locations; }
2162
2163  void ReplaceWith(HInstruction* instruction);
2164  void ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
2165  void ReplaceInput(HInstruction* replacement, size_t index);
2166
2167  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
2168  // uses of this instruction by `other` are *not* updated.
2169  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
2170    ReplaceWith(other);
2171    other->ReplaceInput(this, use_index);
2172  }
2173
2174  // Move `this` instruction before `cursor`
2175  void MoveBefore(HInstruction* cursor, bool do_checks = true);
2176
2177  // Move `this` before its first user and out of any loops. If there is no
2178  // out-of-loop user that dominates all other users, move the instruction
2179  // to the end of the out-of-loop common dominator of the user's blocks.
2180  //
2181  // This can be used only on non-throwing instructions with no side effects that
2182  // have at least one use but no environment uses.
2183  void MoveBeforeFirstUserAndOutOfLoops();
2184
2185#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2186  bool Is##type() const;                                                       \
2187  const H##type* As##type() const;                                             \
2188  H##type* As##type();
2189
2190  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2191#undef INSTRUCTION_TYPE_CHECK
2192
2193#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2194  bool Is##type() const { return (As##type() != nullptr); }                    \
2195  virtual const H##type* As##type() const { return nullptr; }                  \
2196  virtual H##type* As##type() { return nullptr; }
2197  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2198#undef INSTRUCTION_TYPE_CHECK
2199
2200  // Return a clone of the instruction if it is clonable (shallow copy by default, custom copy
2201  // if a custom copy-constructor is provided for a particular type). If IsClonable() is false for
2202  // the instruction then the behaviour of this function is undefined.
2203  //
2204  // Note: It is semantically valid to create a clone of the instruction only until
2205  // prepare_for_register_allocator phase as lifetime, intervals and codegen info are not
2206  // copied.
2207  //
2208  // Note: HEnvironment and some other fields are not copied and are set to default values, see
2209  // 'explicit HInstruction(const HInstruction& other)' for details.
2210  virtual HInstruction* Clone(ArenaAllocator* arena ATTRIBUTE_UNUSED) const {
2211    LOG(FATAL) << "Cloning is not implemented for the instruction " <<
2212                  DebugName() << " " << GetId();
2213    UNREACHABLE();
2214  }
2215
2216  // Return whether instruction can be cloned (copied).
2217  virtual bool IsClonable() const { return false; }
2218
2219  // Returns whether the instruction can be moved within the graph.
2220  // TODO: this method is used by LICM and GVN with possibly different
2221  //       meanings? split and rename?
2222  virtual bool CanBeMoved() const { return false; }
2223
2224  // Returns whether the two instructions are of the same kind.
2225  virtual bool InstructionTypeEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
2226    return false;
2227  }
2228
2229  // Returns whether any data encoded in the two instructions is equal.
2230  // This method does not look at the inputs. Both instructions must be
2231  // of the same type, otherwise the method has undefined behavior.
2232  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
2233    return false;
2234  }
2235
2236  // Returns whether two instructions are equal, that is:
2237  // 1) They have the same type and contain the same data (InstructionDataEquals).
2238  // 2) Their inputs are identical.
2239  bool Equals(const HInstruction* other) const;
2240
2241  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
2242  // is adopted and implemented by our C++ compiler(s). Fow now, we need to hide
2243  // the virtual function because the __attribute__((__pure__)) doesn't really
2244  // apply the strong requirement for virtual functions, preventing optimizations.
2245  InstructionKind GetKind() const PURE;
2246  virtual InstructionKind GetKindInternal() const = 0;
2247
  // Default hash: combines the instruction kind with the ids of all inputs.
  // Consistent with Equals(), which also requires matching kind and inputs.
  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (const HInstruction* input : GetInputs()) {
      result = (result * 31) + input->GetId();
    }
    return result;
  }
2255
  // Side effect bookkeeping, consumed by optimizations such as GVN, LICM and
  // the scheduler to decide what can be moved or eliminated.
  SideEffects GetSideEffects() const { return side_effects_; }
  void SetSideEffects(SideEffects other) { side_effects_ = other; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  // Liveness data, set by the liveness analysis / register allocation phases.
  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  // Whether this is the suspend check placed in the graph's entry block.
  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
  //      to access the dex cache.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsCurrentMethod();
  }

  // Returns whether the code generation of the instruction will require to have access
  // to the dex cache of the current method's declaring class via the current method.
  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

  // "Emitted at use site" instructions produce no code of their own; their
  // users materialize them (see kFlagEmittedAtUseSite below).
  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }
2292
 protected:
  // If set, the machine code for this instruction is assumed to be generated by
  // its users. Used by liveness analysis to compute use positions accordingly.
  static constexpr size_t kFlagEmittedAtUseSite = 0u;
  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
  // First bit available to subclasses for their own packed fields.
  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
  // All packed fields of all subclasses must fit in packed_fields_ (32 bits).
  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;

  // Returns a copy of the use record for input `i` (user back-pointer included).
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
    return GetInputRecords()[i];
  }

  // Overwrites the use record at `index` without touching any use list;
  // callers are responsible for keeping the input's use list consistent.
  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
    input_records[index] = input;
  }

  uint32_t GetPackedFields() const {
    return packed_fields_;
  }

  // Reads a single-bit flag at compile-time-constant position `flag`.
  template <size_t flag>
  bool GetPackedFlag() const {
    return (packed_fields_ & (1u << flag)) != 0u;
  }

  // Sets or clears a single-bit flag; clears first, then ORs in the new value.
  template <size_t flag>
  void SetPackedFlag(bool value = true) {
    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
  }

  // Reads a multi-bit field described by `BitFieldType` (a BitField<...>).
  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }

  // Writes a multi-bit field; DCHECKs that `value` fits in the field's width.
  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }

  // Copy construction for the instruction (used for Clone function).
  //
  // Fields (e.g. lifetime, intervals and codegen info) associated with phases starting from
  // prepare_for_register_allocator are not copied (set to default values).
  //
  // Copy constructors must be provided for every HInstruction type; default copy constructor is
  // fine for most of them. However for some of the instructions a custom copy constructor must be
  // specified (when instruction has non-trivially copyable fields and must have a special behaviour
  // for copying them).
  // Note: graph linkage (previous/next/block), id, ssa index, uses, environment
  // and codegen data are deliberately reset; only dex pc, packed fields, side
  // effects and the reference type handle carry over.
  explicit HInstruction(const HInstruction& other)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(other.dex_pc_),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(other.packed_fields_),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(other.side_effects_),
        reference_type_handle_(other.reference_type_handle_) {}
2358
 private:
  // After inserting new uses at the front of `uses_` (up to but excluding
  // `fixup_end`), rewrite each affected user's input record so that it stores
  // the correct before-iterator into this intrusive use list.
  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }

  // After removing the node following `before_use_node` from `uses_`, the next
  // remaining use (if any) must have its user's input record re-pointed at
  // `before_use_node` so that future erasures stay O(1).
  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }

  // Environment-use counterpart of FixUpUserRecordsAfterUseInsertion; patches
  // HEnvironment::vregs_ entries instead of instruction input records.
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }

  // Environment-use counterpart of FixUpUserRecordsAfterUseRemoval.
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }
2399
  // Intrusive links in the owning block's instruction list.
  HInstruction* previous_;
  HInstruction* next_;
  // The basic block containing this instruction; null until added to a block.
  HBasicBlock* block_;
  // The dex pc this instruction was built from; kNoDexPc for synthesized ones.
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // Packed fields.
  uint32_t packed_fields_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;
2448};
2449std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2450
2451// Iterates over the instructions, while preserving the next instruction
2452// in case the current instruction gets removed from the list by the user
2453// of this iterator.
2454class HInstructionIterator : public ValueObject {
2455 public:
2456  explicit HInstructionIterator(const HInstructionList& instructions)
2457      : instruction_(instructions.first_instruction_) {
2458    next_ = Done() ? nullptr : instruction_->GetNext();
2459  }
2460
2461  bool Done() const { return instruction_ == nullptr; }
2462  HInstruction* Current() const { return instruction_; }
2463  void Advance() {
2464    instruction_ = next_;
2465    next_ = Done() ? nullptr : instruction_->GetNext();
2466  }
2467
2468 private:
2469  HInstruction* instruction_;
2470  HInstruction* next_;
2471
2472  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2473};
2474
2475// Iterates over the instructions without saving the next instruction,
2476// therefore handling changes in the graph potentially made by the user
2477// of this iterator.
2478class HInstructionIteratorHandleChanges : public ValueObject {
2479 public:
2480  explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
2481      : instruction_(instructions.first_instruction_) {
2482  }
2483
2484  bool Done() const { return instruction_ == nullptr; }
2485  HInstruction* Current() const { return instruction_; }
2486  void Advance() {
2487    instruction_ = instruction_->GetNext();
2488  }
2489
2490 private:
2491  HInstruction* instruction_;
2492
2493  DISALLOW_COPY_AND_ASSIGN(HInstructionIteratorHandleChanges);
2494};
2495
2496
2497class HBackwardInstructionIterator : public ValueObject {
2498 public:
2499  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2500      : instruction_(instructions.last_instruction_) {
2501    next_ = Done() ? nullptr : instruction_->GetPrevious();
2502  }
2503
2504  bool Done() const { return instruction_ == nullptr; }
2505  HInstruction* Current() const { return instruction_; }
2506  void Advance() {
2507    instruction_ = next_;
2508    next_ = Done() ? nullptr : instruction_->GetPrevious();
2509  }
2510
2511 private:
2512  HInstruction* instruction_;
2513  HInstruction* next_;
2514
2515  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2516};
2517
// Base class for instructions whose number of inputs can change after
// construction (e.g. phis, invokes); inputs live in an arena-backed vector.
class HVariableInputSizeInstruction : public HInstruction {
 public:
  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  void AddInput(HInstruction* input);
  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

  // Removes all the inputs.
  // Also removes this instruction from each input's use list
  // (for non-environment uses only).
  void RemoveAllInputs();

 protected:
  HVariableInputSizeInstruction(SideEffects side_effects,
                                uint32_t dex_pc,
                                ArenaAllocator* allocator,
                                size_t number_of_inputs,
                                ArenaAllocKind kind)
      : HInstruction(side_effects, dex_pc),
        inputs_(number_of_inputs, allocator->Adapter(kind)) {}

  DEFAULT_COPY_CONSTRUCTOR(VariableInputSizeInstruction);

  // Input use records; resizable, arena-allocated.
  ArenaVector<HUserRecord<HInstruction*>> inputs_;
};
2547
// Base class for instructions with a fixed number of inputs N, stored inline
// in a std::array (no arena allocation needed for the input records).
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(TemplateInstruction<N>);

 private:
  // Fixed-size inline storage for the N input use records.
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};
2568
// HTemplateInstruction specialization for N=0.
// Needed because std::array<T, 0> would still require T to be constructible;
// instructions with no inputs simply expose an empty input record range.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>();
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(TemplateInstruction<0>);

 private:
  friend class SsaBuilder;
};
2589
// Base class for fixed-input instructions that produce a value; the result
// type is stored in the packed fields rather than a dedicated member.
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(DataType::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc) {
    this->template SetPackedField<TypeField>(type);
  }
  virtual ~HExpression() {}

  DataType::Type GetType() const OVERRIDE {
    return TypeField::Decode(this->GetPackedFields());
  }

 protected:
  // Packed-bit layout: the type field starts right after HInstruction's
  // generic bits and is wide enough for any DataType::Type value.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using TypeField = BitField<DataType::Type, kFieldType, kFieldTypeSize>;
  DEFAULT_COPY_CONSTRUCTOR(Expression<N>);
};
2613
// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block. Takes no inputs and has no
// side effects.
class HReturnVoid FINAL : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ReturnVoid);
};
2628
// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block. Its single input is the
// value being returned.
class HReturn FINAL : public HTemplateInstruction<1> {
 public:
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Return);
};
2645
// SSA phi node: merges the values of a dex register (`reg_number_`) coming
// from the predecessors of its block, one input per predecessor.
class HPhi FINAL : public HVariableInputSizeInstruction {
 public:
  HPhi(ArenaAllocator* allocator,
       uint32_t reg_number,
       size_t number_of_inputs,
       DataType::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HVariableInputSizeInstruction(
            SideEffects::None(),
            dex_pc,
            allocator,
            number_of_inputs,
            kArenaAllocPhiInputs),
        reg_number_(reg_number) {
    SetPackedField<TypeField>(ToPhiType(type));
    DCHECK_NE(GetType(), DataType::Type::kVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  }

  bool IsClonable() const OVERRIDE { return true; }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static DataType::Type ToPhiType(DataType::Type type) {
    return DataType::Kind(type);
  }

  // Catch phis merge values flowing in from throwing instructions in the try.
  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  DataType::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
  void SetType(DataType::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    //  (1) int  -> float/ref (primitive type propagation),
    //  (2) long -> double (primitive type propagation).
    DCHECK(GetType() == new_type ||
           (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kFloat32) ||
           (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kReference) ||
           (GetType() == DataType::Type::kInt64 && new_type == DataType::Type::kFloat64));
    SetPackedField<TypeField>(new_type);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  // The dex register this phi merges.
  uint32_t GetRegNumber() const { return reg_number_; }

  // Live/dead marking used by SsaBuilder and SsaPhiElimination (see ctor note).
  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
  bool IsDead() const { return !IsLive(); }
  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }

  // Whether `other` is a phi for the same dex register in the same block.
  bool IsVRegEquivalentOf(const HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Whether an adjacent phi holds the same dex register. Relies on equivalent
  // phis being placed next to each other in the block's phi list.
  bool HasEquivalentPhi() const {
    if (GetPrevious() != nullptr && GetPrevious()->AsPhi()->GetRegNumber() == GetRegNumber()) {
      return true;
    }
    if (GetNext() != nullptr && GetNext()->AsPhi()->GetRegNumber() == GetRegNumber()) {
      return true;
    }
    return false;
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Phi);

 private:
  // Packed-bit layout: type field, then the live and can-be-null flags.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeField = BitField<DataType::Type, kFieldType, kFieldTypeSize>;

  const uint32_t reg_number_;
};
2748
// The exit instruction is the only instruction of the exit block.
// Instructions aborting the method (HThrow and HReturn) must branch to the
// exit block.
class HExit FINAL : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Exit);
};
2763
// Jumps from one block to another. The block ending with an HGoto must have
// exactly one successor.
class HGoto FINAL : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsClonable() const OVERRIDE { return true; }
  bool IsControlFlow() const OVERRIDE { return true; }

  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Goto);
};
2781
// Base class for compile-time constants. Constants are side-effect free,
// movable, and queried by optimizations via the predicates below.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(DataType::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern?
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  // Raw bit representation of the constant's value, zero-extended to 64 bits.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Constant);
};
2805
2806class HNullConstant FINAL : public HConstant {
2807 public:
2808  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2809    return true;
2810  }
2811
2812  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2813
2814  size_t ComputeHashCode() const OVERRIDE { return 0; }
2815
2816  // The null constant representation is a 0-bit pattern.
2817  virtual bool IsZeroBitPattern() const { return true; }
2818
2819  DECLARE_INSTRUCTION(NullConstant);
2820
2821 protected:
2822  DEFAULT_COPY_CONSTRUCTOR(NullConstant);
2823
2824 private:
2825  explicit HNullConstant(uint32_t dex_pc = kNoDexPc)
2826      : HConstant(DataType::Type::kReference, dex_pc) {}
2827
2828  friend class HGraph;
2829};
2830
// Constants of the type int. Those can be from Dex instructions, or
// synthesized (for example with the if-eqz instruction).
class HIntConstant FINAL : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  // Zero-extends the 32-bit two's-complement pattern (no sign extension).
  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant()) << other->DebugName();
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  // Integer constants are used to encode Boolean values as well,
  // where 1 means true and 0 means false.
  bool IsTrue() const { return GetValue() == 1; }
  bool IsFalse() const { return GetValue() == 0; }

  DECLARE_INSTRUCTION(IntConstant);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(IntConstant);

 private:
  // Only HGraph may create int constants (it caches them per value).
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(DataType::Type::kInt32, dex_pc), value_(value) {}
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(DataType::Type::kInt32, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
};
2875
// Constants of the type long (64-bit).
class HLongConstant FINAL : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant()) << other->DebugName();
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(LongConstant);

 private:
  // Only HGraph may create long constants (it caches them per value).
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(DataType::Type::kInt64, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
};
2907
2908class HFloatConstant FINAL : public HConstant {
2909 public:
2910  float GetValue() const { return value_; }
2911
2912  uint64_t GetValueAsUint64() const OVERRIDE {
2913    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
2914  }
2915
2916  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2917    DCHECK(other->IsFloatConstant()) << other->DebugName();
2918    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
2919  }
2920
2921  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2922
2923  bool IsMinusOne() const OVERRIDE {
2924    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
2925  }
2926  bool IsArithmeticZero() const OVERRIDE {
2927    return std::fpclassify(value_) == FP_ZERO;
2928  }
2929  bool IsArithmeticPositiveZero() const {
2930    return IsArithmeticZero() && !std::signbit(value_);
2931  }
2932  bool IsArithmeticNegativeZero() const {
2933    return IsArithmeticZero() && std::signbit(value_);
2934  }
2935  bool IsZeroBitPattern() const OVERRIDE {
2936    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
2937  }
2938  bool IsOne() const OVERRIDE {
2939    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
2940  }
2941  bool IsNaN() const {
2942    return std::isnan(value_);
2943  }
2944
2945  DECLARE_INSTRUCTION(FloatConstant);
2946
2947 protected:
2948  DEFAULT_COPY_CONSTRUCTOR(FloatConstant);
2949
2950 private:
2951  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
2952      : HConstant(DataType::Type::kFloat32, dex_pc), value_(value) {}
2953  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
2954      : HConstant(DataType::Type::kFloat32, dex_pc), value_(bit_cast<float, int32_t>(value)) {}
2955
2956  const float value_;
2957
2958  // Only the SsaBuilder and HGraph can create floating-point constants.
2959  friend class SsaBuilder;
2960  friend class HGraph;
2961};
2962
2963class HDoubleConstant FINAL : public HConstant {
2964 public:
2965  double GetValue() const { return value_; }
2966
2967  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }
2968
2969  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2970    DCHECK(other->IsDoubleConstant()) << other->DebugName();
2971    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
2972  }
2973
2974  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2975
2976  bool IsMinusOne() const OVERRIDE {
2977    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
2978  }
2979  bool IsArithmeticZero() const OVERRIDE {
2980    return std::fpclassify(value_) == FP_ZERO;
2981  }
2982  bool IsArithmeticPositiveZero() const {
2983    return IsArithmeticZero() && !std::signbit(value_);
2984  }
2985  bool IsArithmeticNegativeZero() const {
2986    return IsArithmeticZero() && std::signbit(value_);
2987  }
2988  bool IsZeroBitPattern() const OVERRIDE {
2989    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
2990  }
2991  bool IsOne() const OVERRIDE {
2992    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
2993  }
2994  bool IsNaN() const {
2995    return std::isnan(value_);
2996  }
2997
2998  DECLARE_INSTRUCTION(DoubleConstant);
2999
3000 protected:
3001  DEFAULT_COPY_CONSTRUCTOR(DoubleConstant);
3002
3003 private:
3004  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
3005      : HConstant(DataType::Type::kFloat64, dex_pc), value_(value) {}
3006  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
3007      : HConstant(DataType::Type::kFloat64, dex_pc), value_(bit_cast<double, int64_t>(value)) {}
3008
3009  const double value_;
3010
3011  // Only the SsaBuilder and HGraph can create floating-point constants.
3012  friend class SsaBuilder;
3013  friend class HGraph;
3014};
3015
// Conditional branch. A block ending with an HIf instruction must have
// two successors: index 0 is taken when the input is true, index 1 when it
// is false.
class HIf FINAL : public HTemplateInstruction<1> {
 public:
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool IsControlFlow() const OVERRIDE { return true; }

  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(If);
};
3041

// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary FINAL : public HTemplateInstruction<0> {
 public:
  enum class BoundaryKind {
    kEntry,
    kExit,
    kLast = kExit
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // Exception handlers occupy successor indices 1 and up.
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(TryBoundary);

 private:
  // Packed-bit layout: the boundary kind stored after the generic bits.
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;
};
3105
3106// Deoptimize to interpreter, upon checking a condition.
3107class HDeoptimize FINAL : public HVariableInputSizeInstruction {
3108 public:
3109  // Use this constructor when the `HDeoptimize` acts as a barrier, where no code can move
3110  // across.
3111  HDeoptimize(ArenaAllocator* allocator,
3112              HInstruction* cond,
3113              DeoptimizationKind kind,
3114              uint32_t dex_pc)
3115      : HVariableInputSizeInstruction(
3116            SideEffects::All(),
3117            dex_pc,
3118            allocator,
3119            /* number_of_inputs */ 1,
3120            kArenaAllocMisc) {
3121    SetPackedFlag<kFieldCanBeMoved>(false);
3122    SetPackedField<DeoptimizeKindField>(kind);
3123    SetRawInputAt(0, cond);
3124  }
3125
3126  bool IsClonable() const OVERRIDE { return true; }
3127
3128  // Use this constructor when the `HDeoptimize` guards an instruction, and any user
3129  // that relies on the deoptimization to pass should have its input be the `HDeoptimize`
3130  // instead of `guard`.
3131  // We set CanTriggerGC to prevent any intermediate address to be live
3132  // at the point of the `HDeoptimize`.
3133  HDeoptimize(ArenaAllocator* allocator,
3134              HInstruction* cond,
3135              HInstruction* guard,
3136              DeoptimizationKind kind,
3137              uint32_t dex_pc)
3138      : HVariableInputSizeInstruction(
3139            SideEffects::CanTriggerGC(),
3140            dex_pc,
3141            allocator,
3142            /* number_of_inputs */ 2,
3143            kArenaAllocMisc) {
3144    SetPackedFlag<kFieldCanBeMoved>(true);
3145    SetPackedField<DeoptimizeKindField>(kind);
3146    SetRawInputAt(0, cond);
3147    SetRawInputAt(1, guard);
3148  }
3149
3150  bool CanBeMoved() const OVERRIDE { return GetPackedFlag<kFieldCanBeMoved>(); }
3151
3152  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3153    return (other->CanBeMoved() == CanBeMoved()) && (other->AsDeoptimize()->GetKind() == GetKind());
3154  }
3155
3156  bool NeedsEnvironment() const OVERRIDE { return true; }
3157
3158  bool CanThrow() const OVERRIDE { return true; }
3159
3160  DeoptimizationKind GetDeoptimizationKind() const { return GetPackedField<DeoptimizeKindField>(); }
3161
3162  DataType::Type GetType() const OVERRIDE {
3163    return GuardsAnInput() ? GuardedInput()->GetType() : DataType::Type::kVoid;
3164  }
3165
3166  bool GuardsAnInput() const {
3167    return InputCount() == 2;
3168  }
3169
3170  HInstruction* GuardedInput() const {
3171    DCHECK(GuardsAnInput());
3172    return InputAt(1);
3173  }
3174
3175  void RemoveGuard() {
3176    RemoveInputAt(1);
3177  }
3178
3179  DECLARE_INSTRUCTION(Deoptimize);
3180
3181 protected:
3182  DEFAULT_COPY_CONSTRUCTOR(Deoptimize);
3183
3184 private:
3185  static constexpr size_t kFieldCanBeMoved = kNumberOfGenericPackedBits;
3186  static constexpr size_t kFieldDeoptimizeKind = kNumberOfGenericPackedBits + 1;
3187  static constexpr size_t kFieldDeoptimizeKindSize =
3188      MinimumBitsToStore(static_cast<size_t>(DeoptimizationKind::kLast));
3189  static constexpr size_t kNumberOfDeoptimizePackedBits =
3190      kFieldDeoptimizeKind + kFieldDeoptimizeKindSize;
3191  static_assert(kNumberOfDeoptimizePackedBits <= kMaxNumberOfPackedBits,
3192                "Too many packed fields.");
3193  using DeoptimizeKindField =
3194      BitField<DeoptimizationKind, kFieldDeoptimizeKind, kFieldDeoptimizeKindSize>;
3195};
3196
// Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
// The compiled code checks this flag value in a guard before devirtualized call and
// if it's true, starts to do deoptimization.
// It has a 4-byte slot on stack.
// TODO: allocate a register for this flag.
class HShouldDeoptimizeFlag FINAL : public HVariableInputSizeInstruction {
 public:
  // CHA guards are only optimized in a separate pass and it has no side effects
  // with regard to other passes.
  HShouldDeoptimizeFlag(ArenaAllocator* allocator, uint32_t dex_pc)
      : HVariableInputSizeInstruction(SideEffects::None(), dex_pc, allocator, 0, kArenaAllocCHA) {
  }

  // The flag is read as a 32-bit integer.
  DataType::Type GetType() const OVERRIDE { return DataType::Type::kInt32; }

  // We do all CHA guard elimination/motion in a single pass, after which there is no
  // further guard elimination/motion since a guard might have been used for justification
  // of the elimination of another guard. Therefore, we pretend this guard cannot be moved
  // to avoid other optimizations trying to move it.
  bool CanBeMoved() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ShouldDeoptimizeFlag);
};
3223
// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod FINAL : public HExpression<0> {
 public:
  // NOTE(review): `type` is presumably the pointer type used to hold an
  // ArtMethod* on the target — confirm at the call sites.
  explicit HCurrentMethod(DataType::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(CurrentMethod);
};
3237
3238// Fetches an ArtMethod from the virtual table or the interface method table
3239// of a class.
3240class HClassTableGet FINAL : public HExpression<1> {
3241 public:
3242  enum class TableKind {
3243    kVTable,
3244    kIMTable,
3245    kLast = kIMTable
3246  };
3247  HClassTableGet(HInstruction* cls,
3248                 DataType::Type type,
3249                 TableKind kind,
3250                 size_t index,
3251                 uint32_t dex_pc)
3252      : HExpression(type, SideEffects::None(), dex_pc),
3253        index_(index) {
3254    SetPackedField<TableKindField>(kind);
3255    SetRawInputAt(0, cls);
3256  }
3257
3258  bool IsClonable() const OVERRIDE { return true; }
3259  bool CanBeMoved() const OVERRIDE { return true; }
3260  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3261    return other->AsClassTableGet()->GetIndex() == index_ &&
3262        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
3263  }
3264
3265  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
3266  size_t GetIndex() const { return index_; }
3267
3268  DECLARE_INSTRUCTION(ClassTableGet);
3269
3270 protected:
3271  DEFAULT_COPY_CONSTRUCTOR(ClassTableGet);
3272
3273 private:
3274  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
3275  static constexpr size_t kFieldTableKindSize =
3276      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
3277  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
3278  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
3279                "Too many packed fields.");
3280  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
3281
3282  // The index of the ArtMethod in the table.
3283  const size_t index_;
3284};
3285
// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
class HPackedSwitch FINAL : public HTemplateInstruction<1> {
 public:
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
    : HTemplateInstruction(SideEffects::None(), dex_pc),
      start_value_(start_value),
      num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  bool IsClonable() const OVERRIDE { return true; }

  // A switch always terminates its basic block.
  bool IsControlFlow() const OVERRIDE { return true; }

  // The case value that corresponds to successor zero.
  int32_t GetStartValue() const { return start_value_; }

  // Number of jump-table entries; successors [0, num_entries) are the cases.
  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;
};
3322
// Base class for operations with a single input producing a single result.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  // All of the UnaryOperation instructions are clonable.
  bool IsClonable() const OVERRIDE { return true; }

  HInstruction* GetInput() const { return InputAt(0); }
  DataType::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Unary operations carry no extra data; equality is fully determined by the
  // opcode and the input.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`. One overload per constant type; concrete
  // subclasses implement the per-type constant folding.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(UnaryOperation);
};
3357
// Base class for operations with two inputs producing a single result.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(DataType::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  // All of the BinaryOperation instructions are clonable.
  bool IsClonable() const OVERRIDE { return true; }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  DataType::Type GetResultType() const { return GetType(); }

  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    // Nothing to do if inputs are identical or already canonically ordered
    // (constant on the right).
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Binary operations carry no extra data; equality is fully determined by the
  // opcode and the inputs.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`. Overloads with a default body fail
  // loudly for type combinations a given subclass does not support.
  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                              HNullConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  // NOTE(review): the (long, int) overload presumably exists for operations
  // whose second operand is an int (e.g. shift distances) — confirm with the
  // subclasses that override it.
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(BinaryOperation);
};
3450
// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias
};

// Debug/diagnostic printing support for ComparisonBias.
std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
3461
3462class HCondition : public HBinaryOperation {
3463 public:
3464  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3465      : HBinaryOperation(DataType::Type::kBool, first, second, SideEffects::None(), dex_pc) {
3466    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
3467  }
3468
3469  // For code generation purposes, returns whether this instruction is just before
3470  // `instruction`, and disregard moves in between.
3471  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;
3472
3473  DECLARE_ABSTRACT_INSTRUCTION(Condition);
3474
3475  virtual IfCondition GetCondition() const = 0;
3476
3477  virtual IfCondition GetOppositeCondition() const = 0;
3478
3479  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
3480  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }
3481
3482  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
3483  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }
3484
3485  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3486    return GetPackedFields() == other->AsCondition()->GetPackedFields();
3487  }
3488
3489  bool IsFPConditionTrueIfNaN() const {
3490    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3491    IfCondition if_cond = GetCondition();
3492    if (if_cond == kCondNE) {
3493      return true;
3494    } else if (if_cond == kCondEQ) {
3495      return false;
3496    }
3497    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
3498  }
3499
3500  bool IsFPConditionFalseIfNaN() const {
3501    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3502    IfCondition if_cond = GetCondition();
3503    if (if_cond == kCondEQ) {
3504      return true;
3505    } else if (if_cond == kCondNE) {
3506      return false;
3507    }
3508    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
3509  }
3510
3511 protected:
3512  // Needed if we merge a HCompare into a HCondition.
3513  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
3514  static constexpr size_t kFieldComparisonBiasSize =
3515      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
3516  static constexpr size_t kNumberOfConditionPackedBits =
3517      kFieldComparisonBias + kFieldComparisonBiasSize;
3518  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3519  using ComparisonBiasField =
3520      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
3521
3522  template <typename T>
3523  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
3524
3525  template <typename T>
3526  int32_t CompareFP(T x, T y) const {
3527    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3528    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
3529    // Handle the bias.
3530    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
3531  }
3532
3533  // Return an integer constant containing the result of a condition evaluated at compile time.
3534  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
3535    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
3536  }
3537
3538  DEFAULT_COPY_CONSTRUCTOR(Condition);
3539};
3540
3541// Instruction to check if two inputs are equal to each other.
3542class HEqual FINAL : public HCondition {
3543 public:
3544  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3545      : HCondition(first, second, dex_pc) {}
3546
3547  bool IsCommutative() const OVERRIDE { return true; }
3548
3549  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3550                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3551    return MakeConstantCondition(true, GetDexPc());
3552  }
3553  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3554    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3555  }
3556  // In the following Evaluate methods, a HCompare instruction has
3557  // been merged into this HEqual instruction; evaluate it as
3558  // `Compare(x, y) == 0`.
3559  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3560    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3561                                 GetDexPc());
3562  }
3563  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3564    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3565  }
3566  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3567    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3568  }
3569
3570  DECLARE_INSTRUCTION(Equal);
3571
3572  IfCondition GetCondition() const OVERRIDE {
3573    return kCondEQ;
3574  }
3575
3576  IfCondition GetOppositeCondition() const OVERRIDE {
3577    return kCondNE;
3578  }
3579
3580 protected:
3581  DEFAULT_COPY_CONSTRUCTOR(Equal);
3582
3583 private:
3584  template <typename T> static bool Compute(T x, T y) { return x == y; }
3585};
3586
3587class HNotEqual FINAL : public HCondition {
3588 public:
3589  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3590      : HCondition(first, second, dex_pc) {}
3591
3592  bool IsCommutative() const OVERRIDE { return true; }
3593
3594  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3595                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3596    return MakeConstantCondition(false, GetDexPc());
3597  }
3598  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3599    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3600  }
3601  // In the following Evaluate methods, a HCompare instruction has
3602  // been merged into this HNotEqual instruction; evaluate it as
3603  // `Compare(x, y) != 0`.
3604  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3605    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3606  }
3607  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3608    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3609  }
3610  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3611    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3612  }
3613
3614  DECLARE_INSTRUCTION(NotEqual);
3615
3616  IfCondition GetCondition() const OVERRIDE {
3617    return kCondNE;
3618  }
3619
3620  IfCondition GetOppositeCondition() const OVERRIDE {
3621    return kCondEQ;
3622  }
3623
3624 protected:
3625  DEFAULT_COPY_CONSTRUCTOR(NotEqual);
3626
3627 private:
3628  template <typename T> static bool Compute(T x, T y) { return x != y; }
3629};
3630
3631class HLessThan FINAL : public HCondition {
3632 public:
3633  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3634      : HCondition(first, second, dex_pc) {}
3635
3636  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3637    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3638  }
3639  // In the following Evaluate methods, a HCompare instruction has
3640  // been merged into this HLessThan instruction; evaluate it as
3641  // `Compare(x, y) < 0`.
3642  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3643    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3644  }
3645  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3646    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3647  }
3648  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3649    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3650  }
3651
3652  DECLARE_INSTRUCTION(LessThan);
3653
3654  IfCondition GetCondition() const OVERRIDE {
3655    return kCondLT;
3656  }
3657
3658  IfCondition GetOppositeCondition() const OVERRIDE {
3659    return kCondGE;
3660  }
3661
3662 protected:
3663  DEFAULT_COPY_CONSTRUCTOR(LessThan);
3664
3665 private:
3666  template <typename T> static bool Compute(T x, T y) { return x < y; }
3667};
3668
3669class HLessThanOrEqual FINAL : public HCondition {
3670 public:
3671  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3672      : HCondition(first, second, dex_pc) {}
3673
3674  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3675    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3676  }
3677  // In the following Evaluate methods, a HCompare instruction has
3678  // been merged into this HLessThanOrEqual instruction; evaluate it as
3679  // `Compare(x, y) <= 0`.
3680  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3681    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3682  }
3683  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3684    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3685  }
3686  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3687    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3688  }
3689
3690  DECLARE_INSTRUCTION(LessThanOrEqual);
3691
3692  IfCondition GetCondition() const OVERRIDE {
3693    return kCondLE;
3694  }
3695
3696  IfCondition GetOppositeCondition() const OVERRIDE {
3697    return kCondGT;
3698  }
3699
3700 protected:
3701  DEFAULT_COPY_CONSTRUCTOR(LessThanOrEqual);
3702
3703 private:
3704  template <typename T> static bool Compute(T x, T y) { return x <= y; }
3705};
3706
3707class HGreaterThan FINAL : public HCondition {
3708 public:
3709  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3710      : HCondition(first, second, dex_pc) {}
3711
3712  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3713    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3714  }
3715  // In the following Evaluate methods, a HCompare instruction has
3716  // been merged into this HGreaterThan instruction; evaluate it as
3717  // `Compare(x, y) > 0`.
3718  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3719    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3720  }
3721  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3722    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3723  }
3724  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3725    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3726  }
3727
3728  DECLARE_INSTRUCTION(GreaterThan);
3729
3730  IfCondition GetCondition() const OVERRIDE {
3731    return kCondGT;
3732  }
3733
3734  IfCondition GetOppositeCondition() const OVERRIDE {
3735    return kCondLE;
3736  }
3737
3738 protected:
3739  DEFAULT_COPY_CONSTRUCTOR(GreaterThan);
3740
3741 private:
3742  template <typename T> static bool Compute(T x, T y) { return x > y; }
3743};
3744
3745class HGreaterThanOrEqual FINAL : public HCondition {
3746 public:
3747  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3748      : HCondition(first, second, dex_pc) {}
3749
3750  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3751    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3752  }
3753  // In the following Evaluate methods, a HCompare instruction has
3754  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3755  // `Compare(x, y) >= 0`.
3756  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3757    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3758  }
3759  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3760    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3761  }
3762  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3763    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3764  }
3765
3766  DECLARE_INSTRUCTION(GreaterThanOrEqual);
3767
3768  IfCondition GetCondition() const OVERRIDE {
3769    return kCondGE;
3770  }
3771
3772  IfCondition GetOppositeCondition() const OVERRIDE {
3773    return kCondLT;
3774  }
3775
3776 protected:
3777  DEFAULT_COPY_CONSTRUCTOR(GreaterThanOrEqual);
3778
3779 private:
3780  template <typename T> static bool Compute(T x, T y) { return x >= y; }
3781};
3782
3783class HBelow FINAL : public HCondition {
3784 public:
3785  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3786      : HCondition(first, second, dex_pc) {}
3787
3788  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3789    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3790  }
3791  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3792    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3793  }
3794  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3795                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3796    LOG(FATAL) << DebugName() << " is not defined for float values";
3797    UNREACHABLE();
3798  }
3799  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3800                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3801    LOG(FATAL) << DebugName() << " is not defined for double values";
3802    UNREACHABLE();
3803  }
3804
3805  DECLARE_INSTRUCTION(Below);
3806
3807  IfCondition GetCondition() const OVERRIDE {
3808    return kCondB;
3809  }
3810
3811  IfCondition GetOppositeCondition() const OVERRIDE {
3812    return kCondAE;
3813  }
3814
3815 protected:
3816  DEFAULT_COPY_CONSTRUCTOR(Below);
3817
3818 private:
3819  template <typename T> static bool Compute(T x, T y) {
3820    return MakeUnsigned(x) < MakeUnsigned(y);
3821  }
3822};
3823
3824class HBelowOrEqual FINAL : public HCondition {
3825 public:
3826  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3827      : HCondition(first, second, dex_pc) {}
3828
3829  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3830    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3831  }
3832  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3833    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3834  }
3835  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3836                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3837    LOG(FATAL) << DebugName() << " is not defined for float values";
3838    UNREACHABLE();
3839  }
3840  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3841                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3842    LOG(FATAL) << DebugName() << " is not defined for double values";
3843    UNREACHABLE();
3844  }
3845
3846  DECLARE_INSTRUCTION(BelowOrEqual);
3847
3848  IfCondition GetCondition() const OVERRIDE {
3849    return kCondBE;
3850  }
3851
3852  IfCondition GetOppositeCondition() const OVERRIDE {
3853    return kCondA;
3854  }
3855
3856 protected:
3857  DEFAULT_COPY_CONSTRUCTOR(BelowOrEqual);
3858
3859 private:
3860  template <typename T> static bool Compute(T x, T y) {
3861    return MakeUnsigned(x) <= MakeUnsigned(y);
3862  }
3863};
3864
// Unsigned "above" condition: true iff the unsigned interpretation of the
// first input is strictly greater than that of the second (x86 "A" flag
// semantics). Its logical opposite is HBelowOrEqual (kCondBE).
class HAbove FINAL : public HCondition {
 public:
  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  // Constant folding for integral operands.
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Unsigned comparison is undefined for floating-point operands; these
  // overloads must never be reached.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Above);

  IfCondition GetCondition() const OVERRIDE {
    return kCondA;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondBE;
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Above);

 private:
  // Compare the operands reinterpreted as unsigned values.
  template <typename T> static bool Compute(T x, T y) {
    return MakeUnsigned(x) > MakeUnsigned(y);
  }
};
3905
// Unsigned "above or equal" condition: true iff the unsigned interpretation
// of the first input is greater than or equal to that of the second (x86
// "AE" flag semantics). Its logical opposite is HBelow (kCondB).
class HAboveOrEqual FINAL : public HCondition {
 public:
  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  // Constant folding for integral operands.
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Unsigned comparison is undefined for floating-point operands; these
  // overloads must never be reached.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(AboveOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondAE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondB;
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(AboveOrEqual);

 private:
  // Compare the operands reinterpreted as unsigned values.
  template <typename T> static bool Compute(T x, T y) {
    return MakeUnsigned(x) >= MakeUnsigned(y);
  }
};
3946
// Instruction to check how two inputs compare to each other.
// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
// For floating-point inputs, an unordered comparison (a NaN operand) resolves
// to the direction given by the ComparisonBias.
class HCompare FINAL : public HBinaryOperation {
 public:
  // Note that `comparison_type` is the type of comparison performed
  // between the comparison's inputs, not the type of the instantiated
  // HCompare instruction (which is always DataType::Type::kInt32).
  HCompare(DataType::Type comparison_type,
           HInstruction* first,
           HInstruction* second,
           ComparisonBias bias,
           uint32_t dex_pc)
      : HBinaryOperation(DataType::Type::kInt32,
                         first,
                         second,
                         SideEffectsForArchRuntimeCalls(comparison_type),
                         dex_pc) {
    SetPackedField<ComparisonBiasField>(bias);
    // Both inputs must already agree with the requested comparison type.
    DCHECK_EQ(comparison_type, DataType::Kind(first->GetType()));
    DCHECK_EQ(comparison_type, DataType::Kind(second->GetType()));
  }

  // Three-way comparison of ordered values: -1, 0 or 1.
  template <typename T>
  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way comparison for floating-point values; the unordered case
  // (either operand is NaN) is resolved according to the bias.
  template <typename T>
  int32_t ComputeFP(T x, T y) const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    // Note that there is no "cmp-int" Dex instruction so we shouldn't
    // reach this code path when processing a freshly built HIR
    // graph. However HCompare integer instructions can be synthesized
    // by the instruction simplifier to implement IntegerCompare and
    // IntegerSignum intrinsics, so we have to handle this case.
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  // Two compares are equal only if their packed fields (i.e. the bias) match.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCompare()->GetPackedFields();
  }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }

  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
  // Only meaningful for floating-point comparisons.
  bool IsGtBias() const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    return GetBias() == ComparisonBias::kGtBias;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type type ATTRIBUTE_UNUSED) {
    // Comparisons do not require a runtime call in any back end.
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(Compare);

 protected:
  // The comparison bias is stored in the packed fields, after the
  // expression bits inherited from HExpression.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfComparePackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Return an integer constant containing the result of a comparison evaluated at compile time.
  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
    DCHECK(value == -1 || value == 0 || value == 1) << value;
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

  DEFAULT_COPY_CONSTRUCTOR(Compare);
};
4036
// Allocates a new instance of the class given by the single input (an
// HLoadClass, possibly wrapped in an HClinitCheck). Calls into the runtime,
// so it may trigger GC and may throw.
class HNewInstance FINAL : public HExpression<1> {
 public:
  HNewInstance(HInstruction* cls,
               uint32_t dex_pc,
               dex::TypeIndex type_index,
               const DexFile& dex_file,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(DataType::Type::kReference, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetPackedFlag<kFlagFinalizable>(finalizable);
    SetRawInputAt(0, cls);
  }

  bool IsClonable() const OVERRIDE { return true; }

  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // Can throw errors when out-of-memory or if it's not instantiable/accessible.
  bool CanThrow() const OVERRIDE { return true; }

  // Whether the runtime entrypoint performs the instantiable/access checks itself.
  bool NeedsChecks() const {
    return entrypoint_ == kQuickAllocObjectWithChecks;
  }

  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }

  // A successful allocation never yields null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  // Return the HLoadClass for the allocated type, looking through an
  // intervening HClinitCheck if present.
  HLoadClass* GetLoadClass() const {
    HInstruction* input = InputAt(0);
    if (input->IsClinitCheck()) {
      input = input->InputAt(0);
    }
    DCHECK(input->IsLoadClass());
    return input->AsLoadClass();
  }

  bool IsStringAlloc() const;

  DECLARE_INSTRUCTION(NewInstance);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(NewInstance);

 private:
  static constexpr size_t kFlagFinalizable = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const dex::TypeIndex type_index_;
  const DexFile& dex_file_;
  QuickEntrypointEnum entrypoint_;
};
4104
// Properties passed to HInvoke::SetIntrinsic() to refine an invoke's
// environment requirement, side effects and throwing behavior once it is
// recognized as an intrinsic.
enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};

enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};

enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
4121
// Abstract base class for all method-invocation instructions. Inputs are the
// call arguments, optionally followed by subclass-specific extra inputs (see
// GetNumberOfArguments()). By default an invoke is assumed to read and write
// all fields/arrays and to be able to throw; SetIntrinsic() can relax this.
class HInvoke : public HVariableInputSizeInstruction {
 public:
  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  DataType::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  InvokeType GetInvokeType() const {
    return GetPackedField<InvokeTypeField>();
  }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  // Mark this invoke as a recognized intrinsic and update its environment
  // requirement, side effects and throwing behavior accordingly.
  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }

  // Only recognized intrinsics without write side effects are movable.
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic() && !DoesAnyWrite(); }

  // Two invokes are considered equal only if both are the same (non-none) intrinsic.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  ArtMethod* GetResolvedMethod() const { return resolved_method_; }
  void SetResolvedMethod(ArtMethod* method) { resolved_method_ = method; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed-field layout: invoke type, then return type, then the can-throw flag.
  static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldReturnType =
      kFieldInvokeType + kFieldInvokeTypeSize;
  static constexpr size_t kFieldReturnTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
  using ReturnTypeField = BitField<DataType::Type, kFieldReturnType, kFieldReturnTypeSize>;

  HInvoke(ArenaAllocator* allocator,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          DataType::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          ArtMethod* resolved_method,
          InvokeType invoke_type)
    : HVariableInputSizeInstruction(
          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
          dex_pc,
          allocator,
          number_of_arguments + number_of_other_inputs,
          kArenaAllocInvokeInputs),
      number_of_arguments_(number_of_arguments),
      resolved_method_(resolved_method),
      dex_method_index_(dex_method_index),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
    SetPackedField<ReturnTypeField>(return_type);
    SetPackedField<InvokeTypeField>(invoke_type);
    // Conservatively assume any call can throw until proven otherwise.
    SetPackedFlag<kFlagCanThrow>(true);
  }

  DEFAULT_COPY_CONSTRUCTOR(Invoke);

  uint32_t number_of_arguments_;
  ArtMethod* resolved_method_;
  const uint32_t dex_method_index_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;
};
4230
// Invoke of a method that could not be resolved at compile time; resolution
// and dispatch happen at runtime (note the null resolved method).
class HInvokeUnresolved FINAL : public HInvoke {
 public:
  HInvokeUnresolved(ArenaAllocator* allocator,
                    uint32_t number_of_arguments,
                    DataType::Type return_type,
                    uint32_t dex_pc,
                    uint32_t dex_method_index,
                    InvokeType invoke_type)
      : HInvoke(allocator,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                nullptr,
                invoke_type) {
  }

  bool IsClonable() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(InvokeUnresolved);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InvokeUnresolved);
};
4256
// Signature-polymorphic invoke (invoke-polymorphic), dispatched as a virtual
// call with no resolved method known at compile time.
class HInvokePolymorphic FINAL : public HInvoke {
 public:
  HInvokePolymorphic(ArenaAllocator* allocator,
                     uint32_t number_of_arguments,
                     DataType::Type return_type,
                     uint32_t dex_pc,
                     uint32_t dex_method_index)
      : HInvoke(allocator,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                nullptr,
                kVirtual) {}

  bool IsClonable() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(InvokePolymorphic);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InvokePolymorphic);
};
4280
// Invoke of a statically-bound target (static or direct call). Besides the
// arguments it may carry extra trailing inputs: an explicit clinit check (or
// its HLoadClass replacement) and/or a "special input" such as the current
// method (see GetSpecialInputIndex()).
class HInvokeStaticOrDirect FINAL : public HInvoke {
 public:
  // Requirements of this method call regarding the class
  // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement {
    kNone,      // Class already initialized.
    kExplicit,  // Static call having explicit clinit check as last input.
    kImplicit,  // Static call implicitly requiring a clinit check.
    kLast = kImplicit
  };

  // Determines how to load the target ArtMethod*.
  enum class MethodLoadKind {
    // Use a String init ArtMethod* loaded from Thread entrypoints.
    kStringInit,

    // Use the method's own ArtMethod* loaded by the register allocator.
    kRecursive,

    // Use PC-relative boot image ArtMethod* address that will be known at link time.
    // Used for boot image methods referenced by boot image code.
    kBootImageLinkTimePcRelative,

    // Use ArtMethod* at a known address, embed the direct address in the code.
    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
    kDirectAddress,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for classes outside boot image when .bss is accessible with a PC-relative load.
    kBssEntry,

    // Make a runtime call to resolve and call the method. This is the last-resort-kind
    // used when other kinds are unimplemented on a particular architecture.
    kRuntimeCall,
  };

  // Determines the location of the code pointer.
  enum class CodePtrLocation {
    // Recursive call, use local PC-relative call instruction.
    kCallSelf,

    // Use code pointer from the ArtMethod*.
    // Used when we don't know the target code. This is also the last-resort-kind used when
    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
    kCallArtMethod,
  };

  struct DispatchInfo {
    MethodLoadKind method_load_kind;
    CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
    //     Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kDirectAddress
    uint64_t method_load_data;
  };

  HInvokeStaticOrDirect(ArenaAllocator* allocator,
                        uint32_t number_of_arguments,
                        DataType::Type return_type,
                        uint32_t dex_pc,
                        uint32_t method_index,
                        ArtMethod* resolved_method,
                        DispatchInfo dispatch_info,
                        InvokeType invoke_type,
                        MethodReference target_method,
                        ClinitCheckRequirement clinit_check_requirement)
      : HInvoke(allocator,
                number_of_arguments,
                // There is potentially one extra argument for the HCurrentMethod node, and
                // potentially one other if the clinit check is explicit, and potentially
                // one other if the method is a string factory.
                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
                return_type,
                dex_pc,
                method_index,
                resolved_method,
                invoke_type),
        target_method_(target_method),
        dispatch_info_(dispatch_info) {
    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
  }

  bool IsClonable() const OVERRIDE { return true; }

  // Replace the dispatch info, dropping the now-unneeded current-method input
  // when the new method load kind no longer requires it.
  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
    bool had_current_method_input = HasCurrentMethodInput();
    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);

    // Using the current method is the default and once we find a better
    // method load kind, we should not go back to using the current method.
    DCHECK(had_current_method_input || !needs_current_method_input);

    if (had_current_method_input && !needs_current_method_input) {
      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
      RemoveInputAt(GetSpecialInputIndex());
    }
    dispatch_info_ = dispatch_info;
  }

  DispatchInfo GetDispatchInfo() const {
    return dispatch_info_;
  }

  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
    DCHECK(InputCount() == GetSpecialInputIndex() ||
           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
    InsertInputAt(GetSpecialInputIndex(), input);
  }

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
      DCHECK(!input_records.empty());
      DCHECK_GT(input_records.size(), GetNumberOfArguments());
      HInstruction* last_input = input_records.back().GetInstruction();
      // Note: `last_input` may be null during arguments setup.
      if (last_input != nullptr) {
        // `last_input` is the last input of a static invoke marked as having
        // an explicit clinit check. It must either be:
        // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
        // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
        DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
      }
    }
    return input_records;
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  // The result can be null only for reference-returning calls, and never for
  // string factories (String init always produces a non-null string).
  bool CanBeNull() const OVERRIDE {
    return GetPackedField<ReturnTypeField>() == DataType::Type::kReference && !IsStringInit();
  }

  // Get the index of the special input, if any.
  //
  // If the invoke HasCurrentMethodInput(), the "special input" is the current
  // method pointer; otherwise there may be one platform-specific special input,
  // such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }

  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeMethodLoadKind() const {
    return GetMethodLoadKind() == MethodLoadKind::kBootImageLinkTimePcRelative ||
           GetMethodLoadKind() == MethodLoadKind::kBssEntry;
  }
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }

  QuickEntrypointEnum GetStringInitEntryPoint() const {
    DCHECK(IsStringInit());
    return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  const DexFile& GetDexFileForPcRelativeDexCache() const;

  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetInvokeType() == kStatic;
  }

  MethodReference GetTargetMethod() const {
    return target_method_;
  }

  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = inputs_.size() - 1u;
    HInstruction* last_input = inputs_.back().GetInstruction();
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kRuntimeCall;
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InvokeStaticOrDirect);

 private:
  // Clinit check requirement is packed after the bits used by HInvoke.
  static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
  static constexpr size_t kFieldClinitCheckRequirementSize =
      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
                                               kFieldClinitCheckRequirement,
                                               kFieldClinitCheckRequirementSize>;

  // Cached values of the resolved method, to avoid needing the mutator lock.
  MethodReference target_method_;
  DispatchInfo dispatch_info_;
};
// Stream-output helpers for the HInvokeStaticOrDirect dispatch enums.
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4533
// Virtual method invoke, dispatched through the receiver's vtable at the
// given vtable index. The receiver is input 0.
class HInvokeVirtual FINAL : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* allocator,
                 uint32_t number_of_arguments,
                 DataType::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 ArtMethod* resolved_method,
                 uint32_t vtable_index)
      : HInvoke(allocator,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kVirtual),
        vtable_index_(vtable_index) {}

  bool IsClonable() const OVERRIDE { return true; }

  // Certain intrinsics are known to never return null.
  bool CanBeNull() const OVERRIDE {
    switch (GetIntrinsic()) {
      case Intrinsics::kThreadCurrentThread:
      case Intrinsics::kStringBufferAppend:
      case Intrinsics::kStringBufferToString:
      case Intrinsics::kStringBuilderAppend:
      case Intrinsics::kStringBuilderToString:
        return false;
      default:
        return HInvoke::CanBeNull();
    }
  }

  // Only a null check on the receiver (input 0) can be made implicit, and
  // only when the invoke has not been replaced by intrinsified code.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InvokeVirtual);

 private:
  // Cached vtable index of the resolved method, to avoid needing the mutator lock.
  const uint32_t vtable_index_;
};
4584
// Interface method invoke, dispatched through the interface method table
// (IMT) at the given IMT index. The receiver is input 0.
class HInvokeInterface FINAL : public HInvoke {
 public:
  HInvokeInterface(ArenaAllocator* allocator,
                   uint32_t number_of_arguments,
                   DataType::Type return_type,
                   uint32_t dex_pc,
                   uint32_t dex_method_index,
                   ArtMethod* resolved_method,
                   uint32_t imt_index)
      : HInvoke(allocator,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kInterface),
        imt_index_(imt_index) {}

  bool IsClonable() const OVERRIDE { return true; }

  // Only a null check on the receiver (input 0) can be made implicit, and
  // only when the invoke has not been replaced by intrinsified code.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    // The assembly stub currently needs it.
    return true;
  }

  uint32_t GetImtIndex() const { return imt_index_; }

  DECLARE_INSTRUCTION(InvokeInterface);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InvokeInterface);

 private:
  // Cached IMT index of the resolved method, to avoid needing the mutator lock.
  const uint32_t imt_index_;
};
4627
// Arithmetic negation (-x) for int, long, float and double.
class HNeg FINAL : public HUnaryOperation {
 public:
  HNeg(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {
    // The result type must match the input's kind.
    DCHECK_EQ(result_type, DataType::Kind(input->GetType()));
  }

  template <typename T> static T Compute(T x) { return -x; }

  // Constant folding for each supported operand type.
  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Neg);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Neg);
};
4655
// Allocates a new array of the class given by input 0 (an HLoadClass) with
// the length given by input 1. Calls into the runtime, so it may trigger GC
// and may throw.
class HNewArray FINAL : public HExpression<2> {
 public:
  HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc)
      : HExpression(DataType::Type::kReference, SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, cls);
    SetRawInputAt(1, length);
  }

  bool IsClonable() const OVERRIDE { return true; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
  bool CanThrow() const OVERRIDE { return true; }

  // A successful allocation never yields null.
  bool CanBeNull() const OVERRIDE { return false; }

  HLoadClass* GetLoadClass() const {
    DCHECK(InputAt(0)->IsLoadClass());
    return InputAt(0)->AsLoadClass();
  }

  HInstruction* GetLength() const {
    return InputAt(1);
  }

  DECLARE_INSTRUCTION(NewArray);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(NewArray);
};
4688
4689class HAdd FINAL : public HBinaryOperation {
4690 public:
4691  HAdd(DataType::Type result_type,
4692       HInstruction* left,
4693       HInstruction* right,
4694       uint32_t dex_pc = kNoDexPc)
4695      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4696
4697  bool IsCommutative() const OVERRIDE { return true; }
4698
4699  template <typename T> static T Compute(T x, T y) { return x + y; }
4700
4701  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4702    return GetBlock()->GetGraph()->GetIntConstant(
4703        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4704  }
4705  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4706    return GetBlock()->GetGraph()->GetLongConstant(
4707        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4708  }
4709  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4710    return GetBlock()->GetGraph()->GetFloatConstant(
4711        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4712  }
4713  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4714    return GetBlock()->GetGraph()->GetDoubleConstant(
4715        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4716  }
4717
4718  DECLARE_INSTRUCTION(Add);
4719
4720 protected:
4721  DEFAULT_COPY_CONSTRUCTOR(Add);
4722};
4723
4724class HSub FINAL : public HBinaryOperation {
4725 public:
4726  HSub(DataType::Type result_type,
4727       HInstruction* left,
4728       HInstruction* right,
4729       uint32_t dex_pc = kNoDexPc)
4730      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4731
4732  template <typename T> static T Compute(T x, T y) { return x - y; }
4733
4734  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4735    return GetBlock()->GetGraph()->GetIntConstant(
4736        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4737  }
4738  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4739    return GetBlock()->GetGraph()->GetLongConstant(
4740        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4741  }
4742  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4743    return GetBlock()->GetGraph()->GetFloatConstant(
4744        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4745  }
4746  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4747    return GetBlock()->GetGraph()->GetDoubleConstant(
4748        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4749  }
4750
4751  DECLARE_INSTRUCTION(Sub);
4752
4753 protected:
4754  DEFAULT_COPY_CONSTRUCTOR(Sub);
4755};
4756
4757class HMul FINAL : public HBinaryOperation {
4758 public:
4759  HMul(DataType::Type result_type,
4760       HInstruction* left,
4761       HInstruction* right,
4762       uint32_t dex_pc = kNoDexPc)
4763      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4764
4765  bool IsCommutative() const OVERRIDE { return true; }
4766
4767  template <typename T> static T Compute(T x, T y) { return x * y; }
4768
4769  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4770    return GetBlock()->GetGraph()->GetIntConstant(
4771        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4772  }
4773  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4774    return GetBlock()->GetGraph()->GetLongConstant(
4775        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4776  }
4777  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4778    return GetBlock()->GetGraph()->GetFloatConstant(
4779        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4780  }
4781  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4782    return GetBlock()->GetGraph()->GetDoubleConstant(
4783        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4784  }
4785
4786  DECLARE_INSTRUCTION(Mul);
4787
4788 protected:
4789  DEFAULT_COPY_CONSTRUCTOR(Mul);
4790};
4791
4792class HDiv FINAL : public HBinaryOperation {
4793 public:
4794  HDiv(DataType::Type result_type,
4795       HInstruction* left,
4796       HInstruction* right,
4797       uint32_t dex_pc)
4798      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4799
4800  template <typename T>
4801  T ComputeIntegral(T x, T y) const {
4802    DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
4803    // Our graph structure ensures we never have 0 for `y` during
4804    // constant folding.
4805    DCHECK_NE(y, 0);
4806    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4807    return (y == -1) ? -x : x / y;
4808  }
4809
4810  template <typename T>
4811  T ComputeFP(T x, T y) const {
4812    DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
4813    return x / y;
4814  }
4815
4816  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4817    return GetBlock()->GetGraph()->GetIntConstant(
4818        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4819  }
4820  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4821    return GetBlock()->GetGraph()->GetLongConstant(
4822        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4823  }
4824  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4825    return GetBlock()->GetGraph()->GetFloatConstant(
4826        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4827  }
4828  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4829    return GetBlock()->GetGraph()->GetDoubleConstant(
4830        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4831  }
4832
4833  DECLARE_INSTRUCTION(Div);
4834
4835 protected:
4836  DEFAULT_COPY_CONSTRUCTOR(Div);
4837};
4838
4839class HRem FINAL : public HBinaryOperation {
4840 public:
4841  HRem(DataType::Type result_type,
4842       HInstruction* left,
4843       HInstruction* right,
4844       uint32_t dex_pc)
4845      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4846
4847  template <typename T>
4848  T ComputeIntegral(T x, T y) const {
4849    DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
4850    // Our graph structure ensures we never have 0 for `y` during
4851    // constant folding.
4852    DCHECK_NE(y, 0);
4853    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4854    return (y == -1) ? 0 : x % y;
4855  }
4856
4857  template <typename T>
4858  T ComputeFP(T x, T y) const {
4859    DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
4860    return std::fmod(x, y);
4861  }
4862
4863  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4864    return GetBlock()->GetGraph()->GetIntConstant(
4865        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4866  }
4867  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4868    return GetBlock()->GetGraph()->GetLongConstant(
4869        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4870  }
4871  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4872    return GetBlock()->GetGraph()->GetFloatConstant(
4873        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4874  }
4875  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4876    return GetBlock()->GetGraph()->GetDoubleConstant(
4877        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4878  }
4879
4880  DECLARE_INSTRUCTION(Rem);
4881
4882 protected:
4883  DEFAULT_COPY_CONSTRUCTOR(Rem);
4884};
4885
// Guards an integral division/remainder: throws ArithmeticException at
// runtime if its input is zero, otherwise passes the input value through.
class HDivZeroCheck FINAL : public HExpression<1> {
 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
  // constructor.
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // The check is transparent: its type is always the checked value's type.
  DataType::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Two checks on the same input are interchangeable for GVN.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(DivZeroCheck);
};
4911
4912class HShl FINAL : public HBinaryOperation {
4913 public:
4914  HShl(DataType::Type result_type,
4915       HInstruction* value,
4916       HInstruction* distance,
4917       uint32_t dex_pc = kNoDexPc)
4918      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4919    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
4920    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
4921  }
4922
4923  template <typename T>
4924  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4925    return value << (distance & max_shift_distance);
4926  }
4927
4928  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4929    return GetBlock()->GetGraph()->GetIntConstant(
4930        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4931  }
4932  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4933    return GetBlock()->GetGraph()->GetLongConstant(
4934        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4935  }
4936  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4937                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4938    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4939    UNREACHABLE();
4940  }
4941  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4942                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4943    LOG(FATAL) << DebugName() << " is not defined for float values";
4944    UNREACHABLE();
4945  }
4946  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4947                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4948    LOG(FATAL) << DebugName() << " is not defined for double values";
4949    UNREACHABLE();
4950  }
4951
4952  DECLARE_INSTRUCTION(Shl);
4953
4954 protected:
4955  DEFAULT_COPY_CONSTRUCTOR(Shl);
4956};
4957
// Arithmetic (sign-extending) right shift of an int32/int64 value by an int32
// distance; only the low bits of the distance are used, matching dex.
class HShr FINAL : public HBinaryOperation {
 public:
  HShr(DataType::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
  }

  // Constant-folding helper; only instantiated with int32_t/int64_t.
  // NOTE(review): right-shifting a negative signed value is
  // implementation-defined in C++; this presumably relies on the toolchain
  // emitting an arithmetic shift — confirm for all supported compilers.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value >> (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shr);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Shr);
};
5003
5004class HUShr FINAL : public HBinaryOperation {
5005 public:
5006  HUShr(DataType::Type result_type,
5007        HInstruction* value,
5008        HInstruction* distance,
5009        uint32_t dex_pc = kNoDexPc)
5010      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
5011    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5012    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5013  }
5014
5015  template <typename T>
5016  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
5017    typedef typename std::make_unsigned<T>::type V;
5018    V ux = static_cast<V>(value);
5019    return static_cast<T>(ux >> (distance & max_shift_distance));
5020  }
5021
5022  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
5023    return GetBlock()->GetGraph()->GetIntConstant(
5024        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5025  }
5026  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
5027    return GetBlock()->GetGraph()->GetLongConstant(
5028        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5029  }
5030  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5031                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5032    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5033    UNREACHABLE();
5034  }
5035  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5036                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5037    LOG(FATAL) << DebugName() << " is not defined for float values";
5038    UNREACHABLE();
5039  }
5040  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5041                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5042    LOG(FATAL) << DebugName() << " is not defined for double values";
5043    UNREACHABLE();
5044  }
5045
5046  DECLARE_INSTRUCTION(UShr);
5047
5048 protected:
5049  DEFAULT_COPY_CONSTRUCTOR(UShr);
5050};
5051
5052class HAnd FINAL : public HBinaryOperation {
5053 public:
5054  HAnd(DataType::Type result_type,
5055       HInstruction* left,
5056       HInstruction* right,
5057       uint32_t dex_pc = kNoDexPc)
5058      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
5059
5060  bool IsCommutative() const OVERRIDE { return true; }
5061
5062  template <typename T> static T Compute(T x, T y) { return x & y; }
5063
5064  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
5065    return GetBlock()->GetGraph()->GetIntConstant(
5066        Compute(x->GetValue(), y->GetValue()), GetDexPc());
5067  }
5068  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
5069    return GetBlock()->GetGraph()->GetLongConstant(
5070        Compute(x->GetValue(), y->GetValue()), GetDexPc());
5071  }
5072  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5073                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
5074    LOG(FATAL) << DebugName() << " is not defined for float values";
5075    UNREACHABLE();
5076  }
5077  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5078                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
5079    LOG(FATAL) << DebugName() << " is not defined for double values";
5080    UNREACHABLE();
5081  }
5082
5083  DECLARE_INSTRUCTION(And);
5084
5085 protected:
5086  DEFAULT_COPY_CONSTRUCTOR(And);
5087};
5088
5089class HOr FINAL : public HBinaryOperation {
5090 public:
5091  HOr(DataType::Type result_type,
5092      HInstruction* left,
5093      HInstruction* right,
5094      uint32_t dex_pc = kNoDexPc)
5095      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
5096
5097  bool IsCommutative() const OVERRIDE { return true; }
5098
5099  template <typename T> static T Compute(T x, T y) { return x | y; }
5100
5101  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
5102    return GetBlock()->GetGraph()->GetIntConstant(
5103        Compute(x->GetValue(), y->GetValue()), GetDexPc());
5104  }
5105  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
5106    return GetBlock()->GetGraph()->GetLongConstant(
5107        Compute(x->GetValue(), y->GetValue()), GetDexPc());
5108  }
5109  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5110                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
5111    LOG(FATAL) << DebugName() << " is not defined for float values";
5112    UNREACHABLE();
5113  }
5114  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5115                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
5116    LOG(FATAL) << DebugName() << " is not defined for double values";
5117    UNREACHABLE();
5118  }
5119
5120  DECLARE_INSTRUCTION(Or);
5121
5122 protected:
5123  DEFAULT_COPY_CONSTRUCTOR(Or);
5124};
5125
5126class HXor FINAL : public HBinaryOperation {
5127 public:
5128  HXor(DataType::Type result_type,
5129       HInstruction* left,
5130       HInstruction* right,
5131       uint32_t dex_pc = kNoDexPc)
5132      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
5133
5134  bool IsCommutative() const OVERRIDE { return true; }
5135
5136  template <typename T> static T Compute(T x, T y) { return x ^ y; }
5137
5138  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
5139    return GetBlock()->GetGraph()->GetIntConstant(
5140        Compute(x->GetValue(), y->GetValue()), GetDexPc());
5141  }
5142  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
5143    return GetBlock()->GetGraph()->GetLongConstant(
5144        Compute(x->GetValue(), y->GetValue()), GetDexPc());
5145  }
5146  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5147                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
5148    LOG(FATAL) << DebugName() << " is not defined for float values";
5149    UNREACHABLE();
5150  }
5151  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5152                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
5153    LOG(FATAL) << DebugName() << " is not defined for double values";
5154    UNREACHABLE();
5155  }
5156
5157  DECLARE_INSTRUCTION(Xor);
5158
5159 protected:
5160  DEFAULT_COPY_CONSTRUCTOR(Xor);
5161};
5162
5163class HRor FINAL : public HBinaryOperation {
5164 public:
5165  HRor(DataType::Type result_type, HInstruction* value, HInstruction* distance)
5166    : HBinaryOperation(result_type, value, distance) {
5167    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5168    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5169  }
5170
5171  template <typename T>
5172  static T Compute(T value, int32_t distance, int32_t max_shift_value) {
5173    typedef typename std::make_unsigned<T>::type V;
5174    V ux = static_cast<V>(value);
5175    if ((distance & max_shift_value) == 0) {
5176      return static_cast<T>(ux);
5177    } else {
5178      const V reg_bits = sizeof(T) * 8;
5179      return static_cast<T>(ux >> (distance & max_shift_value)) |
5180                           (value << (reg_bits - (distance & max_shift_value)));
5181    }
5182  }
5183
5184  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
5185    return GetBlock()->GetGraph()->GetIntConstant(
5186        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5187  }
5188  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
5189    return GetBlock()->GetGraph()->GetLongConstant(
5190        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5191  }
5192  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5193                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5194    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5195    UNREACHABLE();
5196  }
5197  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5198                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5199    LOG(FATAL) << DebugName() << " is not defined for float values";
5200    UNREACHABLE();
5201  }
5202  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5203                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5204    LOG(FATAL) << DebugName() << " is not defined for double values";
5205    UNREACHABLE();
5206  }
5207
5208  DECLARE_INSTRUCTION(Ror);
5209
5210 protected:
5211  DEFAULT_COPY_CONSTRUCTOR(Ror);
5212};
5213
5214// The value of a parameter in this method. Its location depends on
5215// the calling convention.
class HParameterValue FINAL : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  dex::TypeIndex type_index,
                  uint8_t index,
                  DataType::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    SetPackedFlag<kFlagIsThis>(is_this);
    // The implicit `this` argument of an instance method is never null;
    // every other reference parameter may be.
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const dex::TypeIndex type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;
};
5258
5259class HNot FINAL : public HUnaryOperation {
5260 public:
5261  HNot(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5262      : HUnaryOperation(result_type, input, dex_pc) {}
5263
5264  bool CanBeMoved() const OVERRIDE { return true; }
5265  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5266    return true;
5267  }
5268
5269  template <typename T> static T Compute(T x) { return ~x; }
5270
5271  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
5272    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5273  }
5274  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
5275    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
5276  }
5277  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5278    LOG(FATAL) << DebugName() << " is not defined for float values";
5279    UNREACHABLE();
5280  }
5281  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5282    LOG(FATAL) << DebugName() << " is not defined for double values";
5283    UNREACHABLE();
5284  }
5285
5286  DECLARE_INSTRUCTION(Not);
5287
5288 protected:
5289  DEFAULT_COPY_CONSTRUCTOR(Not);
5290};
5291
5292class HBooleanNot FINAL : public HUnaryOperation {
5293 public:
5294  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
5295      : HUnaryOperation(DataType::Type::kBool, input, dex_pc) {}
5296
5297  bool CanBeMoved() const OVERRIDE { return true; }
5298  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5299    return true;
5300  }
5301
5302  template <typename T> static bool Compute(T x) {
5303    DCHECK(IsUint<1>(x)) << x;
5304    return !x;
5305  }
5306
5307  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
5308    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5309  }
5310  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5311    LOG(FATAL) << DebugName() << " is not defined for long values";
5312    UNREACHABLE();
5313  }
5314  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5315    LOG(FATAL) << DebugName() << " is not defined for float values";
5316    UNREACHABLE();
5317  }
5318  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5319    LOG(FATAL) << DebugName() << " is not defined for double values";
5320    UNREACHABLE();
5321  }
5322
5323  DECLARE_INSTRUCTION(BooleanNot);
5324
5325 protected:
5326  DEFAULT_COPY_CONSTRUCTOR(BooleanNot);
5327};
5328
// Converts a value from one primitive type to another (dex *-to-* opcodes and
// implicit widening/narrowing conversions inserted by the compiler).
class HTypeConversion FINAL : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
    // Invariant: We should never generate a conversion to a Boolean value.
    DCHECK_NE(DataType::Type::kBool, result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  DataType::Type GetInputType() const { return GetInput()->GetType(); }
  DataType::Type GetResultType() const { return GetType(); }

  // Conversions are pure: two conversions of the same input to the same type
  // are interchangeable for GVN.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result.  If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  DECLARE_INSTRUCTION(TypeConversion);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(TypeConversion);
};
5357
// Sentinel "no register" marker; -1 deliberately wraps to 0xFFFFFFFF.
static constexpr uint32_t kNoRegNumber = -1;
5359
// Throws NullPointerException at runtime if its input is null; otherwise
// passes the (now known non-null) value through.
class HNullCheck FINAL : public HExpression<1> {
 public:
  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
  // constructor.
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool CanBeMoved() const OVERRIDE { return true; }
  // Two null checks on the same input are interchangeable for GVN.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  // If the check passes, the value is non-null by construction.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(NullCheck);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(NullCheck);
};
5386
5387// Embeds an ArtField and all the information required by the compiler. We cache
5388// that information to avoid requiring the mutator lock every time we need it.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(ArtField* field,
            MemberOffset field_offset,
            DataType::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file)
      : field_(field),
        field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file) {}

  ArtField* GetField() const { return field_; }
  MemberOffset GetFieldOffset() const { return field_offset_; }
  DataType::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }

 private:
  // The resolved field.
  ArtField* const field_;
  // Offset of the field within an instance (or the class, for static fields).
  const MemberOffset field_offset_;
  const DataType::Type field_type_;
  const bool is_volatile_;
  // The field index in the originating dex file.
  const uint32_t index_;
  // Class def index of the field's declaring class in `dex_file_`.
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
};
5423
// Reads an instance field from the object in input 0.
class HInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    ArtField* field,
                    DataType::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, value);
  }

  bool IsClonable() const OVERRIDE { return true; }
  // Volatile reads carry ordering constraints and must stay put.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // For GVN: two gets are equal when they read the same offset (inputs are
  // compared separately by the caller).
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  // Narrows/widens the static type between same-sized integral types only.
  void SetType(DataType::Type new_type) {
    DCHECK(DataType::IsIntegralType(GetType()));
    DCHECK(DataType::IsIntegralType(new_type));
    DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
    SetPackedField<TypeField>(new_type);
  }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InstanceFieldGet);

 private:
  const FieldInfo field_info_;
};
5482
// Writes a value into an instance field.
// Inputs: [0] the object holding the field, [1] the value to store.
class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    ArtField* field,
                    DataType::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Conservatively assume the stored value may be null until an optimization
    // proves otherwise (see ClearValueCanBeNull()).
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  bool IsClonable() const OVERRIDE { return true; }

  // An implicit null check is only possible on the object input and only when
  // art::CanDoImplicitNullCheckOn() accepts the field offset.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InstanceFieldSet);

 private:
  // Packed layout: a single flag placed right after the generic bits.
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;
};
5535
// Reads an element from an array (or a character from a String, for the
// String.charAt() intrinsic). Inputs: [0] the array, [1] the index.
class HArrayGet FINAL : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            DataType::Type type,
            uint32_t dex_pc)
     : HArrayGet(array,
                 index,
                 type,
                 SideEffects::ArrayReadOfType(type),
                 dex_pc,
                 /* is_string_char_at */ false) {}

  HArrayGet(HInstruction* array,
            HInstruction* index,
            DataType::Type type,
            SideEffects side_effects,
            uint32_t dex_pc,
            bool is_string_char_at)
      : HExpression(type, side_effects, dex_pc) {
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, unless the array is the result of NewArray, the array access is always
    // preceded by some form of null check necessary for the bounds check, usually
    // implicit null check on the ArrayLength input to BoundsCheck or Deoptimize for
    // dynamic BCE. There are cases when these could be removed to produce better code.
    // If we ever add optimizations to do so we should allow an implicit check here
    // (as long as the address falls in the first page).
    //
    // As an example of such fancy optimization, we could eliminate BoundsCheck for
    //     a = cond ? new int[1] : null;
    //     a[0];  // The Phi does not need bounds check for either input.
    return false;
  }

  // Returns whether `other` is an "equivalent" get (same dex pc). In debug
  // builds, additionally checks both gets access the same array and index,
  // with one integral and one floating-point result type.
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      if (DataType::IsIntOrLongType(GetType())) {
        DCHECK(DataType::IsFloatingPointType(other->GetType())) << other->GetType();
      } else {
        DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
        DCHECK(DataType::IsIntOrLongType(other->GetType())) << other->GetType();
      }
    }
    return result;
  }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  // Changes the result type to another integral type of the same bit width
  // (both constraints enforced by the DCHECKs below).
  void SetType(DataType::Type new_type) {
    DCHECK(DataType::IsIntegralType(GetType()));
    DCHECK(DataType::IsIntegralType(new_type));
    DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
    SetPackedField<TypeField>(new_type);
  }

  DECLARE_INSTRUCTION(ArrayGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ArrayGet);

 private:
  // We treat a String as an array, creating the HArrayGet from String.charAt()
  // intrinsic in the instruction simplifier. We can always determine whether
  // a particular HArrayGet is actually a String.charAt() by looking at the type
  // of the input but that requires holding the mutator lock, so we prefer to use
  // a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
};
5625
// Writes a value into an array element.
// Inputs: [0] the array, [1] the index, [2] the value to store.
class HArraySet FINAL : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            DataType::Type expected_component_type,
            uint32_t dex_pc)
      : HArraySet(array,
                  index,
                  value,
                  expected_component_type,
                  // Make a best guess for side effects now, may be refined during SSA building.
                  ComputeSideEffects(GetComponentType(value->GetType(), expected_component_type)),
                  dex_pc) {}

  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            DataType::Type expected_component_type,
            SideEffects side_effects,
            uint32_t dex_pc)
      : HTemplateInstruction(side_effects, dex_pc) {
    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
    // Only reference stores may need a type check (ArrayStoreException);
    // primitive stores never do.
    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == DataType::Type::kReference);
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
  }

  bool IsClonable() const OVERRIDE { return true; }

  bool NeedsEnvironment() const OVERRIDE {
    // We call a runtime method to throw ArrayStoreException.
    return NeedsTypeCheck();
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  // Called by optimizations that prove the store cannot fail the type check.
  void ClearNeedsTypeCheck() {
    SetPackedFlag<kFlagNeedsTypeCheck>(false);
  }

  void ClearValueCanBeNull() {
    SetPackedFlag<kFlagValueCanBeNull>(false);
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
  }

  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
  bool StaticTypeOfArrayIsObjectArray() const {
    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  DataType::Type GetComponentType() const {
    return GetComponentType(GetValue()->GetType(), GetRawExpectedComponentType());
  }

  static DataType::Type GetComponentType(DataType::Type value_type,
                                         DataType::Type expected_component_type) {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type` comes from SSA building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    return ((value_type == DataType::Type::kFloat32) || (value_type == DataType::Type::kFloat64))
        ? value_type
        : expected_component_type;
  }

  DataType::Type GetRawExpectedComponentType() const {
    return GetPackedField<ExpectedComponentTypeField>();
  }

  static SideEffects ComputeSideEffects(DataType::Type type) {
    return SideEffects::ArrayWriteOfType(type).Union(SideEffectsForArchRuntimeCalls(type));
  }

  // Reference stores may call into the runtime and can therefore trigger GC.
  static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type value_type) {
    return (value_type == DataType::Type::kReference) ? SideEffects::CanTriggerGC()
                                                      : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ArraySet);

 private:
  // Packed layout: the expected component type field, then three flags.
  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldExpectedComponentTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kFlagNeedsTypeCheck =
      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
  static constexpr size_t kNumberOfArraySetPackedBits =
      kFlagStaticTypeOfArrayIsObjectArray + 1;
  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ExpectedComponentTypeField =
      BitField<DataType::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;
};
5743
// Returns the length of the array given as input 0 (also used for the
// String.length()/String.isEmpty() intrinsics; see kFlagIsStringLength below).
// The result type is always Int32.
class HArrayLength FINAL : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
      : HExpression(DataType::Type::kInt32, SideEffects::None(), dex_pc) {
    SetPackedFlag<kFlagIsStringLength>(is_string_length);
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // The null check on the array input can always be made implicit.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }

  DECLARE_INSTRUCTION(ArrayLength);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ArrayLength);

 private:
  // We treat a String as an array, creating the HArrayLength from String.length()
  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
  // determine whether a particular HArrayLength is actually a String.length() by
  // looking at the type of the input but that requires holding the mutator lock, so
  // we prefer to use a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringLength = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
  static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
};
5781
// Checks that the index (input 0) is within the bounds given by the length
// (input 1); otherwise the runtime throws IndexOutOfBoundsException.
// The result is the (checked) index value itself.
class HBoundsCheck FINAL : public HExpression<2> {
 public:
  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
  // constructor.
  HBoundsCheck(HInstruction* index,
               HInstruction* length,
               uint32_t dex_pc,
               bool is_string_char_at = false)
      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(index->GetType()));
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, index);
    SetRawInputAt(1, length);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Needs an environment (and can throw) because the failure path calls into
  // the runtime.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetIndex() const { return InputAt(0); }

  DECLARE_INSTRUCTION(BoundsCheck);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(BoundsCheck);

 private:
  // Whether this bounds check originates from a String.charAt() intrinsic
  // (see the analogous flag in HArrayGet).
  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
};
5819
5820class HSuspendCheck FINAL : public HTemplateInstruction<0> {
5821 public:
5822  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
5823      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}
5824
5825  bool IsClonable() const OVERRIDE { return true; }
5826
5827  bool NeedsEnvironment() const OVERRIDE {
5828    return true;
5829  }
5830
5831  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
5832  SlowPathCode* GetSlowPath() const { return slow_path_; }
5833
5834  DECLARE_INSTRUCTION(SuspendCheck);
5835
5836 protected:
5837  DEFAULT_COPY_CONSTRUCTOR(SuspendCheck);
5838
5839 private:
5840  // Only used for code generation, in order to share the same slow path between back edges
5841  // of a same loop.
5842  SlowPathCode* slow_path_;
5843};
5844
5845// Pseudo-instruction which provides the native debugger with mapping information.
5846// It ensures that we can generate line number and local variables at this point.
5847class HNativeDebugInfo : public HTemplateInstruction<0> {
5848 public:
5849  explicit HNativeDebugInfo(uint32_t dex_pc)
5850      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}
5851
5852  bool NeedsEnvironment() const OVERRIDE {
5853    return true;
5854  }
5855
5856  DECLARE_INSTRUCTION(NativeDebugInfo);
5857
5858 protected:
5859  DEFAULT_COPY_CONSTRUCTOR(NativeDebugInfo);
5860};
5861
5862/**
5863 * Instruction to load a Class object.
5864 */
5865class HLoadClass FINAL : public HInstruction {
5866 public:
5867  // Determines how to load the Class.
5868  enum class LoadKind {
5869    // We cannot load this class. See HSharpening::SharpenLoadClass.
5870    kInvalid = -1,
5871
5872    // Use the Class* from the method's own ArtMethod*.
5873    kReferrersClass,
5874
5875    // Use PC-relative boot image Class* address that will be known at link time.
5876    // Used for boot image classes referenced by boot image code.
5877    kBootImageLinkTimePcRelative,
5878
5879    // Use a known boot image Class* address, embedded in the code by the codegen.
5880    // Used for boot image classes referenced by apps in AOT- and JIT-compiled code.
5881    kBootImageAddress,
5882
5883    // Use a PC-relative load from a boot image ClassTable mmapped into the .bss
5884    // of the oat file.
5885    kBootImageClassTable,
5886
5887    // Load from an entry in the .bss section using a PC-relative load.
5888    // Used for classes outside boot image when .bss is accessible with a PC-relative load.
5889    kBssEntry,
5890
5891    // Load from the root table associated with the JIT compiled method.
5892    kJitTableAddress,
5893
5894    // Load using a simple runtime call. This is the fall-back load kind when
5895    // the codegen is unable to use another appropriate kind.
5896    kRuntimeCall,
5897
5898    kLast = kRuntimeCall
5899  };
5900
5901  HLoadClass(HCurrentMethod* current_method,
5902             dex::TypeIndex type_index,
5903             const DexFile& dex_file,
5904             Handle<mirror::Class> klass,
5905             bool is_referrers_class,
5906             uint32_t dex_pc,
5907             bool needs_access_check)
5908      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
5909        special_input_(HUserRecord<HInstruction*>(current_method)),
5910        type_index_(type_index),
5911        dex_file_(dex_file),
5912        klass_(klass),
5913        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
5914    // Referrers class should not need access check. We never inline unverified
5915    // methods so we can't possibly end up in this situation.
5916    DCHECK(!is_referrers_class || !needs_access_check);
5917
5918    SetPackedField<LoadKindField>(
5919        is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kRuntimeCall);
5920    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
5921    SetPackedFlag<kFlagIsInBootImage>(false);
5922    SetPackedFlag<kFlagGenerateClInitCheck>(false);
5923  }
5924
5925  bool IsClonable() const OVERRIDE { return true; }
5926
5927  void SetLoadKind(LoadKind load_kind);
5928
5929  LoadKind GetLoadKind() const {
5930    return GetPackedField<LoadKindField>();
5931  }
5932
5933  bool CanBeMoved() const OVERRIDE { return true; }
5934
5935  bool InstructionDataEquals(const HInstruction* other) const;
5936
5937  size_t ComputeHashCode() const OVERRIDE { return type_index_.index_; }
5938
5939  bool CanBeNull() const OVERRIDE { return false; }
5940
5941  bool NeedsEnvironment() const OVERRIDE {
5942    return CanCallRuntime();
5943  }
5944
5945  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
5946    // The entrypoint the code generator is going to call does not do
5947    // clinit of the class.
5948    DCHECK(!NeedsAccessCheck());
5949    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
5950  }
5951
5952  bool CanCallRuntime() const {
5953    return NeedsAccessCheck() ||
5954           MustGenerateClinitCheck() ||
5955           GetLoadKind() == LoadKind::kRuntimeCall ||
5956           GetLoadKind() == LoadKind::kBssEntry;
5957  }
5958
5959  bool CanThrow() const OVERRIDE {
5960    return NeedsAccessCheck() ||
5961           MustGenerateClinitCheck() ||
5962           // If the class is in the boot image, the lookup in the runtime call cannot throw.
5963           // This keeps CanThrow() consistent between non-PIC (using kBootImageAddress) and
5964           // PIC and subsequently avoids a DCE behavior dependency on the PIC option.
5965           ((GetLoadKind() == LoadKind::kRuntimeCall ||
5966             GetLoadKind() == LoadKind::kBssEntry) &&
5967            !IsInBootImage());
5968  }
5969
5970  ReferenceTypeInfo GetLoadedClassRTI() {
5971    return loaded_class_rti_;
5972  }
5973
5974  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
5975    // Make sure we only set exact types (the loaded class should never be merged).
5976    DCHECK(rti.IsExact());
5977    loaded_class_rti_ = rti;
5978  }
5979
5980  dex::TypeIndex GetTypeIndex() const { return type_index_; }
5981  const DexFile& GetDexFile() const { return dex_file_; }
5982
5983  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
5984    return GetLoadKind() == LoadKind::kRuntimeCall;
5985  }
5986
5987  static SideEffects SideEffectsForArchRuntimeCalls() {
5988    return SideEffects::CanTriggerGC();
5989  }
5990
5991  bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
5992  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
5993  bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
5994  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
5995
5996  void MarkInBootImage() {
5997    SetPackedFlag<kFlagIsInBootImage>(true);
5998  }
5999
6000  void AddSpecialInput(HInstruction* special_input);
6001
6002  using HInstruction::GetInputRecords;  // Keep the const version visible.
6003  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
6004    return ArrayRef<HUserRecord<HInstruction*>>(
6005        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
6006  }
6007
6008  DataType::Type GetType() const OVERRIDE {
6009    return DataType::Type::kReference;
6010  }
6011
6012  Handle<mirror::Class> GetClass() const {
6013    return klass_;
6014  }
6015
6016  DECLARE_INSTRUCTION(LoadClass);
6017
6018 protected:
6019  DEFAULT_COPY_CONSTRUCTOR(LoadClass);
6020
6021 private:
6022  static constexpr size_t kFlagNeedsAccessCheck    = kNumberOfGenericPackedBits;
6023  static constexpr size_t kFlagIsInBootImage       = kFlagNeedsAccessCheck + 1;
6024  // Whether this instruction must generate the initialization check.
6025  // Used for code generation.
6026  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
6027  static constexpr size_t kFieldLoadKind           = kFlagGenerateClInitCheck + 1;
6028  static constexpr size_t kFieldLoadKindSize =
6029      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
6030  static constexpr size_t kNumberOfLoadClassPackedBits = kFieldLoadKind + kFieldLoadKindSize;
6031  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
6032  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
6033
6034  static bool HasTypeReference(LoadKind load_kind) {
6035    return load_kind == LoadKind::kReferrersClass ||
6036        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
6037        load_kind == LoadKind::kBootImageClassTable ||
6038        load_kind == LoadKind::kBssEntry ||
6039        load_kind == LoadKind::kRuntimeCall;
6040  }
6041
6042  void SetLoadKindInternal(LoadKind load_kind);
6043
6044  // The special input is the HCurrentMethod for kRuntimeCall or kReferrersClass.
6045  // For other load kinds it's empty or possibly some architecture-specific instruction
6046  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
6047  HUserRecord<HInstruction*> special_input_;
6048
6049  // A type index and dex file where the class can be accessed. The dex file can be:
6050  // - The compiling method's dex file if the class is defined there too.
6051  // - The compiling method's dex file if the class is referenced there.
6052  // - The dex file where the class is defined. When the load kind can only be
6053  //   kBssEntry or kRuntimeCall, we cannot emit code for this `HLoadClass`.
6054  const dex::TypeIndex type_index_;
6055  const DexFile& dex_file_;
6056
6057  Handle<mirror::Class> klass_;
6058
6059  ReferenceTypeInfo loaded_class_rti_;
6060};
6061std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
6062
// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
inline void HLoadClass::SetLoadKind(LoadKind load_kind) {
  // The load kind should be determined before inserting the instruction to the graph.
  DCHECK(GetBlock() == nullptr);
  DCHECK(GetEnvironment() == nullptr);
  SetPackedField<LoadKindField>(load_kind);
  // Only kRuntimeCall and kReferrersClass use the HCurrentMethod special
  // input; clear it for every other kind.
  if (load_kind != LoadKind::kRuntimeCall && load_kind != LoadKind::kReferrersClass) {
    special_input_ = HUserRecord<HInstruction*>(nullptr);
  }
  // Note: NeedsEnvironment() depends on the packed field set above. A load
  // that cannot call the runtime has no side effects either.
  if (!NeedsEnvironment()) {
    SetSideEffects(SideEffects::None());
  }
}
6076
// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
  // The special input is used for PC-relative loads on some architectures,
  // including literal pool loads, which are PC-relative too.
  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
         GetLoadKind() == LoadKind::kBootImageAddress ||
         GetLoadKind() == LoadKind::kBootImageClassTable ||
         GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
  // The special input may only be set once.
  DCHECK(special_input_.GetInstruction() == nullptr);
  special_input_ = HUserRecord<HInstruction*>(special_input);
  special_input->AddUseAt(this, 0);
}
6089
// Instruction to load a String object, analogous to HLoadClass above.
class HLoadString FINAL : public HInstruction {
 public:
  // Determines how to load the String.
  enum class LoadKind {
    // Use PC-relative boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code.
    kBootImageLinkTimePcRelative,

    // Use a known boot image String* address, embedded in the code by the codegen.
    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
    kBootImageAddress,

    // Use a PC-relative load from a boot image InternTable mmapped into the .bss
    // of the oat file.
    kBootImageInternTable,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for strings outside boot image when .bss is accessible with a PC-relative load.
    kBssEntry,

    // Load from the root table associated with the JIT compiled method.
    kJitTableAddress,

    // Load using a simple runtime call. This is the fall-back load kind when
    // the codegen is unable to use another appropriate kind.
    kRuntimeCall,

    kLast = kRuntimeCall,
  };

  HLoadString(HCurrentMethod* current_method,
              dex::StringIndex string_index,
              const DexFile& dex_file,
              uint32_t dex_pc)
      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        string_index_(string_index),
        dex_file_(dex_file) {
    // Start with the fall-back kind; SetLoadKind() may change it before the
    // instruction is inserted into the graph.
    SetPackedField<LoadKindField>(LoadKind::kRuntimeCall);
  }

  bool IsClonable() const OVERRIDE { return true; }

  void SetLoadKind(LoadKind load_kind);

  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  dex::StringIndex GetStringIndex() const {
    return string_index_;
  }

  Handle<mirror::String> GetString() const {
    return string_;
  }

  void SetString(Handle<mirror::String> str) {
    string_ = str;
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE;

  size_t ComputeHashCode() const OVERRIDE { return string_index_.index_; }

  // Will call the runtime if we need to load the string through
  // the dex cache and the string is not guaranteed to be there yet.
  bool NeedsEnvironment() const OVERRIDE {
    LoadKind load_kind = GetLoadKind();
    if (load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kBootImageAddress ||
        load_kind == LoadKind::kBootImageInternTable ||
        load_kind == LoadKind::kJitTableAddress) {
      return false;
    }
    return true;
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    return GetLoadKind() == LoadKind::kRuntimeCall;
  }

  bool CanBeNull() const OVERRIDE { return false; }
  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  void AddSpecialInput(HInstruction* special_input);

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // The instruction has at most one input: the optional special input
  // (see the comment on special_input_ below).
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  DataType::Type GetType() const OVERRIDE {
    return DataType::Type::kReference;
  }

  DECLARE_INSTRUCTION(LoadString);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(LoadString);

 private:
  static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;

  void SetLoadKindInternal(LoadKind load_kind);

  // The special input is the HCurrentMethod for kRuntimeCall.
  // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
  HUserRecord<HInstruction*> special_input_;

  dex::StringIndex string_index_;
  const DexFile& dex_file_;

  Handle<mirror::String> string_;
};
6222std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
6223
// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
inline void HLoadString::SetLoadKind(LoadKind load_kind) {
  // The load kind should be determined before inserting the instruction to the graph.
  DCHECK(GetBlock() == nullptr);
  DCHECK(GetEnvironment() == nullptr);
  // May only be called once, from the initial (fall-back) kind.
  DCHECK_EQ(GetLoadKind(), LoadKind::kRuntimeCall);
  SetPackedField<LoadKindField>(load_kind);
  // Only kRuntimeCall uses the HCurrentMethod special input; clear it for
  // every other kind.
  if (load_kind != LoadKind::kRuntimeCall) {
    special_input_ = HUserRecord<HInstruction*>(nullptr);
  }
  // Note: NeedsEnvironment() depends on the packed field set above. A load
  // that cannot call the runtime has no side effects either.
  if (!NeedsEnvironment()) {
    SetSideEffects(SideEffects::None());
  }
}
6238
// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
  // The special input is used for PC-relative loads on some architectures,
  // including literal pool loads, which are PC-relative too.
  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
         GetLoadKind() == LoadKind::kBootImageAddress ||
         GetLoadKind() == LoadKind::kBootImageInternTable ||
         GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
  // HLoadString::GetInputRecords() returns an empty array at this point,
  // so use the GetInputRecords() from the base class to set the input record.
  DCHECK(special_input_.GetInstruction() == nullptr);
  special_input_ = HUserRecord<HInstruction*>(special_input);
  special_input->AddUseAt(this, 0);
}
6253
6254/**
6255 * Performs an initialization check on its Class object input.
6256 */
6257class HClinitCheck FINAL : public HExpression<1> {
6258 public:
6259  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
6260      : HExpression(
6261            DataType::Type::kReference,
6262            SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6263            dex_pc) {
6264    SetRawInputAt(0, constant);
6265  }
6266
6267  bool IsClonable() const OVERRIDE { return true; }
6268  bool CanBeMoved() const OVERRIDE { return true; }
6269  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
6270    return true;
6271  }
6272
6273  bool NeedsEnvironment() const OVERRIDE {
6274    // May call runtime to initialize the class.
6275    return true;
6276  }
6277
6278  bool CanThrow() const OVERRIDE { return true; }
6279
6280  HLoadClass* GetLoadClass() const {
6281    DCHECK(InputAt(0)->IsLoadClass());
6282    return InputAt(0)->AsLoadClass();
6283  }
6284
6285  DECLARE_INSTRUCTION(ClinitCheck);
6286
6287
6288 protected:
6289  DEFAULT_COPY_CONSTRUCTOR(ClinitCheck);
6290};
6291
6292class HStaticFieldGet FINAL : public HExpression<1> {
6293 public:
6294  HStaticFieldGet(HInstruction* cls,
6295                  ArtField* field,
6296                  DataType::Type field_type,
6297                  MemberOffset field_offset,
6298                  bool is_volatile,
6299                  uint32_t field_idx,
6300                  uint16_t declaring_class_def_index,
6301                  const DexFile& dex_file,
6302                  uint32_t dex_pc)
6303      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
6304        field_info_(field,
6305                    field_offset,
6306                    field_type,
6307                    is_volatile,
6308                    field_idx,
6309                    declaring_class_def_index,
6310                    dex_file) {
6311    SetRawInputAt(0, cls);
6312  }
6313
6314
6315  bool IsClonable() const OVERRIDE { return true; }
6316  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }
6317
6318  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
6319    const HStaticFieldGet* other_get = other->AsStaticFieldGet();
6320    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
6321  }
6322
6323  size_t ComputeHashCode() const OVERRIDE {
6324    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
6325  }
6326
6327  const FieldInfo& GetFieldInfo() const { return field_info_; }
6328  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6329  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
6330  bool IsVolatile() const { return field_info_.IsVolatile(); }
6331
6332  void SetType(DataType::Type new_type) {
6333    DCHECK(DataType::IsIntegralType(GetType()));
6334    DCHECK(DataType::IsIntegralType(new_type));
6335    DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
6336    SetPackedField<TypeField>(new_type);
6337  }
6338
6339  DECLARE_INSTRUCTION(StaticFieldGet);
6340
6341 protected:
6342  DEFAULT_COPY_CONSTRUCTOR(StaticFieldGet);
6343
6344 private:
6345  const FieldInfo field_info_;
6346};
6347
// Write to a resolved static field. Input 0 is the holder class (`cls`),
// input 1 is the value to store.
class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  ArtField* field,
                  DataType::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Conservatively assume the stored value may be null until an analysis clears the flag.
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  bool IsClonable() const OVERRIDE { return true; }
  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(StaticFieldSet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(StaticFieldSet);

 private:
  // Packed flag recording whether the stored value may be null.
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;
};
6396
// Read of an instance field that could not be resolved at compile time;
// the field is identified only by its dex field index.
class HUnresolvedInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HUnresolvedInstanceFieldGet(HInstruction* obj,
                              DataType::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetRawInputAt(0, obj);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DataType::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(UnresolvedInstanceFieldGet);

 private:
  // Dex file field index identifying the field to access.
  const uint32_t field_index_;
};
6423
6424class HUnresolvedInstanceFieldSet FINAL : public HTemplateInstruction<2> {
6425 public:
6426  HUnresolvedInstanceFieldSet(HInstruction* obj,
6427                              HInstruction* value,
6428                              DataType::Type field_type,
6429                              uint32_t field_index,
6430                              uint32_t dex_pc)
6431      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6432        field_index_(field_index) {
6433    SetPackedField<FieldTypeField>(field_type);
6434    DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
6435    SetRawInputAt(0, obj);
6436    SetRawInputAt(1, value);
6437  }
6438
6439  bool IsClonable() const OVERRIDE { return true; }
6440  bool NeedsEnvironment() const OVERRIDE { return true; }
6441  bool CanThrow() const OVERRIDE { return true; }
6442
6443  DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6444  uint32_t GetFieldIndex() const { return field_index_; }
6445
6446  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
6447
6448 protected:
6449  DEFAULT_COPY_CONSTRUCTOR(UnresolvedInstanceFieldSet);
6450
6451 private:
6452  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6453  static constexpr size_t kFieldFieldTypeSize =
6454      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
6455  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
6456      kFieldFieldType + kFieldFieldTypeSize;
6457  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6458                "Too many packed fields.");
6459  using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;
6460
6461  const uint32_t field_index_;
6462};
6463
// Read of a static field that could not be resolved at compile time;
// the field is identified only by its dex field index.
class HUnresolvedStaticFieldGet FINAL : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(DataType::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DataType::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(UnresolvedStaticFieldGet);

 private:
  // Dex file field index identifying the field to access.
  const uint32_t field_index_;
};
6488
// Write to a static field that could not be resolved at compile time;
// the field is identified only by its dex field index. Input 0 is the value.
class HUnresolvedStaticFieldSet FINAL : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            DataType::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetPackedField<FieldTypeField>(field_type);
    DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
    SetRawInputAt(0, value);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(UnresolvedStaticFieldSet);

 private:
  // The field type is stored in the packed bits (no FieldInfo for unresolved fields).
  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
      kFieldFieldType + kFieldFieldTypeSize;
  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;

  // Dex file field index identifying the field to access.
  const uint32_t field_index_;
};
6526
// Implement the move-exception DEX instruction.
class HLoadException FINAL : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(DataType::Type::kReference, SideEffects::None(), dex_pc) {}

  // A caught exception reference is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(LoadException);
};
6540
// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
class HClearException FINAL : public HTemplateInstruction<0> {
 public:
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      // AllWrites: conservatively models the write to the thread-local storage.
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}

  DECLARE_INSTRUCTION(ClearException);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ClearException);
};
6553
// Throws the exception object in input 0.
class HThrow FINAL : public HTemplateInstruction<1> {
 public:
  HThrow(HInstruction* exception, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, exception);
  }

  // Throwing terminates the basic block.
  bool IsControlFlow() const OVERRIDE { return true; }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Throw);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Throw);
};
6572
6573/**
6574 * Implementation strategies for the code generator of a HInstanceOf
6575 * or `HCheckCast`.
6576 */
6577enum class TypeCheckKind {
6578  kUnresolvedCheck,       // Check against an unresolved type.
6579  kExactCheck,            // Can do a single class compare.
6580  kClassHierarchyCheck,   // Can just walk the super class chain.
6581  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
6582  kInterfaceCheck,        // No optimization yet when checking against an interface.
6583  kArrayObjectCheck,      // Can just check if the array is not primitive.
6584  kArrayCheck,            // No optimization yet when checking against a generic array.
6585  kLast = kArrayCheck
6586};
6587
6588std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
6589
// Implements the `instance-of` check: computes a boolean telling whether the
// object in input 0 is an instance of the class in input 1.
class HInstanceOf FINAL : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(DataType::Type::kBool,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    // Conservatively require a null check until codegen proves it unnecessary.
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime(GetTypeCheckKind());
  }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  // Only an exact check is guaranteed to avoid a runtime call.
  static bool CanCallRuntime(TypeCheckKind check_kind) {
    // Mips currently does runtime calls for any other checks.
    return check_kind != TypeCheckKind::kExactCheck;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InstanceOf);

 private:
  // Packed layout: the TypeCheckKind followed by the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
};
6645
// Bounds the reference type of its input (see `upper_bound_` below for an example).
class HBoundType FINAL : public HExpression<1> {
 public:
  explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(DataType::Type::kReference, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
    SetPackedFlag<kFlagUpperCanBeNull>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
    DCHECK_EQ(input->GetType(), DataType::Type::kReference);
    SetRawInputAt(0, input);
  }

  bool IsClonable() const OVERRIDE { return true; }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  void SetCanBeNull(bool can_be_null) {
    // Nullability may not exceed the upper-bound constraint.
    DCHECK(GetUpperCanBeNull() || !can_be_null);
    SetPackedFlag<kFlagCanBeNull>(can_be_null);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

  DECLARE_INSTRUCTION(BoundType);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(BoundType);

 private:
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then CanBeNull() cannot be true).
  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  ReferenceTypeInfo upper_bound_;
};
6692
// Checked cast: throws if the object in input 0 is not an instance of the
// class in input 1.
class HCheckCast FINAL : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    // Conservatively require a null check until codegen proves it unnecessary.
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(CheckCast);

 private:
  // Packed layout: the TypeCheckKind followed by the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
};
6739
6740/**
6741 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
6742 * @details We define the combined barrier types that are actually required
6743 * by the Java Memory Model, rather than using exactly the terminology from
6744 * the JSR-133 cookbook.  These should, in many cases, be replaced by acquire/release
6745 * primitives.  Note that the JSR-133 cookbook generally does not deal with
6746 * store atomicity issues, and the recipes there are not always entirely sufficient.
6747 * The current recipe is as follows:
6748 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
6749 * -# Use AnyAny barrier after volatile store.  (StoreLoad is as expensive.)
6750 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
6751 * -# Use StoreStore barrier after all stores but before return from any constructor whose
6752 *    class has final fields.
6753 * -# Use NTStoreStore to order non-temporal stores with respect to all later
6754 *    store-to-memory instructions.  Only generated together with non-temporal stores.
6755 */
6756enum MemBarrierKind {
6757  kAnyStore,
6758  kLoadAny,
6759  kStoreStore,
6760  kAnyAny,
6761  kNTStoreStore,
6762  kLastBarrierKind = kNTStoreStore
6763};
6764std::ostream& operator<<(std::ostream& os, const MemBarrierKind& kind);
6765
// Explicit memory barrier of the given MemBarrierKind.
class HMemoryBarrier FINAL : public HTemplateInstruction<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(
            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
    SetPackedField<BarrierKindField>(barrier_kind);
  }

  bool IsClonable() const OVERRIDE { return true; }

  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }

  DECLARE_INSTRUCTION(MemoryBarrier);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(MemoryBarrier);

 private:
  // The barrier kind is stored in the packed bits.
  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBarrierKindSize =
      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
      kFieldBarrierKind + kFieldBarrierKindSize;
  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;
};
6793
6794// A constructor fence orders all prior stores to fields that could be accessed via a final field of
6795// the specified object(s), with respect to any subsequent store that might "publish"
6796// (i.e. make visible) the specified object to another thread.
6797//
6798// JLS 17.5.1 "Semantics of final fields" states that a freeze action happens
6799// for all final fields (that were set) at the end of the invoked constructor.
6800//
6801// The constructor fence models the freeze actions for the final fields of an object
6802// being constructed (semantically at the end of the constructor). Constructor fences
6803// have a per-object affinity; two separate objects being constructed get two separate
6804// constructor fences.
6805//
6806// (Note: that if calling a super-constructor or forwarding to another constructor,
6807// the freezes would happen at the end of *that* constructor being invoked).
6808//
6809// The memory model guarantees that when the object being constructed is "published" after
6810// constructor completion (i.e. escapes the current thread via a store), then any final field
6811// writes must be observable on other threads (once they observe that publication).
6812//
6813// Further, anything written before the freeze, and read by dereferencing through the final field,
6814// must also be visible (so final object field could itself have an object with non-final fields;
6815// yet the freeze must also extend to them).
6816//
6817// Constructor example:
6818//
6819//     class HasFinal {
6820//        final int field;                              Optimizing IR for <init>()V:
6821//        HasFinal() {
6822//          field = 123;                                HInstanceFieldSet(this, HasFinal.field, 123)
6823//          // freeze(this.field);                      HConstructorFence(this)
6824//        }                                             HReturn
6825//     }
6826//
6827// HConstructorFence can serve double duty as a fence for new-instance/new-array allocations of
6828// already-initialized classes; in that case the allocation must act as a "default-initializer"
6829// of the object which effectively writes the class pointer "final field".
6830//
// For example, we can model default-initialization as roughly the equivalent of the following:
6832//
6833//     class Object {
6834//       private final Class header;
6835//     }
6836//
6837//  Java code:                                           Optimizing IR:
6838//
6839//     T new_instance<T>() {
6840//       Object obj = allocate_memory(T.class.size);     obj = HInvoke(art_quick_alloc_object, T)
6841//       obj.header = T.class;                           // header write is done by above call.
6842//       // freeze(obj.header)                           HConstructorFence(obj)
6843//       return (T)obj;
6844//     }
6845//
6846// See also:
6847// * CompilerDriver::RequiresConstructorBarrier
6848// * QuasiAtomic::ThreadFenceForConstructor
6849//
class HConstructorFence FINAL : public HVariableInputSizeInstruction {
                                  // A fence has variable inputs because the inputs can be removed
                                  // after prepare_for_register_allocation phase.
                                  // (TODO: In the future a fence could freeze multiple objects
                                  //        after merging two fences together.)
 public:
  // `fence_object` is the reference that needs to be protected for correct publication.
  //
  // It makes sense in the following situations:
  // * <init> constructors, it's the "this" parameter (i.e. HParameterValue, s.t. IsThis() == true).
  // * new-instance-like instructions, it's the return value (i.e. HNewInstance).
  //
  // After construction the `fence_object` becomes the 0th input.
  // This is not an input in a real sense, but just a convenient place to stash the information
  // about the associated object.
  HConstructorFence(HInstruction* fence_object,
                    uint32_t dex_pc,
                    ArenaAllocator* allocator)
    // We strongly suspect there is not a more accurate way to describe the fine-grained reordering
    // constraints described in the class header. We claim that these SideEffects constraints
    // enforce a superset of the real constraints.
    //
    // The ordering described above is conservatively modeled with SideEffects as follows:
    //
    // * To prevent reordering of the publication stores:
    // ----> "Reads of objects" is the initial SideEffect.
    // * For every primitive final field store in the constructor:
    // ----> Union that field's type as a read (e.g. "Read of T") into the SideEffect.
    // * If there are any stores to reference final fields in the constructor:
    // ----> Use a more conservative "AllReads" SideEffect because any stores to any references
    //       that are reachable from `fence_object` also need to be prevented for reordering
    //       (and we do not want to do alias analysis to figure out what those stores are).
    //
    // In the implementation, this initially starts out as an "all reads" side effect; this is an
    // even more conservative approach than the one described above, and prevents all of the
    // above reordering without analyzing any of the instructions in the constructor.
    //
    // If in a later phase we discover that there are no writes to reference final fields,
    // we can refine the side effect to a smaller set of type reads (see above constraints).
      : HVariableInputSizeInstruction(SideEffects::AllReads(),
                                      dex_pc,
                                      allocator,
                                      /* number_of_inputs */ 1,
                                      kArenaAllocConstructorFenceInputs) {
    DCHECK(fence_object != nullptr);
    SetRawInputAt(0, fence_object);
  }

  // The object associated with this constructor fence.
  //
  // (Note: This will be null after the prepare_for_register_allocation phase,
  // as all constructor fence inputs are removed there).
  HInstruction* GetFenceObject() const {
    return InputAt(0);
  }

  // Find all the HConstructorFence uses (`fence_use`) for `this` and:
  // - Delete `fence_use` from `this`'s use list.
  // - Delete `this` from `fence_use`'s inputs list.
  // - If the `fence_use` is dead, remove it from the graph.
  //
  // A fence is considered dead once it no longer has any uses
  // and all of the inputs are dead.
  //
  // This must *not* be called during/after prepare_for_register_allocation,
  // because that removes all the inputs to the fences but the fence is actually
  // still considered live.
  //
  // Returns how many HConstructorFence instructions were removed from graph.
  static size_t RemoveConstructorFences(HInstruction* instruction);

  // Combine all inputs of `this` and `other` instruction and remove
  // `other` from the graph.
  //
  // Inputs are unique after the merge.
  //
  // Requirement: `this` must not be the same as `other`.
  void Merge(HConstructorFence* other);

  // Check if this constructor fence is protecting
  // an HNewInstance or HNewArray that is also the immediate
  // predecessor of `this`.
  //
  // If `ignore_inputs` is true, then the immediate predecessor doesn't need
  // to be one of the inputs of `this`.
  //
  // Returns the associated HNewArray or HNewInstance,
  // or null otherwise.
  HInstruction* GetAssociatedAllocation(bool ignore_inputs = false);

  DECLARE_INSTRUCTION(ConstructorFence);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ConstructorFence);
};
6945
// Implements monitor-enter / monitor-exit on an object (`synchronized` support).
class HMonitorOperation FINAL : public HTemplateInstruction<1> {
 public:
  enum class OperationKind {
    kEnter,
    kExit,
    kLast = kExit
  };

  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
    : HTemplateInstruction(
          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
          dex_pc) {
    SetPackedField<OperationKindField>(kind);
    SetRawInputAt(0, object);
  }

  // Instruction may go into runtime, so we need an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE {
    // Verifier guarantees that monitor-exit cannot throw.
    // This is important because it allows the HGraphBuilder to remove
    // a dead throw-catch loop generated for `synchronized` blocks/methods.
    return IsEnter();
  }

  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }

  DECLARE_INSTRUCTION(MonitorOperation);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(MonitorOperation);

 private:
  // The operation kind (enter/exit) is stored in the packed bits.
  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldOperationKindSize =
      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
  static constexpr size_t kNumberOfMonitorOperationPackedBits =
      kFieldOperationKind + kFieldOperationKindSize;
  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
};
6990
// Branch-free selection between two values, equivalent to
// `condition ? true_value : false_value`.
class HSelect FINAL : public HExpression<3> {
 public:
  HSelect(HInstruction* condition,
          HInstruction* true_value,
          HInstruction* false_value,
          uint32_t dex_pc)
      : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
    // Both value inputs must agree on their phi-normalized type.
    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));

    // First input must be `true_value` or `false_value` to allow codegens to
    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
    // that architectures which implement HSelect as a conditional move also
    // will not need to invert the condition.
    SetRawInputAt(0, false_value);
    SetRawInputAt(1, true_value);
    SetRawInputAt(2, condition);
  }

  bool IsClonable() const OVERRIDE { return true; }
  HInstruction* GetFalseValue() const { return InputAt(0); }
  HInstruction* GetTrueValue() const { return InputAt(1); }
  HInstruction* GetCondition() const { return InputAt(2); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // The result can be null if either selected value can be null.
  bool CanBeNull() const OVERRIDE {
    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
  }

  DECLARE_INSTRUCTION(Select);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Select);
};
7028
// One (source, destination) move of a parallel move, used by the parallel
// move resolver (see MarkPending/ClearPending below for its protocol).
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               DataType::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source). Returns the cleared destination so the
  // caller can restore it later via ClearPending().
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  // Restores the destination previously cleared by MarkPending().
  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  // In-progress: destination cleared while the source is still valid.
  bool IsPending() const {
    DCHECK(source_.IsValid() || destination_.IsInvalid());
    return destination_.IsInvalid() && source_.IsValid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  DataType::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return DataType::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  DataType::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};
7102
7103std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
7104
7105static constexpr size_t kDefaultNumberOfMoves = 4;
7106
// A set of moves that must behave as if performed simultaneously; a resolver
// later sequences them. Has no inputs/outputs of its own (template arity 0).
class HParallelMove FINAL : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* allocator, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(allocator->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  // Records a move from `source` to `destination`. Debug builds verify that
  // no two recorded moves are for the same instruction (modulo the spill-slot
  // special case below) and that no two destinations overlap.
  void AddMove(Location source,
               Location destination,
               DataType::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;
};
7163
// This instruction computes an intermediate address pointing in the 'middle' of an object. The
// result pointer cannot be handled by GC, so extra care is taken to make sure that this value is
// never used across anything that can trigger GC.
// The result of this instruction is not a pointer in the sense of `DataType::Type::kReference`.
// So we represent it by the type `DataType::Type::kInt32`.
class HIntermediateAddress FINAL : public HExpression<2> {
 public:
  HIntermediateAddress(HInstruction* base_address, HInstruction* offset, uint32_t dex_pc)
      : HExpression(DataType::Type::kInt32, SideEffects::DependsOnGC(), dex_pc) {
        // Representing a reference-derived address as kInt32 is only sound
        // if the two types have the same size.
        DCHECK_EQ(DataType::Size(DataType::Type::kInt32),
                  DataType::Size(DataType::Type::kReference))
            << "kPrimInt and kPrimNot have different sizes.";
    SetRawInputAt(0, base_address);
    SetRawInputAt(1, offset);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // The result is a raw address, not a heap reference the GC may relocate.
  bool IsActualObject() const OVERRIDE { return false; }

  HInstruction* GetBaseAddress() const { return InputAt(0); }
  HInstruction* GetOffset() const { return InputAt(1); }

  DECLARE_INSTRUCTION(IntermediateAddress);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(IntermediateAddress);
};
7195
7196
7197}  // namespace art
7198
7199#include "nodes_vector.h"
7200
7201#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
7202#include "nodes_shared.h"
7203#endif
7204#ifdef ART_ENABLE_CODEGEN_mips
7205#include "nodes_mips.h"
7206#endif
7207#ifdef ART_ENABLE_CODEGEN_x86
7208#include "nodes_x86.h"
7209#endif
7210
7211namespace art {
7212
7213class OptimizingCompilerStats;
7214
// Base class for graph traversals: one virtual Visit##name method per
// instruction class, each defaulting to the VisitInstruction() fallback.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
      : stats_(stats),
        graph_(graph) {}
  virtual ~HGraphVisitor() {}

  // Fallback invoked by every default Visit##name implementation below.
  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 protected:
  // Optional statistics collector; may be null.
  OptimizingCompilerStats* stats_;

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
7249
// Visitor whose per-instruction methods forward to the visit method of the
// instruction's super class, so overriding a Visit##super handles a family.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
      : HGraphVisitor(graph, stats) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
7267
// Create a clone of the instruction, insert it into the graph, replace the
// old instruction with the clone, and remove the old instruction.
7270HInstruction* ReplaceInstrOrPhiByClone(HInstruction* instr);
7271
// Create a clone for each clonable instruction/phi and replace the original with the clone.
7273//
7274// Used for testing individual instruction cloner.
7275class CloneAndReplaceInstructionVisitor : public HGraphDelegateVisitor {
7276 public:
7277  explicit CloneAndReplaceInstructionVisitor(HGraph* graph)
7278      : HGraphDelegateVisitor(graph), instr_replaced_by_clones_count(0) {}
7279
7280  void VisitInstruction(HInstruction* instruction) OVERRIDE {
7281    if (instruction->IsClonable()) {
7282      ReplaceInstrOrPhiByClone(instruction);
7283      instr_replaced_by_clones_count++;
7284    }
7285  }
7286
7287  size_t GetInstrReplacedByClonesCount() const { return instr_replaced_by_clones_count; }
7288
7289 private:
7290  size_t instr_replaced_by_clones_count;
7291
7292  DISALLOW_COPY_AND_ASSIGN(CloneAndReplaceInstructionVisitor);
7293};
7294
// Iterator over the blocks that are part of the loop. Includes blocks part
7296// of an inner loop. The order in which the blocks are iterated is on their
7297// block id.
class HBlocksInLoopIterator : public ValueObject {
 public:
  explicit HBlocksInLoopIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
        index_(0) {
    // Position the iterator on the first block id that belongs to the loop.
    if (!blocks_in_loop_.IsBitSet(index_)) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  // Move to the next block id in the loop, or past the end when exhausted.
  void Advance() {
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(index_)) {
        break;
      }
    }
  }

 private:
  // Bit vector of block ids belonging to the loop.
  const BitVector& blocks_in_loop_;
  // All blocks of the graph, indexed by block id.
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
};
7327
// Iterator over the blocks that are part of the loop. Includes blocks part
7329// of an inner loop. The order in which the blocks are iterated is reverse
7330// post order.
class HBlocksInLoopReversePostOrderIterator : public ValueObject {
 public:
  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
        index_(0) {
    // Position the iterator on the first RPO entry that belongs to the loop.
    // NOTE(review): assumes the RPO list is non-empty — TODO confirm.
    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  // Move to the next RPO entry whose block id is in the loop, or past the end.
  void Advance() {
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
        break;
      }
    }
  }

 private:
  // Bit vector of block ids belonging to the loop.
  const BitVector& blocks_in_loop_;
  // Graph blocks in reverse post-order (not indexed by block id).
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
};
7360
7361// Returns int64_t value of a properly typed constant.
7362inline int64_t Int64FromConstant(HConstant* constant) {
7363  if (constant->IsIntConstant()) {
7364    return constant->AsIntConstant()->GetValue();
7365  } else if (constant->IsLongConstant()) {
7366    return constant->AsLongConstant()->GetValue();
7367  } else {
7368    DCHECK(constant->IsNullConstant()) << constant->DebugName();
7369    return 0;
7370  }
7371}
7372
7373// Returns true iff instruction is an integral constant (and sets value on success).
7374inline bool IsInt64AndGet(HInstruction* instruction, /*out*/ int64_t* value) {
7375  if (instruction->IsIntConstant()) {
7376    *value = instruction->AsIntConstant()->GetValue();
7377    return true;
7378  } else if (instruction->IsLongConstant()) {
7379    *value = instruction->AsLongConstant()->GetValue();
7380    return true;
7381  } else if (instruction->IsNullConstant()) {
7382    *value = 0;
7383    return true;
7384  }
7385  return false;
7386}
7387
7388// Returns true iff instruction is the given integral constant.
7389inline bool IsInt64Value(HInstruction* instruction, int64_t value) {
7390  int64_t val = 0;
7391  return IsInt64AndGet(instruction, &val) && val == value;
7392}
7393
7394// Returns true iff instruction is a zero bit pattern.
7395inline bool IsZeroBitPattern(HInstruction* instruction) {
7396  return instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern();
7397}
7398
7399#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
7400  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
7401  inline const H##type* HInstruction::As##type() const {                       \
7402    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
7403  }                                                                            \
7404  inline H##type* HInstruction::As##type() {                                   \
7405    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
7406  }
7407
7408  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
7409#undef INSTRUCTION_TYPE_CHECK
7410
// Create space in `blocks` for adding `number_of_new_blocks` entries
// starting right after position `after`. Blocks following `after` are moved
// accordingly. The freed slots keep stale (moved-from) values and must be
// overwritten by the caller.
inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
                        size_t number_of_new_blocks,
                        size_t after) {
  DCHECK_LT(after, blocks->size());
  size_t old_size = blocks->size();
  size_t new_size = old_size + number_of_new_blocks;
  blocks->resize(new_size);
  // Shift everything past `after` to the tail of the enlarged vector.
  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
}
7422
7423/*
7424 * Hunt "under the hood" of array lengths (leading to array references),
7425 * null checks (also leading to array references), and new arrays
7426 * (leading to the actual length). This makes it more likely related
7427 * instructions become actually comparable.
7428 */
7429inline HInstruction* HuntForDeclaration(HInstruction* instruction) {
7430  while (instruction->IsArrayLength() ||
7431         instruction->IsNullCheck() ||
7432         instruction->IsNewArray()) {
7433    instruction = instruction->IsNewArray()
7434        ? instruction->AsNewArray()->GetLength()
7435        : instruction->InputAt(0);
7436  }
7437  return instruction;
7438}
7439
7440void RemoveEnvironmentUses(HInstruction* instruction);
7441bool HasEnvironmentUsedByOthers(HInstruction* instruction);
7442void ResetEnvironmentInputRecords(HInstruction* instruction);
7443
7444}  // namespace art
7445
7446#endif  // ART_COMPILER_OPTIMIZING_NODES_H_
7447