nodes.h revision 847e6ce98b4b822fd94c631975763845978ebaa3
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18#define ART_COMPILER_OPTIMIZING_NODES_H_
19
20#include <algorithm>
21#include <array>
22#include <type_traits>
23
24#include "base/arena_bit_vector.h"
25#include "base/arena_containers.h"
26#include "base/arena_object.h"
27#include "base/array_ref.h"
28#include "base/iteration_range.h"
29#include "base/stl_util.h"
30#include "base/transform_array_ref.h"
31#include "dex_file.h"
32#include "dex_file_types.h"
33#include "deoptimization_kind.h"
34#include "entrypoints/quick/quick_entrypoints_enum.h"
35#include "handle.h"
36#include "handle_scope.h"
37#include "invoke_type.h"
38#include "intrinsics_enum.h"
39#include "locations.h"
40#include "method_reference.h"
41#include "mirror/class.h"
42#include "offsets.h"
43#include "primitive.h"
44#include "utils/intrusive_forward_list.h"
45
46namespace art {
47
48class GraphChecker;
49class HBasicBlock;
50class HConstructorFence;
51class HCurrentMethod;
52class HDoubleConstant;
53class HEnvironment;
54class HFloatConstant;
55class HGraphBuilder;
56class HGraphVisitor;
57class HInstruction;
58class HIntConstant;
59class HInvoke;
60class HLongConstant;
61class HNullConstant;
62class HParameterValue;
63class HPhi;
64class HSuspendCheck;
65class HTryBoundary;
66class LiveInterval;
67class LocationSummary;
68class SlowPathCode;
69class SsaBuilder;
70
71namespace mirror {
72class DexCache;
73}  // namespace mirror
74
// Default initial capacities for the graph's per-block containers. These are
// reserve() hints only (e.g. HGraph reserves kDefaultNumberOfBlocks block
// slots and HLoopInformation reserves kDefaultNumberOfBackEdges back edges);
// the containers grow beyond them as needed.
// Declared constexpr for consistency with the other compile-time constants in
// this file (previously `static const int`).
static constexpr int kDefaultNumberOfBlocks = 8;
static constexpr int kDefaultNumberOfSuccessors = 2;
static constexpr int kDefaultNumberOfPredecessors = 2;
static constexpr int kDefaultNumberOfExceptionalPredecessors = 0;
static constexpr int kDefaultNumberOfDominatedBlocks = 1;
static constexpr int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;
86
// Sentinel dex indices used when the real field / class-def index is unknown.
static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

// Sentinel invoke type; used as the default `invoke_type` for an HGraph.
static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

// Sentinel dex pc meaning "no dex pc recorded"; the default `dex_pc` for
// synthesized instructions such as cached constants.
static constexpr uint32_t kNoDexPc = -1;
93
94inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
95  // For the purposes of the compiler, the dex files must actually be the same object
96  // if we want to safely treat them as the same. This is especially important for JIT
97  // as custom class loaders can open the same underlying file (or memory) multiple
98  // times and provide different class resolution but no two class loaders should ever
99  // use the same DexFile object - doing so is an unsupported hack that can lead to
100  // all sorts of weird failures.
101  return &lhs == &rhs;
102}
103
// Kinds of comparison conditions. The "unsigned" variants (B/BE/A/AE, i.e.
// below/above) apply to unsigned integer comparisons only.
enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
  // First and last aliases, usable for iterating over all condition kinds.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};
122
// Outcome of graph analysis steps such as HGraph::BuildDominatorTree() and
// HGraph::AnalyzeLoops().
enum GraphAnalysisResult {
  // Analysis was not performed.
  kAnalysisSkipped,
  // The method's bytecode could not be analyzed.
  kAnalysisInvalidBytecode,
  // A loop header was a catch block (throw-catch loop); see AnalyzeLoops().
  kAnalysisFailThrowCatchLoop,
  // An array operation's type could not be disambiguated.
  kAnalysisFailAmbiguousArrayOp,
  kAnalysisSuccess,
};
130
// Casts `x` to the unsigned type of the same width as T (e.g. int32_t ->
// uint32_t), preserving the bit pattern.
template <typename T>
static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
  using Unsigned = typename std::make_unsigned<T>::type;
  return static_cast<Unsigned>(x);
}
135
// An ordered list of instructions, represented by its first and last
// instruction. Used by HBasicBlock for its instruction and phi lists.
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  // Append `instruction` at the end / remove `instruction` from the list.
  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise.  Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  // Note: Clear() only resets the first/last pointers; it does not touch the
  // instructions themselves.
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  // Add all instructions of `instruction_list` after/before `cursor`, or at
  // the end of this list.
  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HInstructionIteratorHandleChanges;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};
182
// Static type information for a reference value: a handle to the class plus
// an "exactness" bit. If `is_exact_` is true, the value is known to be of
// exactly that class; otherwise the class is only an upper bound (supertype).
// An invalid ReferenceTypeInfo (null handle) means no type information.
class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);

  // Creates type info that is exact iff the class cannot have subclasses
  // assigned to it (e.g. final classes).
  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
  }

  // Like Create() but skips validation of the handle.
  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  // Creates an invalid (no information) type info.
  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  // The following queries require a valid type handle (DCHECKed) and must be
  // called with the mutator lock held since they dereference the handle.
  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  // Returns true only if this is an exact array class whose component type can
  // hold a value of type `rti`. Conservatively false for inexact types.
  bool CanArrayHold(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true only if this is an exact array class whose component type can
  // hold the component type of the array class `rti`. Conservatively false
  // when either side is not an array class or this type is inexact.
  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Like IsSupertypeOf(), but false when the two classes are the same.
  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provide the same amount of details.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  // Constructs an invalid (null-handle, inexact) type info.
  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
      : type_handle_(type_handle), is_exact_(is_exact) { }

  // The class of the object.
  TypeHandle type_handle_;
  // Whether or not the type is exact or a superclass of the actual type.
  // Whether or not we have any information about this type.
  bool is_exact_;
};
302
303std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
304
305// Control-flow graph of a method. Contains a list of basic blocks.
// Control-flow graph of a method. Contains a list of basic blocks.
class HGraph : public ArenaObject<kArenaAllocGraph> {
 public:
  // `invoke_type` describes how the (possibly inlined) method is invoked,
  // `osr` requests on-stack-replacement compilation, and
  // `start_instruction_id` seeds the per-instruction id counter.
  HGraph(ArenaAllocator* arena,
         const DexFile& dex_file,
         uint32_t method_idx,
         InstructionSet instruction_set,
         InvokeType invoke_type = kInvalidInvokeType,
         bool debuggable = false,
         bool osr = false,
         int start_instruction_id = 0)
      : arena_(arena),
        blocks_(arena->Adapter(kArenaAllocBlockList)),
        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
        entry_block_(nullptr),
        exit_block_(nullptr),
        maximum_number_of_out_vregs_(0),
        number_of_vregs_(0),
        number_of_in_vregs_(0),
        temporaries_vreg_slots_(0),
        has_bounds_checks_(false),
        has_try_catch_(false),
        has_simd_(false),
        has_loops_(false),
        has_irreducible_loops_(false),
        debuggable_(debuggable),
        current_instruction_id_(start_instruction_id),
        dex_file_(dex_file),
        method_idx_(method_idx),
        invoke_type_(invoke_type),
        in_ssa_form_(false),
        number_of_cha_guards_(0),
        instruction_set_(instruction_set),
        cached_null_constant_(nullptr),
        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_current_method_(nullptr),
        art_method_(nullptr),
        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
        osr_(osr),
        cha_single_implementation_list_(arena->Adapter(kArenaAllocCHA)) {
    blocks_.reserve(kDefaultNumberOfBlocks);
  }

  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
  void InitializeInexactObjectRTI(VariableSizedHandleScope* handles);

  ArenaAllocator* GetArena() const { return arena_; }
  // Blocks in insertion order; may contain null entries for removed blocks.
  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }

  bool IsInSsaForm() const { return in_ssa_form_; }
  void SetInSsaForm() { in_ssa_form_ = true; }

  HBasicBlock* GetEntryBlock() const { return entry_block_; }
  HBasicBlock* GetExitBlock() const { return exit_block_; }
  bool HasExitBlock() const { return exit_block_ != nullptr; }

  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }

  // Appends `block` to the block list.
  void AddBlock(HBasicBlock* block);

  // CFG analyses. BuildDominatorTree() returns a GraphAnalysisResult code.
  void ComputeDominanceInformation();
  void ClearDominanceInformation();
  void ClearLoopInformation();
  void FindBackEdges(ArenaBitVector* visited);
  GraphAnalysisResult BuildDominatorTree();
  void SimplifyCFG();
  void SimplifyCatchBlocks();

  // Analyze all natural loops in this graph. Returns a code specifying that it
  // was successful or the reason for failure. The method will fail if a loop
  // is a throw-catch loop, i.e. the header is a catch block.
  GraphAnalysisResult AnalyzeLoops() const;

  // Iterate over blocks to compute try block membership. Needs reverse post
  // order and loop information.
  void ComputeTryBlockInformation();

  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
  // Returns the instruction to replace the invoke expression or null if the
  // invoke is for a void method. Note that the caller is responsible for replacing
  // and removing the invoke instruction.
  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);

  // Update the loop and try membership of `block`, which was spawned from `reference`.
  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
  // should be the new back edge.
  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
                                             HBasicBlock* reference,
                                             bool replace_if_back_edge);

  // Need to add a couple of blocks to test if the loop body is entered and
  // put deoptimization instructions, etc.
  void TransformLoopHeaderForBCE(HBasicBlock* header);

  // Adds a new loop directly after the loop with the given header and exit.
  // Returns the new preheader.
  HBasicBlock* TransformLoopForVectorization(HBasicBlock* header,
                                             HBasicBlock* body,
                                             HBasicBlock* exit);

  // Removes `block` from the graph. Assumes `block` has been disconnected from
  // other blocks and has no instructions or phis.
  void DeleteDeadEmptyBlock(HBasicBlock* block);

  // Splits the edge between `block` and `successor` while preserving the
  // indices in the predecessor/successor lists. If there are multiple edges
  // between the blocks, the lowest indices are used.
  // Returns the new block which is empty and has the same dex pc as `successor`.
  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);

  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
  void SimplifyLoop(HBasicBlock* header);

  // Returns the current instruction id and advances the counter; the DCHECK
  // guards against id overflow.
  int32_t GetNextInstructionId() {
    DCHECK_NE(current_instruction_id_, INT32_MAX);
    return current_instruction_id_++;
  }

  int32_t GetCurrentInstructionId() const {
    return current_instruction_id_;
  }

  // The id counter may only move forward (DCHECKed).
  void SetCurrentInstructionId(int32_t id) {
    DCHECK_GE(id, current_instruction_id_);
    current_instruction_id_ = id;
  }

  uint16_t GetMaximumNumberOfOutVRegs() const {
    return maximum_number_of_out_vregs_;
  }

  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
    maximum_number_of_out_vregs_ = new_value;
  }

  // Raises the maximum to `other_value` if it is larger than the current one.
  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
  }

  // Raises the temporaries slot count to `slots` if larger than the current one.
  void UpdateTemporariesVRegSlots(size_t slots) {
    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
  }

  // Only meaningful before SSA form (DCHECKed).
  size_t GetTemporariesVRegSlots() const {
    DCHECK(!in_ssa_form_);
    return temporaries_vreg_slots_;
  }

  void SetNumberOfVRegs(uint16_t number_of_vregs) {
    number_of_vregs_ = number_of_vregs;
  }

  uint16_t GetNumberOfVRegs() const {
    return number_of_vregs_;
  }

  void SetNumberOfInVRegs(uint16_t value) {
    number_of_in_vregs_ = value;
  }

  uint16_t GetNumberOfInVRegs() const {
    return number_of_in_vregs_;
  }

  // Number of vregs that are not parameters; only meaningful before SSA form.
  uint16_t GetNumberOfLocalVRegs() const {
    DCHECK(!in_ssa_form_);
    return number_of_vregs_ - number_of_in_vregs_;
  }

  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
    return reverse_post_order_;
  }

  // Reverse post order without the entry block (which is always first).
  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() {
    DCHECK(GetReversePostOrder()[0] == entry_block_);
    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
  }

  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
    return ReverseRange(GetReversePostOrder());
  }

  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
    return linear_order_;
  }

  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
    return ReverseRange(GetLinearOrder());
  }

  bool HasBoundsChecks() const {
    return has_bounds_checks_;
  }

  void SetHasBoundsChecks(bool value) {
    has_bounds_checks_ = value;
  }

  bool IsDebuggable() const { return debuggable_; }

  // Returns a constant of the given type and value. If it does not exist
  // already, it is created and inserted into the graph. This method is only for
  // integral types.
  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);

  // TODO: This is problematic for the consistency of reference type propagation
  // because it can be created anytime after the pass and thus it will be left
  // with an invalid type.
  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);

  // Typed constant getters; each caches the constant in the corresponding map.
  // Float/double values are cached by their bit patterns.
  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_int_constants_, dex_pc);
  }
  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_long_constants_, dex_pc);
  }
  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
  }
  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
  }

  HCurrentMethod* GetCurrentMethod();

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  // Get the method name (without the signature), e.g. "<init>"
  const char* GetMethodName() const;

  // Get the pretty method name (class + name + optionally signature).
  std::string PrettyMethod(bool with_signature = true) const;

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  InstructionSet GetInstructionSet() const {
    return instruction_set_;
  }

  bool IsCompilingOsr() const { return osr_; }

  // Methods assumed (via class hierarchy analysis) to have a single
  // implementation; compiled code depends on these assumptions.
  ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
    return cha_single_implementation_list_;
  }

  void AddCHASingleImplementationDependency(ArtMethod* method) {
    cha_single_implementation_list_.insert(method);
  }

  // True when at least one CHA guard was generated.
  bool HasShouldDeoptimizeFlag() const {
    return number_of_cha_guards_ != 0;
  }

  bool HasTryCatch() const { return has_try_catch_; }
  void SetHasTryCatch(bool value) { has_try_catch_ = value; }

  bool HasSIMD() const { return has_simd_; }
  void SetHasSIMD(bool value) { has_simd_ = value; }

  bool HasLoops() const { return has_loops_; }
  void SetHasLoops(bool value) { has_loops_ = value; }

  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }

  // May be null for AOT; see art_method_ below.
  ArtMethod* GetArtMethod() const { return art_method_; }
  void SetArtMethod(ArtMethod* method) { art_method_ = method; }

  // Returns an instruction with the opposite Boolean value from 'cond'.
  // The instruction has been inserted into the graph, either as a constant, or
  // before cursor.
  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);

  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }

  uint32_t GetNumberOfCHAGuards() { return number_of_cha_guards_; }
  void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
  void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }

 private:
  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
  void RemoveDeadBlocks(const ArenaBitVector& visited);

  // Looks up `value` in `cache` and returns the cached constant, creating,
  // caching and inserting a new one if absent (or previously deleted).
  template <class InstructionType, typename ValueType>
  InstructionType* CreateConstant(ValueType value,
                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
                                  uint32_t dex_pc = kNoDexPc) {
    // Try to find an existing constant of the given value.
    InstructionType* constant = nullptr;
    auto cached_constant = cache->find(value);
    if (cached_constant != cache->end()) {
      constant = cached_constant->second;
    }

    // If not found or previously deleted, create and cache a new instruction.
    // Don't bother reviving a previously deleted instruction, for simplicity.
    if (constant == nullptr || constant->GetBlock() == nullptr) {
      constant = new (arena_) InstructionType(value, dex_pc);
      cache->Overwrite(value, constant);
      InsertConstant(constant);
    }
    return constant;
  }

  void InsertConstant(HConstant* instruction);

  // Cache a float constant into the graph. This method should only be
  // called by the SsaBuilder when creating "equivalent" instructions.
  void CacheFloatConstant(HFloatConstant* constant);

  // See CacheFloatConstant comment.
  void CacheDoubleConstant(HDoubleConstant* constant);

  ArenaAllocator* const arena_;

  // List of blocks in insertion order.
  ArenaVector<HBasicBlock*> blocks_;

  // List of blocks to perform a reverse post order tree traversal.
  ArenaVector<HBasicBlock*> reverse_post_order_;

  // List of blocks to perform a linear order tree traversal. Unlike the reverse
  // post order, this order is not incrementally kept up-to-date.
  ArenaVector<HBasicBlock*> linear_order_;

  HBasicBlock* entry_block_;
  HBasicBlock* exit_block_;

  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
  uint16_t maximum_number_of_out_vregs_;

  // The number of virtual registers in this method. Contains the parameters.
  uint16_t number_of_vregs_;

  // The number of virtual registers used by parameters of this method.
  uint16_t number_of_in_vregs_;

  // Number of vreg size slots that the temporaries use (used in baseline compiler).
  size_t temporaries_vreg_slots_;

  // Flag whether there are bounds checks in the graph. We can skip
  // BCE if it's false. It's only best effort to keep it up to date in
  // the presence of code elimination so there might be false positives.
  bool has_bounds_checks_;

  // Flag whether there are try/catch blocks in the graph. We will skip
  // try/catch-related passes if it's false. It's only best effort to keep
  // it up to date in the presence of code elimination so there might be
  // false positives.
  bool has_try_catch_;

  // Flag whether SIMD instructions appear in the graph. If true, the
  // code generators may have to be more careful spilling the wider
  // contents of SIMD registers.
  bool has_simd_;

  // Flag whether there are any loops in the graph. We can skip loop
  // optimization if it's false. It's only best effort to keep it up
  // to date in the presence of code elimination so there might be false
  // positives.
  bool has_loops_;

  // Flag whether there are any irreducible loops in the graph. It's only
  // best effort to keep it up to date in the presence of code elimination
  // so there might be false positives.
  bool has_irreducible_loops_;

  // Indicates whether the graph should be compiled in a way that
  // ensures full debuggability. If false, we can apply more
  // aggressive optimizations that may limit the level of debugging.
  const bool debuggable_;

  // The current id to assign to a newly added instruction. See HInstruction.id_.
  int32_t current_instruction_id_;

  // The dex file from which the method is from.
  const DexFile& dex_file_;

  // The method index in the dex file.
  const uint32_t method_idx_;

  // If inlined, this encodes how the callee is being invoked.
  const InvokeType invoke_type_;

  // Whether the graph has been transformed to SSA form. Only used
  // in debug mode to ensure we are not using properties only valid
  // for non-SSA form (like the number of temporaries).
  bool in_ssa_form_;

  // Number of CHA guards in the graph. Used to short-circuit the
  // CHA guard optimization pass when there is no CHA guard left.
  uint32_t number_of_cha_guards_;

  const InstructionSet instruction_set_;

  // Cached constants.
  HNullConstant* cached_null_constant_;
  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;

  HCurrentMethod* cached_current_method_;

  // The ArtMethod this graph is for. Note that for AOT, it may be null,
  // for example for methods whose declaring class could not be resolved
  // (such as when the superclass could not be found).
  ArtMethod* art_method_;

  // Keep the RTI of inexact Object to avoid having to pass stack handle
  // collection pointer to passes which may create NullConstant.
  ReferenceTypeInfo inexact_object_rti_;

  // Whether we are compiling this graph for on stack replacement: this will
  // make all loops seen as irreducible and emit special stack maps to mark
  // compiled code entries which the interpreter can directly jump to.
  const bool osr_;

  // List of methods that are assumed to have single implementation.
  ArenaSet<ArtMethod*> cha_single_implementation_list_;

  friend class SsaBuilder;           // For caching constants.
  friend class SsaLivenessAnalysis;  // For the linear order.
  friend class HInliner;             // For the reverse post order.
  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
  DISALLOW_COPY_AND_ASSIGN(HGraph);
};
745
// Describes one loop of the graph: its header block, suspend check, back
// edges, and (once Populate()d) the set of blocks belonging to the loop.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        contains_irreducible_loop_(false),
        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetArena(), graph->GetBlocks().size(), true, kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  bool IsIrreducible() const { return irreducible_; }
  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  // The suspend check of this loop, if any.
  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  // Bit vector of the blocks in this loop, indexed by block id.
  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  // Populated means at least one block bit has been set.
  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

  bool HasExitEdge() const;

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  // Whether the loop is irreducible (not a natural single-entry loop).
  bool irreducible_;
  // Whether an irreducible loop is nested inside this loop.
  bool contains_irreducible_loop_;
  ArenaVector<HBasicBlock*> back_edges_;
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};
853
854// Stores try/catch information for basic blocks.
855// Note that HGraph is constructed so that catch blocks cannot simultaneously
856// be try blocks.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor. The catch fields are left unset so
  // IsCatchBlock() returns false.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(DexFile::kDexNoIndex16) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor. `try_entry_` is left null so
  // IsTryBlock() returns false.
  TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  // Returns whether this information describes a try block.
  bool IsTryBlock() const { return try_entry_ != nullptr; }

  // Returns the TryBoundary entering the try. Must only be called on try blocks.
  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  // Returns whether this information describes a catch block.
  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  // Returns whether the handler has no specific type, i.e. catches all
  // exception types. Must only be called on catch blocks.
  bool IsCatchAllTypeIndex() const {
    DCHECK(IsCatchBlock());
    return !catch_type_index_.IsValid();
  }

  // Type index of the handled exception. Must only be called on catch blocks.
  dex::TypeIndex GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  // Dex file holding the catch type information. Must only be called on
  // catch blocks.
  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  const dex::TypeIndex catch_type_index_;
};
906
// Sentinel lifetime position meaning "not assigned yet" (-1 wraps to the
// maximum size_t value).
static constexpr size_t kNoLifetime = -1;
// Sentinel block id used until a real id is set via HBasicBlock::SetBlockId().
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
909
910// A block in a method. Contains the list of instructions represented
911// as a double linked list. Each block knows its predecessors and
912// successors.
913
class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  // Creates a block belonging to `graph`, covering dex instructions starting
  // at `dex_pc`. The block id stays kInvalidBlockId until set explicitly.
  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    // Pre-allocate the default number of entries to avoid early reallocations.
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  // Successors reached by normal, resp. exceptional, control flow.
  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  // Returns whether `block` appears in the successor list, searching from
  // index `start_from`.
  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  // Whether the block contains only a single Goto, resp. TryBoundary.
  bool IsSingleGoto() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  // Records `back_edge` as a back edge to this block, lazily creating the
  // loop information with this block as header.
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  // Number of back edges if this block is a loop header, 0 otherwise.
  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  // Adds `block` as successor and registers `this` as its predecessor.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  // Redirects the successor edge to `existing` towards `new_block`, keeping
  // the successor index and updating both blocks' predecessor lists.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  // Redirects the predecessor edge from `existing` towards `new_block`,
  // keeping the predecessor index and updating both blocks' successor lists.
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  // Removes `block` from this block's predecessor list only; `block`'s
  // successor list is not updated.
  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  // Removes `block` from this block's successor list only; `block`'s
  // predecessor list is not updated.
  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  // Adds `block` as predecessor and registers `this` as its successor.
  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  // Swaps the two predecessors. Only valid for blocks with exactly two.
  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  // Swaps the two successors. Only valid for blocks with exactly two.
  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merges the instructions of `other` at the end of `this`.
  void MergeInstructionsWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};
1265
1266// Iterates over the LoopInformation of all loops which contain 'block'
1267// from the innermost to the outermost.
1268class HLoopInformationOutwardIterator : public ValueObject {
1269 public:
1270  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1271      : current_(block.GetLoopInformation()) {}
1272
1273  bool Done() const { return current_ == nullptr; }
1274
1275  void Advance() {
1276    DCHECK(!Done());
1277    current_ = current_->GetPreHeader()->GetLoopInformation();
1278  }
1279
1280  HLoopInformation* Current() const {
1281    DCHECK(!Done());
1282    return current_;
1283  }
1284
1285 private:
1286  HLoopInformation* current_;
1287
1288  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1289};
1290
// Concrete instructions supported on all architectures.
// Each entry is M(InstructionName, BaseClassName).
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClassTableGet, Instruction)                                         \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(ConstructorFence, Instruction)                                      \
  M(CurrentMethod, Instruction)                                         \
  M(ShouldDeoptimizeFlag, Instruction)                                  \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(InvokePolymorphic, Invoke)                                          \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadString, Instruction)                                            \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(NativeDebugInfo, Instruction)                                       \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Ror, BinaryOperation)                                               \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(Select, Instruction)                                                \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \
  M(VecReplicateScalar, VecUnaryOperation)                              \
  M(VecSumReduce, VecUnaryOperation)                                    \
  M(VecCnv, VecUnaryOperation)                                          \
  M(VecNeg, VecUnaryOperation)                                          \
  M(VecAbs, VecUnaryOperation)                                          \
  M(VecNot, VecUnaryOperation)                                          \
  M(VecAdd, VecBinaryOperation)                                         \
  M(VecHalvingAdd, VecBinaryOperation)                                  \
  M(VecSub, VecBinaryOperation)                                         \
  M(VecMul, VecBinaryOperation)                                         \
  M(VecDiv, VecBinaryOperation)                                         \
  M(VecMin, VecBinaryOperation)                                         \
  M(VecMax, VecBinaryOperation)                                         \
  M(VecAnd, VecBinaryOperation)                                         \
  M(VecAndNot, VecBinaryOperation)                                      \
  M(VecOr, VecBinaryOperation)                                          \
  M(VecXor, VecBinaryOperation)                                         \
  M(VecShl, VecBinaryOperation)                                         \
  M(VecShr, VecBinaryOperation)                                         \
  M(VecUShr, VecBinaryOperation)                                        \
  M(VecSetScalars, VecOperation)                                        \
  M(VecMultiplyAccumulate, VecOperation)                                \
  M(VecLoad, VecMemoryOperation)                                        \
  M(VecStore, VecMemoryOperation)                                       \

/*
 * Instructions, shared across several (not all) architectures.
 */
#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
  M(BitwiseNegatedRight, Instruction)                                   \
  M(DataProcWithShifterOp, Instruction)                                 \
  M(MultiplyAccumulate, Instruction)                                    \
  M(IntermediateAddress, Instruction)                                   \
  M(IntermediateAddressIndex, Instruction)
#endif

// Per-architecture instructions. Each list is empty when the corresponding
// code generator is not compiled in.
#ifndef ART_ENABLE_CODEGEN_arm
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                            \
  M(ArmDexCacheArraysBase, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)

#ifndef ART_ENABLE_CODEGEN_mips
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                           \
  M(MipsComputeBaseMethodAddress, Instruction)                          \
  M(MipsDexCacheArraysBase, Instruction)                                \
  M(MipsPackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86FPNeg, Instruction)                                              \
  M(X86PackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// All concrete (instantiable) instructions: common plus per-architecture.
#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// Abstract base instruction classes; never instantiated directly.
#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
  M(Condition, BinaryOperation)                                         \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                       \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)                                                \
  M(VecOperation, Instruction)                                          \
  M(VecUnaryOperation, VecOperation)                                    \
  M(VecBinaryOperation, VecOperation)                                   \
  M(VecMemoryOperation, VecOperation)

#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

// Forward-declare every H<instruction> class.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION

// Boilerplate overrides emitted in every concrete instruction class.
#define DECLARE_INSTRUCTION(type)                                         \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }    \
  const char* DebugName() const OVERRIDE { return #type; }                \
  bool InstructionTypeEquals(const HInstruction* other) const OVERRIDE {  \
    return other->Is##type();                                             \
  }                                                                       \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// Boilerplate Is/As casts emitted in every abstract instruction class.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }
1486
// A single use of an instruction by a user of type `T`, chained into the
// used instruction's intrusive use list.
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  // Get the instruction which has this use as one of the inputs.
  T GetUser() const { return user_; }
  // Get the position of the input record that this use corresponds to.
  size_t GetIndex() const { return index_; }
  // Set the position of the input record that this use corresponds to.
  void SetIndex(size_t index) { index_ = index; }

  // Hook for the IntrusiveForwardList<>.
  // TODO: Hide this better.
  IntrusiveForwardListHook hook;

 private:
  // Only HInstruction creates use list nodes.
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};
1512
// A use list is an intrusive singly-linked list of HUseListNode<>s.
template <typename T>
using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1515
1516// This class is used by HEnvironment and HInstruction classes to record the
1517// instructions they use and pointers to the corresponding HUseListNodes kept
1518// by the used instructions.
1519template <typename T>
1520class HUserRecord : public ValueObject {
1521 public:
1522  HUserRecord() : instruction_(nullptr), before_use_node_() {}
1523  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}
1524
1525  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
1526      : HUserRecord(old_record.instruction_, before_use_node) {}
1527  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
1528      : instruction_(instruction), before_use_node_(before_use_node) {
1529    DCHECK(instruction_ != nullptr);
1530  }
1531
1532  HInstruction* GetInstruction() const { return instruction_; }
1533  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
1534  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }
1535
1536 private:
1537  // Instruction used by the user.
1538  HInstruction* instruction_;
1539
1540  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
1541  typename HUseList<T>::iterator before_use_node_;
1542};
1543
// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
// This is used for HInstruction::GetInputs() to return a container wrapper providing
// HInstruction* values even though the underlying container has HUserRecord<>s.
struct HInputExtractor {
  // Non-const overload: used when transforming a mutable record array.
  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
  // Const overload: used when transforming a const record array.
  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
};
1555
1556using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
1557using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
1558
1559/**
1560 * Side-effects representation.
1561 *
1562 * For write/read dependences on fields/arrays, the dependence analysis uses
1563 * type disambiguation (e.g. a float field write cannot modify the value of an
1564 * integer field read) and the access type (e.g.  a reference array write cannot
1565 * modify the value of a reference field read [although it may modify the
1566 * reference fetch prior to reading the field, which is represented by its own
1567 * write/read dependence]). The analysis makes conservative points-to
1568 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1569 * the same, and any reference read depends on any reference read without
1570 * further regard of its type).
1571 *
1572 * The internal representation uses 38-bit and is described in the table below.
1573 * The first line indicates the side effect, and for field/array accesses the
1574 * second line indicates the type of the access (in the order of the
1575 * Primitive::Type enum).
1576 * The two numbered lines below indicate the bit position in the bitfield (read
1577 * vertically).
1578 *
1579 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1580 *   +-------------+---------+---------+--------------+---------+---------+
1581 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1582 *   |      3      |333333322|222222221|       1      |111111110|000000000|
1583 *   |      7      |654321098|765432109|       8      |765432109|876543210|
1584 *
1585 * Note that, to ease the implementation, 'changes' bits are least significant
1586 * bits, while 'dependency' bits are most significant bits.
1587 */
1588class SideEffects : public ValueObject {
1589 public:
1590  SideEffects() : flags_(0) {}
1591
1592  static SideEffects None() {
1593    return SideEffects(0);
1594  }
1595
1596  static SideEffects All() {
1597    return SideEffects(kAllChangeBits | kAllDependOnBits);
1598  }
1599
1600  static SideEffects AllChanges() {
1601    return SideEffects(kAllChangeBits);
1602  }
1603
1604  static SideEffects AllDependencies() {
1605    return SideEffects(kAllDependOnBits);
1606  }
1607
1608  static SideEffects AllExceptGCDependency() {
1609    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
1610  }
1611
1612  static SideEffects AllWritesAndReads() {
1613    return SideEffects(kAllWrites | kAllReads);
1614  }
1615
1616  static SideEffects AllWrites() {
1617    return SideEffects(kAllWrites);
1618  }
1619
1620  static SideEffects AllReads() {
1621    return SideEffects(kAllReads);
1622  }
1623
1624  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
1625    return is_volatile
1626        ? AllWritesAndReads()
1627        : SideEffects(TypeFlag(type, kFieldWriteOffset));
1628  }
1629
1630  static SideEffects ArrayWriteOfType(Primitive::Type type) {
1631    return SideEffects(TypeFlag(type, kArrayWriteOffset));
1632  }
1633
1634  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
1635    return is_volatile
1636        ? AllWritesAndReads()
1637        : SideEffects(TypeFlag(type, kFieldReadOffset));
1638  }
1639
1640  static SideEffects ArrayReadOfType(Primitive::Type type) {
1641    return SideEffects(TypeFlag(type, kArrayReadOffset));
1642  }
1643
1644  static SideEffects CanTriggerGC() {
1645    return SideEffects(1ULL << kCanTriggerGCBit);
1646  }
1647
1648  static SideEffects DependsOnGC() {
1649    return SideEffects(1ULL << kDependsOnGCBit);
1650  }
1651
1652  // Combines the side-effects of this and the other.
1653  SideEffects Union(SideEffects other) const {
1654    return SideEffects(flags_ | other.flags_);
1655  }
1656
1657  SideEffects Exclusion(SideEffects other) const {
1658    return SideEffects(flags_ & ~other.flags_);
1659  }
1660
1661  void Add(SideEffects other) {
1662    flags_ |= other.flags_;
1663  }
1664
1665  bool Includes(SideEffects other) const {
1666    return (other.flags_ & flags_) == other.flags_;
1667  }
1668
1669  bool HasSideEffects() const {
1670    return (flags_ & kAllChangeBits);
1671  }
1672
1673  bool HasDependencies() const {
1674    return (flags_ & kAllDependOnBits);
1675  }
1676
1677  // Returns true if there are no side effects or dependencies.
1678  bool DoesNothing() const {
1679    return flags_ == 0;
1680  }
1681
1682  // Returns true if something is written.
1683  bool DoesAnyWrite() const {
1684    return (flags_ & kAllWrites);
1685  }
1686
1687  // Returns true if something is read.
1688  bool DoesAnyRead() const {
1689    return (flags_ & kAllReads);
1690  }
1691
1692  // Returns true if potentially everything is written and read
1693  // (every type and every kind of access).
1694  bool DoesAllReadWrite() const {
1695    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
1696  }
1697
1698  bool DoesAll() const {
1699    return flags_ == (kAllChangeBits | kAllDependOnBits);
1700  }
1701
1702  // Returns true if `this` may read something written by `other`.
1703  bool MayDependOn(SideEffects other) const {
1704    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
1705    return (other.flags_ & depends_on_flags);
1706  }
1707
1708  // Returns string representation of flags (for debugging only).
1709  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
1710  std::string ToString() const {
1711    std::string flags = "|";
1712    for (int s = kLastBit; s >= 0; s--) {
1713      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
1714      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
1715        // This is a bit for the GC side effect.
1716        if (current_bit_is_set) {
1717          flags += "GC";
1718        }
1719        flags += "|";
1720      } else {
1721        // This is a bit for the array/field analysis.
1722        // The underscore character stands for the 'can trigger GC' bit.
1723        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
1724        if (current_bit_is_set) {
1725          flags += kDebug[s];
1726        }
1727        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
1728            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
1729          flags += "|";
1730        }
1731      }
1732    }
1733    return flags;
1734  }
1735
1736  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }
1737
1738 private:
1739  static constexpr int kFieldArrayAnalysisBits = 9;
1740
1741  static constexpr int kFieldWriteOffset = 0;
1742  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
1743  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
1744  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;
1745
1746  static constexpr int kChangeBits = kCanTriggerGCBit + 1;
1747
1748  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
1749  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
1750  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
1751  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;
1752
1753  static constexpr int kLastBit = kDependsOnGCBit;
1754  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;
1755
1756  // Aliases.
1757
1758  static_assert(kChangeBits == kDependOnBits,
1759                "the 'change' bits should match the 'depend on' bits.");
1760
1761  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
1762  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
1763  static constexpr uint64_t kAllWrites =
1764      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
1765  static constexpr uint64_t kAllReads =
1766      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;
1767
1768  // Translates type to bit flag.
1769  static uint64_t TypeFlag(Primitive::Type type, int offset) {
1770    CHECK_NE(type, Primitive::kPrimVoid);
1771    const uint64_t one = 1;
1772    const int shift = type;  // 0-based consecutive enum
1773    DCHECK_LE(kFieldWriteOffset, shift);
1774    DCHECK_LT(shift, kArrayWriteOffset);
1775    return one << (type + offset);
1776  }
1777
1778  // Private constructor on direct flags value.
1779  explicit SideEffects(uint64_t flags) : flags_(flags) {}
1780
1781  uint64_t flags_;
1782};
1783
// A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  // Builds an environment with `number_of_vregs` empty slots for `method` at
  // `dex_pc`; `holder` is the instruction that owns this environment.
  ALWAYS_INLINE HEnvironment(ArenaAllocator* arena,
                             size_t number_of_vregs,
                             ArtMethod* method,
                             uint32_t dex_pc,
                             HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       method_(method),
       dex_pc_(dex_pc),
       holder_(holder) {
  }

  // Copies only the shape of `to_copy` (size, method, dex pc) — not its
  // values; callers copy values separately via CopyFrom().
  ALWAYS_INLINE HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetMethod(),
                     to_copy.GetDexPc(),
                     holder) {}

  // Clones `parent` (and, recursively, its whole parent chain) and attaches
  // the clone below the deepest environment currently in this chain.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      // Not yet at the end of this chain; recurse down.
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Raw: overwrites the slot without updating `instruction`'s env-use list
  // (compare HInstruction::AddEnvUseAt, which does maintain the list).
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  // Number of vreg slots in this environment (excluding any parent's).
  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  ArtMethod* GetMethod() const {
    return method_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }


  bool IsFromInlinedInvoke() const {
    // A non-null parent indicates this environment belongs to an inlined frame.
    return GetParent() != nullptr;
  }

 private:
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  HEnvironment* parent_;
  ArtMethod* method_;
  const uint32_t dex_pc_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
1880
1881class HInstruction : public ArenaObject<kArenaAllocInstruction> {
1882 public:
1883  HInstruction(SideEffects side_effects, uint32_t dex_pc)
1884      : previous_(nullptr),
1885        next_(nullptr),
1886        block_(nullptr),
1887        dex_pc_(dex_pc),
1888        id_(-1),
1889        ssa_index_(-1),
1890        packed_fields_(0u),
1891        environment_(nullptr),
1892        locations_(nullptr),
1893        live_interval_(nullptr),
1894        lifetime_position_(kNoLifetime),
1895        side_effects_(side_effects),
1896        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
1897    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
1898  }
1899
1900  virtual ~HInstruction() {}
1901
1902#define DECLARE_KIND(type, super) k##type,
1903  enum InstructionKind {
1904    FOR_EACH_INSTRUCTION(DECLARE_KIND)
1905  };
1906#undef DECLARE_KIND
1907
1908  HInstruction* GetNext() const { return next_; }
1909  HInstruction* GetPrevious() const { return previous_; }
1910
1911  HInstruction* GetNextDisregardingMoves() const;
1912  HInstruction* GetPreviousDisregardingMoves() const;
1913
1914  HBasicBlock* GetBlock() const { return block_; }
1915  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
1916  void SetBlock(HBasicBlock* block) { block_ = block; }
1917  bool IsInBlock() const { return block_ != nullptr; }
1918  bool IsInLoop() const { return block_->IsInLoop(); }
1919  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
1920  bool IsIrreducibleLoopHeaderPhi() const {
1921    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
1922  }
1923
1924  virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;
1925
1926  ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
1927    // One virtual method is enough, just const_cast<> and then re-add the const.
1928    return ArrayRef<const HUserRecord<HInstruction*>>(
1929        const_cast<HInstruction*>(this)->GetInputRecords());
1930  }
1931
1932  HInputsRef GetInputs() {
1933    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
1934  }
1935
1936  HConstInputsRef GetInputs() const {
1937    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
1938  }
1939
1940  size_t InputCount() const { return GetInputRecords().size(); }
1941  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }
1942
1943  bool HasInput(HInstruction* input) const {
1944    for (const HInstruction* i : GetInputs()) {
1945      if (i == input) {
1946        return true;
1947      }
1948    }
1949    return false;
1950  }
1951
1952  void SetRawInputAt(size_t index, HInstruction* input) {
1953    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
1954  }
1955
1956  virtual void Accept(HGraphVisitor* visitor) = 0;
1957  virtual const char* DebugName() const = 0;
1958
1959  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }
1960
1961  virtual bool NeedsEnvironment() const { return false; }
1962
1963  uint32_t GetDexPc() const { return dex_pc_; }
1964
1965  virtual bool IsControlFlow() const { return false; }
1966
1967  // Can the instruction throw?
1968  // TODO: We should rename to CanVisiblyThrow, as some instructions (like HNewInstance),
1969  // could throw OOME, but it is still OK to remove them if they are unused.
1970  virtual bool CanThrow() const { return false; }
1971  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }
1972
1973  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
1974  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }
1975
1976  // Does not apply for all instructions, but having this at top level greatly
1977  // simplifies the null check elimination.
1978  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
1979  virtual bool CanBeNull() const {
1980    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
1981    return true;
1982  }
1983
1984  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
1985    return false;
1986  }
1987
1988  virtual bool IsActualObject() const {
1989    return GetType() == Primitive::kPrimNot;
1990  }
1991
1992  void SetReferenceTypeInfo(ReferenceTypeInfo rti);
1993
1994  ReferenceTypeInfo GetReferenceTypeInfo() const {
1995    DCHECK_EQ(GetType(), Primitive::kPrimNot);
1996    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
1997                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
1998  }
1999
  // Registers `user` as using `this` as input number `index`: prepends a new
  // node to this instruction's use list, then fixes up the affected users'
  // input records so their "before" iterators stay correct.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    HUseListNode<HInstruction*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }
2009
  // Registers environment `user` as referring to `this` in vreg slot `index`:
  // prepends a new node to the environment use list, then fixes up the
  // affected environments' records so their "before" iterators stay correct.
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }
2019
  // Unlinks `this` from the use list of its `input`-th input: erases the use
  // node and refreshes the stale "before" iterator in the following node's
  // user record. The input record of `this` itself is left untouched.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }
2026
  // Unlinks `this` from the use lists of all of its inputs (bulk form of
  // RemoveAsUserOfInput()).
  void RemoveAsUserOfAllInputs() {
    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
      input_use.GetInstruction()->uses_.erase_after(before_use_node);
      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
    }
  }
2034
2035  const HUseList<HInstruction*>& GetUses() const { return uses_; }
2036  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }
2037
2038  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
2039  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
2040  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
2041  bool HasOnlyOneNonEnvironmentUse() const {
2042    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
2043  }
2044
  // Returns whether this instruction could be removed safely if its result
  // were unused: it must not write memory, throw, or affect control flow,
  // and must not be of a kind that carries meaning even without uses
  // (suspend checks, debug info, parameters, barriers, fences).
  bool IsRemovable() const {
    return
        !DoesAnyWrite() &&
        !CanThrow() &&
        !IsSuspendCheck() &&
        !IsControlFlow() &&
        !IsNativeDebugInfo() &&
        !IsParameterValue() &&
        // If we added an explicit barrier then we should keep it.
        !IsMemoryBarrier() &&
        !IsConstructorFence();
  }
2057
2058  bool IsDeadAndRemovable() const {
2059    return IsRemovable() && !HasUses();
2060  }
2061
2062  // Does this instruction strictly dominate `other_instruction`?
2063  // Returns false if this instruction and `other_instruction` are the same.
2064  // Aborts if this instruction and `other_instruction` are both phis.
2065  bool StrictlyDominates(HInstruction* other_instruction) const;
2066
2067  int GetId() const { return id_; }
2068  void SetId(int id) { id_ = id; }
2069
2070  int GetSsaIndex() const { return ssa_index_; }
2071  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
2072  bool HasSsaIndex() const { return ssa_index_ != -1; }
2073
2074  bool HasEnvironment() const { return environment_ != nullptr; }
2075  HEnvironment* GetEnvironment() const { return environment_; }
2076  // Set the `environment_` field. Raw because this method does not
2077  // update the uses lists.
2078  void SetRawEnvironment(HEnvironment* environment) {
2079    DCHECK(environment_ == nullptr);
2080    DCHECK_EQ(environment->GetHolder(), this);
2081    environment_ = environment;
2082  }
2083
2084  void InsertRawEnvironment(HEnvironment* environment) {
2085    DCHECK(environment_ != nullptr);
2086    DCHECK_EQ(environment->GetHolder(), this);
2087    DCHECK(environment->GetParent() == nullptr);
2088    environment->parent_ = environment_;
2089    environment_ = environment;
2090  }
2091
2092  void RemoveEnvironment();
2093
2094  // Set the environment of this instruction, copying it from `environment`. While
2095  // copying, the uses lists are being updated.
2096  void CopyEnvironmentFrom(HEnvironment* environment) {
2097    DCHECK(environment_ == nullptr);
2098    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
2099    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
2100    environment_->CopyFrom(environment);
2101    if (environment->GetParent() != nullptr) {
2102      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
2103    }
2104  }
2105
2106  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
2107                                                HBasicBlock* block) {
2108    DCHECK(environment_ == nullptr);
2109    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
2110    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
2111    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
2112    if (environment->GetParent() != nullptr) {
2113      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
2114    }
2115  }
2116
2117  // Returns the number of entries in the environment. Typically, that is the
2118  // number of dex registers in a method. It could be more in case of inlining.
2119  size_t EnvironmentSize() const;
2120
2121  LocationSummary* GetLocations() const { return locations_; }
2122  void SetLocations(LocationSummary* locations) { locations_ = locations; }
2123
2124  void ReplaceWith(HInstruction* instruction);
2125  void ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
2126  void ReplaceInput(HInstruction* replacement, size_t index);
2127
2128  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
2129  // uses of this instruction by `other` are *not* updated.
2130  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
2131    ReplaceWith(other);
2132    other->ReplaceInput(this, use_index);
2133  }
2134
2135  // Move `this` instruction before `cursor`
2136  void MoveBefore(HInstruction* cursor, bool do_checks = true);
2137
2138  // Move `this` before its first user and out of any loops. If there is no
2139  // out-of-loop user that dominates all other users, move the instruction
2140  // to the end of the out-of-loop common dominator of the user's blocks.
2141  //
2142  // This can be used only on non-throwing instructions with no side effects that
2143  // have at least one use but no environment uses.
2144  void MoveBeforeFirstUserAndOutOfLoops();
2145
2146#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2147  bool Is##type() const;                                                       \
2148  const H##type* As##type() const;                                             \
2149  H##type* As##type();
2150
2151  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2152#undef INSTRUCTION_TYPE_CHECK
2153
2154#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2155  bool Is##type() const { return (As##type() != nullptr); }                    \
2156  virtual const H##type* As##type() const { return nullptr; }                  \
2157  virtual H##type* As##type() { return nullptr; }
2158  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2159#undef INSTRUCTION_TYPE_CHECK
2160
2161  // Returns whether the instruction can be moved within the graph.
2162  // TODO: this method is used by LICM and GVN with possibly different
2163  //       meanings? split and rename?
2164  virtual bool CanBeMoved() const { return false; }
2165
2166  // Returns whether the two instructions are of the same kind.
2167  virtual bool InstructionTypeEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
2168    return false;
2169  }
2170
2171  // Returns whether any data encoded in the two instructions is equal.
2172  // This method does not look at the inputs. Both instructions must be
2173  // of the same type, otherwise the method has undefined behavior.
2174  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
2175    return false;
2176  }
2177
2178  // Returns whether two instructions are equal, that is:
2179  // 1) They have the same type and contain the same data (InstructionDataEquals).
2180  // 2) Their inputs are identical.
2181  bool Equals(const HInstruction* other) const;
2182
2183  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
2184  // is adopted and implemented by our C++ compiler(s). Fow now, we need to hide
2185  // the virtual function because the __attribute__((__pure__)) doesn't really
2186  // apply the strong requirement for virtual functions, preventing optimizations.
2187  InstructionKind GetKind() const PURE;
2188  virtual InstructionKind GetKindInternal() const = 0;
2189
2190  virtual size_t ComputeHashCode() const {
2191    size_t result = GetKind();
2192    for (const HInstruction* input : GetInputs()) {
2193      result = (result * 31) + input->GetId();
2194    }
2195    return result;
2196  }
2197
2198  SideEffects GetSideEffects() const { return side_effects_; }
2199  void SetSideEffects(SideEffects other) { side_effects_ = other; }
2200  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }
2201
2202  size_t GetLifetimePosition() const { return lifetime_position_; }
2203  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
2204  LiveInterval* GetLiveInterval() const { return live_interval_; }
2205  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
2206  bool HasLiveInterval() const { return live_interval_ != nullptr; }
2207
2208  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }
2209
2210  // Returns whether the code generation of the instruction will require to have access
2211  // to the current method. Such instructions are:
2212  // (1): Instructions that require an environment, as calling the runtime requires
2213  //      to walk the stack and have the current method stored at a specific stack address.
2214  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
2215  //      to access the dex cache.
2216  bool NeedsCurrentMethod() const {
2217    return NeedsEnvironment() || IsCurrentMethod();
2218  }
2219
2220  // Returns whether the code generation of the instruction will require to have access
2221  // to the dex cache of the current method's declaring class via the current method.
2222  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }
2223
2224  // Does this instruction have any use in an environment before
2225  // control flow hits 'other'?
2226  bool HasAnyEnvironmentUseBefore(HInstruction* other);
2227
2228  // Remove all references to environment uses of this instruction.
2229  // The caller must ensure that this is safe to do.
2230  void RemoveEnvironmentUsers();
2231
2232  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
2233  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }
2234
2235 protected:
2236  // If set, the machine code for this instruction is assumed to be generated by
2237  // its users. Used by liveness analysis to compute use positions accordingly.
2238  static constexpr size_t kFlagEmittedAtUseSite = 0u;
2239  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
2240  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
2241  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;
2242
2243  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
2244    return GetInputRecords()[i];
2245  }
2246
2247  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
2248    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
2249    input_records[index] = input;
2250  }
2251
2252  uint32_t GetPackedFields() const {
2253    return packed_fields_;
2254  }
2255
2256  template <size_t flag>
2257  bool GetPackedFlag() const {
2258    return (packed_fields_ & (1u << flag)) != 0u;
2259  }
2260
2261  template <size_t flag>
2262  void SetPackedFlag(bool value = true) {
2263    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
2264  }
2265
2266  template <typename BitFieldType>
2267  typename BitFieldType::value_type GetPackedField() const {
2268    return BitFieldType::Decode(packed_fields_);
2269  }
2270
2271  template <typename BitFieldType>
2272  void SetPackedField(typename BitFieldType::value_type value) {
2273    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
2274    packed_fields_ = BitFieldType::Update(value, packed_fields_);
2275  }
2276
2277 private:
  // After nodes have been prepended to `uses_`, rewrite the input record of
  // each user in [uses_.begin(), fixup_end) so that its stored "before"
  // iterator points at the node preceding that user's own use node.
  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }
2287
  // After removing the use node that followed `before_use_node`, update the
  // next remaining use (if any) so its input record points at its new
  // predecessor node in the intrusive list.
  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }
2297
  // Environment-use counterpart of FixUpUserRecordsAfterUseInsertion: after
  // inserting nodes at the front of `env_uses_` (up to `env_fixup_end`),
  // rewrite the affected environments' vreg records with the correct
  // before-use-node iterators.
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }
2307
  // Environment-use counterpart of FixUpUserRecordsAfterUseRemoval: after
  // removing the node that followed `before_env_use_node`, repoint the next
  // environment use's vreg record (if any) at its new predecessor node.
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }
2317
  // Intrusive links for the owning block's instruction list.
  HInstruction* previous_;
  HInstruction* next_;
  HBasicBlock* block_;
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // Packed fields. Bit layout is defined by the k* constants above and
  // extended by subclasses (see e.g. HExpression, HPhi).
  uint32_t packed_fields_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
2368};
2369std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2370
2371// Iterates over the instructions, while preserving the next instruction
2372// in case the current instruction gets removed from the list by the user
2373// of this iterator.
2374class HInstructionIterator : public ValueObject {
2375 public:
2376  explicit HInstructionIterator(const HInstructionList& instructions)
2377      : instruction_(instructions.first_instruction_) {
2378    next_ = Done() ? nullptr : instruction_->GetNext();
2379  }
2380
2381  bool Done() const { return instruction_ == nullptr; }
2382  HInstruction* Current() const { return instruction_; }
2383  void Advance() {
2384    instruction_ = next_;
2385    next_ = Done() ? nullptr : instruction_->GetNext();
2386  }
2387
2388 private:
2389  HInstruction* instruction_;
2390  HInstruction* next_;
2391
2392  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2393};
2394
2395// Iterates over the instructions without saving the next instruction,
2396// therefore handling changes in the graph potentially made by the user
2397// of this iterator.
2398class HInstructionIteratorHandleChanges : public ValueObject {
2399 public:
2400  explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
2401      : instruction_(instructions.first_instruction_) {
2402  }
2403
2404  bool Done() const { return instruction_ == nullptr; }
2405  HInstruction* Current() const { return instruction_; }
2406  void Advance() {
2407    instruction_ = instruction_->GetNext();
2408  }
2409
2410 private:
2411  HInstruction* instruction_;
2412
2413  DISALLOW_COPY_AND_ASSIGN(HInstructionIteratorHandleChanges);
2414};
2415
2416
2417class HBackwardInstructionIterator : public ValueObject {
2418 public:
2419  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2420      : instruction_(instructions.last_instruction_) {
2421    next_ = Done() ? nullptr : instruction_->GetPrevious();
2422  }
2423
2424  bool Done() const { return instruction_ == nullptr; }
2425  HInstruction* Current() const { return instruction_; }
2426  void Advance() {
2427    instruction_ = next_;
2428    next_ = Done() ? nullptr : instruction_->GetPrevious();
2429  }
2430
2431 private:
2432  HInstruction* instruction_;
2433  HInstruction* next_;
2434
2435  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2436};
2437
// Base class for instructions whose number of inputs is not fixed at compile
// time (e.g. HPhi, HInvoke, HDeoptimize). Inputs live in an arena-allocated
// vector instead of the fixed std::array used by HTemplateInstruction.
class HVariableInputSizeInstruction : public HInstruction {
 public:
  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  void AddInput(HInstruction* input);
  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

  // Removes all the inputs.
  // Also removes this instructions from each input's use list
  // (for non-environment uses only).
  void RemoveAllInputs();

 protected:
  HVariableInputSizeInstruction(SideEffects side_effects,
                                uint32_t dex_pc,
                                ArenaAllocator* arena,
                                size_t number_of_inputs,
                                ArenaAllocKind kind)
      : HInstruction(side_effects, dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kind)) {}

  ArenaVector<HUserRecord<HInstruction*>> inputs_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HVariableInputSizeInstruction);
};
2468
// Base class for instructions with a statically known number of inputs `N`,
// stored inline in a fixed-size array (no arena allocation needed).
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

 private:
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};
2486
2487// HTemplateInstruction specialization for N=0.
// HTemplateInstruction specialization for N=0.
// Needed because a zero-length std::array member is not usable here;
// GetInputRecords() simply returns an empty ArrayRef.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>();
  }

 private:
  friend class SsaBuilder;
};
2504
// Base class for N-input instructions that produce a value: stores the
// result's primitive type in the packed fields.
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc) {
    this->template SetPackedField<TypeField>(type);
  }
  virtual ~HExpression() {}

  Primitive::Type GetType() const OVERRIDE {
    return TypeField::Decode(this->GetPackedFields());
  }

 protected:
  // Type field starts right after HInstruction's generic packed bits;
  // subclasses continue from kNumberOfExpressionPackedBits.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
};
2527
2528// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
2529// instruction that branches to the exit block.
class HReturnVoid FINAL : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  // Terminates its block; the single successor is the exit block.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};
2542
2543// Represents dex's RETURN opcodes. A HReturn is a control flow
2544// instruction that branches to the exit block.
class HReturn FINAL : public HTemplateInstruction<1> {
 public:
  // `value` is the returned value (input 0).
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // Terminates its block; the single successor is the exit block.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};
2559
// SSA phi node: merges the values of a dex register (`reg_number`) coming
// from each predecessor of the block, one input per predecessor.
class HPhi FINAL : public HVariableInputSizeInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HVariableInputSizeInstruction(
            SideEffects::None(),
            dex_pc,
            arena,
            number_of_inputs,
            kArenaAllocPhiInputs),
        reg_number_(reg_number) {
    SetPackedField<TypeField>(ToPhiType(type));
    DCHECK_NE(GetType(), Primitive::kPrimVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    return Primitive::PrimitiveKind(type);
  }

  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
  void SetType(Primitive::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    //  (1) int  -> float/ref (primitive type propagation),
    //  (2) long -> double (primitive type propagation).
    DCHECK(GetType() == new_type ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
           (GetType() == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
    SetPackedField<TypeField>(new_type);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  // The dex register this phi merges.
  uint32_t GetRegNumber() const { return reg_number_; }

  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
  bool IsDead() const { return !IsLive(); }
  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }

  // True if `other` is a phi in the same block for the same dex register.
  bool IsVRegEquivalentOf(const HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 private:
  // Packed layout: type field, then liveness and nullability flags.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;

  const uint32_t reg_number_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};
2649
2650// The exit instruction is the only instruction of the exit block.
2651// Instructions aborting the method (HThrow and HReturn) must branch to the
2652// exit block.
class HExit FINAL : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};
2664
2665// Jumps from one block to another.
class HGoto FINAL : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // A block ending in a goto has exactly one successor.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};
2681
// Abstract base class for compile-time constants of any primitive type.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern?
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  // Raw bit representation of the value, zero-extended to 64 bits.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};
2705
2706class HNullConstant FINAL : public HConstant {
2707 public:
2708  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2709    return true;
2710  }
2711
2712  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2713
2714  size_t ComputeHashCode() const OVERRIDE { return 0; }
2715
2716  // The null constant representation is a 0-bit pattern.
2717  virtual bool IsZeroBitPattern() const { return true; }
2718
2719  DECLARE_INSTRUCTION(NullConstant);
2720
2721 private:
2722  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}
2723
2724  friend class HGraph;
2725  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
2726};
2727
2728// Constants of the type int. Those can be from Dex instructions, or
2729// synthesized (for example with the if-eqz instruction).
class HIntConstant FINAL : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  // Zero-extends the 32-bit pattern (no sign extension) into 64 bits.
  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant()) << other->DebugName();
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  // Integer constants are used to encode Boolean values as well,
  // where 1 means true and 0 means false.
  bool IsTrue() const { return GetValue() == 1; }
  bool IsFalse() const { return GetValue() == 0; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  // Constructors are private: constants are created and cached by HGraph.
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};
2770
class HLongConstant FINAL : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant()) << other->DebugName();
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  // Constructor is private: constants are created and cached by HGraph.
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};
2800
class HFloatConstant FINAL : public HConstant {
 public:
  float GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
  }

  // Compares bit patterns, not arithmetic values: distinguishes +0.0f/-0.0f
  // and treats identical NaN payloads as equal.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsFloatConstant()) << other->DebugName();
    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
  }
  // Arithmetic zero matches both +0.0f and -0.0f.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // Only +0.0f is an all-zero bit pattern (-0.0f has the sign bit set).
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(FloatConstant);

 private:
  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}

  const float value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
};
2853
class HDoubleConstant FINAL : public HConstant {
 public:
  double GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }

  // Compares bit patterns, not arithmetic values: distinguishes +0.0/-0.0
  // and treats identical NaN payloads as equal.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsDoubleConstant()) << other->DebugName();
    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
  }
  // Arithmetic zero matches both +0.0 and -0.0.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // Only +0.0 is an all-zero bit pattern (-0.0 has the sign bit set).
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(DoubleConstant);

 private:
  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}

  const double value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
};
2904
2905// Conditional branch. A block ending with an HIf instruction must have
2906// two successors.
class HIf FINAL : public HTemplateInstruction<1> {
 public:
  // `input` is the boolean condition (input 0).
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Successor 0 is taken when the condition is true.
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // Successor 1 is taken when the condition is false.
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};
2929
2930
2931// Abstract instruction which marks the beginning and/or end of a try block and
2932// links it to the respective exception handlers. Behaves the same as a Goto in
2933// non-exceptional control flow.
2934// Normal-flow successor is stored at index zero, exception handlers under
2935// higher indices in no particular order.
class HTryBoundary FINAL : public HTemplateInstruction<0> {
 public:
  // Whether this boundary enters or exits the try block.
  enum class BoundaryKind {
    kEntry,
    kExit,
    kLast = kExit
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // All successors after index zero are exception handlers.
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};
2992
2993// Deoptimize to interpreter, upon checking a condition.
2994class HDeoptimize FINAL : public HVariableInputSizeInstruction {
2995 public:
2996  // Use this constructor when the `HDeoptimize` acts as a barrier, where no code can move
2997  // across.
2998  HDeoptimize(ArenaAllocator* arena, HInstruction* cond, DeoptimizationKind kind, uint32_t dex_pc)
2999      : HVariableInputSizeInstruction(
3000            SideEffects::All(),
3001            dex_pc,
3002            arena,
3003            /* number_of_inputs */ 1,
3004            kArenaAllocMisc) {
3005    SetPackedFlag<kFieldCanBeMoved>(false);
3006    SetPackedField<DeoptimizeKindField>(kind);
3007    SetRawInputAt(0, cond);
3008  }
3009
3010  // Use this constructor when the `HDeoptimize` guards an instruction, and any user
3011  // that relies on the deoptimization to pass should have its input be the `HDeoptimize`
3012  // instead of `guard`.
3013  // We set CanTriggerGC to prevent any intermediate address to be live
3014  // at the point of the `HDeoptimize`.
3015  HDeoptimize(ArenaAllocator* arena,
3016              HInstruction* cond,
3017              HInstruction* guard,
3018              DeoptimizationKind kind,
3019              uint32_t dex_pc)
3020      : HVariableInputSizeInstruction(
3021            SideEffects::CanTriggerGC(),
3022            dex_pc,
3023            arena,
3024            /* number_of_inputs */ 2,
3025            kArenaAllocMisc) {
3026    SetPackedFlag<kFieldCanBeMoved>(true);
3027    SetPackedField<DeoptimizeKindField>(kind);
3028    SetRawInputAt(0, cond);
3029    SetRawInputAt(1, guard);
3030  }
3031
3032  bool CanBeMoved() const OVERRIDE { return GetPackedFlag<kFieldCanBeMoved>(); }
3033
3034  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3035    return (other->CanBeMoved() == CanBeMoved()) && (other->AsDeoptimize()->GetKind() == GetKind());
3036  }
3037
3038  bool NeedsEnvironment() const OVERRIDE { return true; }
3039
3040  bool CanThrow() const OVERRIDE { return true; }
3041
3042  DeoptimizationKind GetDeoptimizationKind() const { return GetPackedField<DeoptimizeKindField>(); }
3043
3044  Primitive::Type GetType() const OVERRIDE {
3045    return GuardsAnInput() ? GuardedInput()->GetType() : Primitive::kPrimVoid;
3046  }
3047
3048  bool GuardsAnInput() const {
3049    return InputCount() == 2;
3050  }
3051
3052  HInstruction* GuardedInput() const {
3053    DCHECK(GuardsAnInput());
3054    return InputAt(1);
3055  }
3056
3057  void RemoveGuard() {
3058    RemoveInputAt(1);
3059  }
3060
3061  DECLARE_INSTRUCTION(Deoptimize);
3062
3063 private:
3064  static constexpr size_t kFieldCanBeMoved = kNumberOfGenericPackedBits;
3065  static constexpr size_t kFieldDeoptimizeKind = kNumberOfGenericPackedBits + 1;
3066  static constexpr size_t kFieldDeoptimizeKindSize =
3067      MinimumBitsToStore(static_cast<size_t>(DeoptimizationKind::kLast));
3068  static constexpr size_t kNumberOfDeoptimizePackedBits =
3069      kFieldDeoptimizeKind + kFieldDeoptimizeKindSize;
3070  static_assert(kNumberOfDeoptimizePackedBits <= kMaxNumberOfPackedBits,
3071                "Too many packed fields.");
3072  using DeoptimizeKindField =
3073      BitField<DeoptimizationKind, kFieldDeoptimizeKind, kFieldDeoptimizeKindSize>;
3074
3075  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
3076};
3077
3078// Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
3079// The compiled code checks this flag value in a guard before devirtualized call and
3080// if it's true, starts to do deoptimization.
3081// It has a 4-byte slot on stack.
3082// TODO: allocate a register for this flag.
class HShouldDeoptimizeFlag FINAL : public HVariableInputSizeInstruction {
 public:
  // CHA guards are only optimized in a separate pass and it has no side effects
  // with regard to other passes.
  HShouldDeoptimizeFlag(ArenaAllocator* arena, uint32_t dex_pc)
      : HVariableInputSizeInstruction(SideEffects::None(), dex_pc, arena, 0, kArenaAllocCHA) {
  }

  // The flag is read as a 32-bit integer from its stack slot.
  Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimInt; }

  // We do all CHA guard elimination/motion in a single pass, after which there is no
  // further guard elimination/motion since a guard might have been used for justification
  // of the elimination of another guard. Therefore, we pretend this guard cannot be moved
  // to avoid other optimizations trying to move it.
  bool CanBeMoved() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShouldDeoptimizeFlag);
};
3104
3105// Represents the ArtMethod that was passed as a first argument to
3106// the method. It is used by instructions that depend on it, like
3107// instructions that work with the dex cache.
class HCurrentMethod FINAL : public HExpression<0> {
 public:
  // `type` is the pointer width of the target (e.g. kPrimInt or kPrimLong).
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};
3118
3119// Fetches an ArtMethod from the virtual table or the interface method table
3120// of a class.
3121class HClassTableGet FINAL : public HExpression<1> {
3122 public:
3123  enum class TableKind {
3124    kVTable,
3125    kIMTable,
3126    kLast = kIMTable
3127  };
3128  HClassTableGet(HInstruction* cls,
3129                 Primitive::Type type,
3130                 TableKind kind,
3131                 size_t index,
3132                 uint32_t dex_pc)
3133      : HExpression(type, SideEffects::None(), dex_pc),
3134        index_(index) {
3135    SetPackedField<TableKindField>(kind);
3136    SetRawInputAt(0, cls);
3137  }
3138
3139  bool CanBeMoved() const OVERRIDE { return true; }
3140  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3141    return other->AsClassTableGet()->GetIndex() == index_ &&
3142        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
3143  }
3144
3145  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
3146  size_t GetIndex() const { return index_; }
3147
3148  DECLARE_INSTRUCTION(ClassTableGet);
3149
3150 private:
3151  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
3152  static constexpr size_t kFieldTableKindSize =
3153      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
3154  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
3155  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
3156                "Too many packed fields.");
3157  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
3158
3159  // The index of the ArtMethod in the table.
3160  const size_t index_;
3161
3162  DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
3163};
3164
3165// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
3166// have one successor for each entry in the switch table, and the final successor
3167// will be the block containing the next Dex opcode.
3168class HPackedSwitch FINAL : public HTemplateInstruction<1> {
3169 public:
3170  HPackedSwitch(int32_t start_value,
3171                uint32_t num_entries,
3172                HInstruction* input,
3173                uint32_t dex_pc = kNoDexPc)
3174    : HTemplateInstruction(SideEffects::None(), dex_pc),
3175      start_value_(start_value),
3176      num_entries_(num_entries) {
3177    SetRawInputAt(0, input);
3178  }
3179
3180  bool IsControlFlow() const OVERRIDE { return true; }
3181
3182  int32_t GetStartValue() const { return start_value_; }
3183
3184  uint32_t GetNumEntries() const { return num_entries_; }
3185
3186  HBasicBlock* GetDefaultBlock() const {
3187    // Last entry is the default block.
3188    return GetBlock()->GetSuccessors()[num_entries_];
3189  }
3190  DECLARE_INSTRUCTION(PackedSwitch);
3191
3192 private:
3193  const int32_t start_value_;
3194  const uint32_t num_entries_;
3195
3196  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
3197};
3198
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  // The single operand of this operation.
  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  // No side effects; two instances of the same kind with equal inputs are
  // interchangeable, so the instruction can be moved and matched by kind alone.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};
3230
3231class HBinaryOperation : public HExpression<2> {
3232 public:
3233  HBinaryOperation(Primitive::Type result_type,
3234                   HInstruction* left,
3235                   HInstruction* right,
3236                   SideEffects side_effects = SideEffects::None(),
3237                   uint32_t dex_pc = kNoDexPc)
3238      : HExpression(result_type, side_effects, dex_pc) {
3239    SetRawInputAt(0, left);
3240    SetRawInputAt(1, right);
3241  }
3242
3243  HInstruction* GetLeft() const { return InputAt(0); }
3244  HInstruction* GetRight() const { return InputAt(1); }
3245  Primitive::Type GetResultType() const { return GetType(); }
3246
3247  virtual bool IsCommutative() const { return false; }
3248
3249  // Put constant on the right.
3250  // Returns whether order is changed.
3251  bool OrderInputsWithConstantOnTheRight() {
3252    HInstruction* left = InputAt(0);
3253    HInstruction* right = InputAt(1);
3254    if (left->IsConstant() && !right->IsConstant()) {
3255      ReplaceInput(right, 0);
3256      ReplaceInput(left, 1);
3257      return true;
3258    }
3259    return false;
3260  }
3261
3262  // Order inputs by instruction id, but favor constant on the right side.
3263  // This helps GVN for commutative ops.
3264  void OrderInputs() {
3265    DCHECK(IsCommutative());
3266    HInstruction* left = InputAt(0);
3267    HInstruction* right = InputAt(1);
3268    if (left == right || (!left->IsConstant() && right->IsConstant())) {
3269      return;
3270    }
3271    if (OrderInputsWithConstantOnTheRight()) {
3272      return;
3273    }
3274    // Order according to instruction id.
3275    if (left->GetId() > right->GetId()) {
3276      ReplaceInput(right, 0);
3277      ReplaceInput(left, 1);
3278    }
3279  }
3280
3281  bool CanBeMoved() const OVERRIDE { return true; }
3282  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
3283    return true;
3284  }
3285
3286  // Try to statically evaluate `this` and return a HConstant
3287  // containing the result of this evaluation.  If `this` cannot
3288  // be evaluated as a constant, return null.
3289  HConstant* TryStaticEvaluation() const;
3290
3291  // Apply this operation to `x` and `y`.
3292  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3293                              HNullConstant* y ATTRIBUTE_UNUSED) const {
3294    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
3295    UNREACHABLE();
3296  }
3297  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
3298  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
3299  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
3300                              HIntConstant* y ATTRIBUTE_UNUSED) const {
3301    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
3302    UNREACHABLE();
3303  }
3304  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
3305  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;
3306
3307  // Returns an input that can legally be used as the right input and is
3308  // constant, or null.
3309  HConstant* GetConstantRight() const;
3310
3311  // If `GetConstantRight()` returns one of the input, this returns the other
3312  // one. Otherwise it returns null.
3313  HInstruction* GetLeastConstantLeft() const;
3314
3315  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);
3316
3317 private:
3318  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
3319};
3320
// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias  // Sentinel used to size the packed ComparisonBias bit field.
};

// Human-readable output of a ComparisonBias value (for dumps/debugging).
std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
3331
3332class HCondition : public HBinaryOperation {
3333 public:
3334  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3335      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc) {
3336    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
3337  }
3338
3339  // For code generation purposes, returns whether this instruction is just before
3340  // `instruction`, and disregard moves in between.
3341  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;
3342
3343  DECLARE_ABSTRACT_INSTRUCTION(Condition);
3344
3345  virtual IfCondition GetCondition() const = 0;
3346
3347  virtual IfCondition GetOppositeCondition() const = 0;
3348
3349  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
3350  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }
3351
3352  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
3353  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }
3354
3355  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3356    return GetPackedFields() == other->AsCondition()->GetPackedFields();
3357  }
3358
3359  bool IsFPConditionTrueIfNaN() const {
3360    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3361    IfCondition if_cond = GetCondition();
3362    if (if_cond == kCondNE) {
3363      return true;
3364    } else if (if_cond == kCondEQ) {
3365      return false;
3366    }
3367    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
3368  }
3369
3370  bool IsFPConditionFalseIfNaN() const {
3371    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3372    IfCondition if_cond = GetCondition();
3373    if (if_cond == kCondEQ) {
3374      return true;
3375    } else if (if_cond == kCondNE) {
3376      return false;
3377    }
3378    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
3379  }
3380
3381 protected:
3382  // Needed if we merge a HCompare into a HCondition.
3383  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
3384  static constexpr size_t kFieldComparisonBiasSize =
3385      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
3386  static constexpr size_t kNumberOfConditionPackedBits =
3387      kFieldComparisonBias + kFieldComparisonBiasSize;
3388  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3389  using ComparisonBiasField =
3390      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
3391
3392  template <typename T>
3393  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
3394
3395  template <typename T>
3396  int32_t CompareFP(T x, T y) const {
3397    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3398    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
3399    // Handle the bias.
3400    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
3401  }
3402
3403  // Return an integer constant containing the result of a condition evaluated at compile time.
3404  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
3405    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
3406  }
3407
3408 private:
3409  DISALLOW_COPY_AND_ASSIGN(HCondition);
3410};
3411
3412// Instruction to check if two inputs are equal to each other.
3413class HEqual FINAL : public HCondition {
3414 public:
3415  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3416      : HCondition(first, second, dex_pc) {}
3417
3418  bool IsCommutative() const OVERRIDE { return true; }
3419
3420  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3421                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3422    return MakeConstantCondition(true, GetDexPc());
3423  }
3424  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3425    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3426  }
3427  // In the following Evaluate methods, a HCompare instruction has
3428  // been merged into this HEqual instruction; evaluate it as
3429  // `Compare(x, y) == 0`.
3430  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3431    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3432                                 GetDexPc());
3433  }
3434  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3435    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3436  }
3437  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3438    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3439  }
3440
3441  DECLARE_INSTRUCTION(Equal);
3442
3443  IfCondition GetCondition() const OVERRIDE {
3444    return kCondEQ;
3445  }
3446
3447  IfCondition GetOppositeCondition() const OVERRIDE {
3448    return kCondNE;
3449  }
3450
3451 private:
3452  template <typename T> static bool Compute(T x, T y) { return x == y; }
3453
3454  DISALLOW_COPY_AND_ASSIGN(HEqual);
3455};
3456
3457class HNotEqual FINAL : public HCondition {
3458 public:
3459  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3460      : HCondition(first, second, dex_pc) {}
3461
3462  bool IsCommutative() const OVERRIDE { return true; }
3463
3464  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3465                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3466    return MakeConstantCondition(false, GetDexPc());
3467  }
3468  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3469    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3470  }
3471  // In the following Evaluate methods, a HCompare instruction has
3472  // been merged into this HNotEqual instruction; evaluate it as
3473  // `Compare(x, y) != 0`.
3474  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3475    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3476  }
3477  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3478    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3479  }
3480  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3481    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3482  }
3483
3484  DECLARE_INSTRUCTION(NotEqual);
3485
3486  IfCondition GetCondition() const OVERRIDE {
3487    return kCondNE;
3488  }
3489
3490  IfCondition GetOppositeCondition() const OVERRIDE {
3491    return kCondEQ;
3492  }
3493
3494 private:
3495  template <typename T> static bool Compute(T x, T y) { return x != y; }
3496
3497  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
3498};
3499
3500class HLessThan FINAL : public HCondition {
3501 public:
3502  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3503      : HCondition(first, second, dex_pc) {}
3504
3505  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3506    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3507  }
3508  // In the following Evaluate methods, a HCompare instruction has
3509  // been merged into this HLessThan instruction; evaluate it as
3510  // `Compare(x, y) < 0`.
3511  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3512    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3513  }
3514  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3515    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3516  }
3517  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3518    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3519  }
3520
3521  DECLARE_INSTRUCTION(LessThan);
3522
3523  IfCondition GetCondition() const OVERRIDE {
3524    return kCondLT;
3525  }
3526
3527  IfCondition GetOppositeCondition() const OVERRIDE {
3528    return kCondGE;
3529  }
3530
3531 private:
3532  template <typename T> static bool Compute(T x, T y) { return x < y; }
3533
3534  DISALLOW_COPY_AND_ASSIGN(HLessThan);
3535};
3536
3537class HLessThanOrEqual FINAL : public HCondition {
3538 public:
3539  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3540      : HCondition(first, second, dex_pc) {}
3541
3542  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3543    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3544  }
3545  // In the following Evaluate methods, a HCompare instruction has
3546  // been merged into this HLessThanOrEqual instruction; evaluate it as
3547  // `Compare(x, y) <= 0`.
3548  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3549    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3550  }
3551  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3552    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3553  }
3554  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3555    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3556  }
3557
3558  DECLARE_INSTRUCTION(LessThanOrEqual);
3559
3560  IfCondition GetCondition() const OVERRIDE {
3561    return kCondLE;
3562  }
3563
3564  IfCondition GetOppositeCondition() const OVERRIDE {
3565    return kCondGT;
3566  }
3567
3568 private:
3569  template <typename T> static bool Compute(T x, T y) { return x <= y; }
3570
3571  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
3572};
3573
3574class HGreaterThan FINAL : public HCondition {
3575 public:
3576  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3577      : HCondition(first, second, dex_pc) {}
3578
3579  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3580    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3581  }
3582  // In the following Evaluate methods, a HCompare instruction has
3583  // been merged into this HGreaterThan instruction; evaluate it as
3584  // `Compare(x, y) > 0`.
3585  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3586    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3587  }
3588  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3589    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3590  }
3591  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3592    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3593  }
3594
3595  DECLARE_INSTRUCTION(GreaterThan);
3596
3597  IfCondition GetCondition() const OVERRIDE {
3598    return kCondGT;
3599  }
3600
3601  IfCondition GetOppositeCondition() const OVERRIDE {
3602    return kCondLE;
3603  }
3604
3605 private:
3606  template <typename T> static bool Compute(T x, T y) { return x > y; }
3607
3608  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
3609};
3610
3611class HGreaterThanOrEqual FINAL : public HCondition {
3612 public:
3613  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3614      : HCondition(first, second, dex_pc) {}
3615
3616  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3617    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3618  }
3619  // In the following Evaluate methods, a HCompare instruction has
3620  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3621  // `Compare(x, y) >= 0`.
3622  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3623    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3624  }
3625  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3626    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3627  }
3628  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3629    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3630  }
3631
3632  DECLARE_INSTRUCTION(GreaterThanOrEqual);
3633
3634  IfCondition GetCondition() const OVERRIDE {
3635    return kCondGE;
3636  }
3637
3638  IfCondition GetOppositeCondition() const OVERRIDE {
3639    return kCondLT;
3640  }
3641
3642 private:
3643  template <typename T> static bool Compute(T x, T y) { return x >= y; }
3644
3645  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
3646};
3647
3648class HBelow FINAL : public HCondition {
3649 public:
3650  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3651      : HCondition(first, second, dex_pc) {}
3652
3653  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3654    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3655  }
3656  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3657    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3658  }
3659  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3660                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3661    LOG(FATAL) << DebugName() << " is not defined for float values";
3662    UNREACHABLE();
3663  }
3664  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3665                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3666    LOG(FATAL) << DebugName() << " is not defined for double values";
3667    UNREACHABLE();
3668  }
3669
3670  DECLARE_INSTRUCTION(Below);
3671
3672  IfCondition GetCondition() const OVERRIDE {
3673    return kCondB;
3674  }
3675
3676  IfCondition GetOppositeCondition() const OVERRIDE {
3677    return kCondAE;
3678  }
3679
3680 private:
3681  template <typename T> static bool Compute(T x, T y) {
3682    return MakeUnsigned(x) < MakeUnsigned(y);
3683  }
3684
3685  DISALLOW_COPY_AND_ASSIGN(HBelow);
3686};
3687
3688class HBelowOrEqual FINAL : public HCondition {
3689 public:
3690  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3691      : HCondition(first, second, dex_pc) {}
3692
3693  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3694    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3695  }
3696  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3697    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3698  }
3699  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3700                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3701    LOG(FATAL) << DebugName() << " is not defined for float values";
3702    UNREACHABLE();
3703  }
3704  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3705                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3706    LOG(FATAL) << DebugName() << " is not defined for double values";
3707    UNREACHABLE();
3708  }
3709
3710  DECLARE_INSTRUCTION(BelowOrEqual);
3711
3712  IfCondition GetCondition() const OVERRIDE {
3713    return kCondBE;
3714  }
3715
3716  IfCondition GetOppositeCondition() const OVERRIDE {
3717    return kCondA;
3718  }
3719
3720 private:
3721  template <typename T> static bool Compute(T x, T y) {
3722    return MakeUnsigned(x) <= MakeUnsigned(y);
3723  }
3724
3725  DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
3726};
3727
3728class HAbove FINAL : public HCondition {
3729 public:
3730  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3731      : HCondition(first, second, dex_pc) {}
3732
3733  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3734    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3735  }
3736  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3737    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3738  }
3739  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3740                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3741    LOG(FATAL) << DebugName() << " is not defined for float values";
3742    UNREACHABLE();
3743  }
3744  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3745                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3746    LOG(FATAL) << DebugName() << " is not defined for double values";
3747    UNREACHABLE();
3748  }
3749
3750  DECLARE_INSTRUCTION(Above);
3751
3752  IfCondition GetCondition() const OVERRIDE {
3753    return kCondA;
3754  }
3755
3756  IfCondition GetOppositeCondition() const OVERRIDE {
3757    return kCondBE;
3758  }
3759
3760 private:
3761  template <typename T> static bool Compute(T x, T y) {
3762    return MakeUnsigned(x) > MakeUnsigned(y);
3763  }
3764
3765  DISALLOW_COPY_AND_ASSIGN(HAbove);
3766};
3767
3768class HAboveOrEqual FINAL : public HCondition {
3769 public:
3770  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3771      : HCondition(first, second, dex_pc) {}
3772
3773  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3774    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3775  }
3776  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3777    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3778  }
3779  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3780                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3781    LOG(FATAL) << DebugName() << " is not defined for float values";
3782    UNREACHABLE();
3783  }
3784  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3785                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3786    LOG(FATAL) << DebugName() << " is not defined for double values";
3787    UNREACHABLE();
3788  }
3789
3790  DECLARE_INSTRUCTION(AboveOrEqual);
3791
3792  IfCondition GetCondition() const OVERRIDE {
3793    return kCondAE;
3794  }
3795
3796  IfCondition GetOppositeCondition() const OVERRIDE {
3797    return kCondB;
3798  }
3799
3800 private:
3801  template <typename T> static bool Compute(T x, T y) {
3802    return MakeUnsigned(x) >= MakeUnsigned(y);
3803  }
3804
3805  DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
3806};
3807
3808// Instruction to check how two inputs compare to each other.
3809// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
3810class HCompare FINAL : public HBinaryOperation {
3811 public:
3812  // Note that `comparison_type` is the type of comparison performed
3813  // between the comparison's inputs, not the type of the instantiated
3814  // HCompare instruction (which is always Primitive::kPrimInt).
3815  HCompare(Primitive::Type comparison_type,
3816           HInstruction* first,
3817           HInstruction* second,
3818           ComparisonBias bias,
3819           uint32_t dex_pc)
3820      : HBinaryOperation(Primitive::kPrimInt,
3821                         first,
3822                         second,
3823                         SideEffectsForArchRuntimeCalls(comparison_type),
3824                         dex_pc) {
3825    SetPackedField<ComparisonBiasField>(bias);
3826    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(first->GetType()));
3827    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(second->GetType()));
3828  }
3829
3830  template <typename T>
3831  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
3832
3833  template <typename T>
3834  int32_t ComputeFP(T x, T y) const {
3835    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3836    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
3837    // Handle the bias.
3838    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
3839  }
3840
3841  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3842    // Note that there is no "cmp-int" Dex instruction so we shouldn't
3843    // reach this code path when processing a freshly built HIR
3844    // graph. However HCompare integer instructions can be synthesized
3845    // by the instruction simplifier to implement IntegerCompare and
3846    // IntegerSignum intrinsics, so we have to handle this case.
3847    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3848  }
3849  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3850    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3851  }
3852  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3853    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
3854  }
3855  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3856    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
3857  }
3858
3859  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3860    return GetPackedFields() == other->AsCompare()->GetPackedFields();
3861  }
3862
3863  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
3864
3865  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
3866  // Only meaningful for floating-point comparisons.
3867  bool IsGtBias() const {
3868    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3869    return GetBias() == ComparisonBias::kGtBias;
3870  }
3871
3872  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type ATTRIBUTE_UNUSED) {
3873    // Comparisons do not require a runtime call in any back end.
3874    return SideEffects::None();
3875  }
3876
3877  DECLARE_INSTRUCTION(Compare);
3878
3879 protected:
3880  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
3881  static constexpr size_t kFieldComparisonBiasSize =
3882      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
3883  static constexpr size_t kNumberOfComparePackedBits =
3884      kFieldComparisonBias + kFieldComparisonBiasSize;
3885  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3886  using ComparisonBiasField =
3887      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
3888
3889  // Return an integer constant containing the result of a comparison evaluated at compile time.
3890  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
3891    DCHECK(value == -1 || value == 0 || value == 1) << value;
3892    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
3893  }
3894
3895 private:
3896  DISALLOW_COPY_AND_ASSIGN(HCompare);
3897};
3898
// Allocates a new object instance of the class given by the `cls` input
// (an HLoadClass, possibly wrapped in an HClinitCheck). Allocation calls
// into the runtime and may trigger GC.
class HNewInstance FINAL : public HExpression<1> {
 public:
  HNewInstance(HInstruction* cls,
               uint32_t dex_pc,
               dex::TypeIndex type_index,
               const DexFile& dex_file,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetPackedFlag<kFlagFinalizable>(finalizable);
    SetRawInputAt(0, cls);
  }

  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // Can throw errors when out-of-memory or if it's not instantiable/accessible.
  bool CanThrow() const OVERRIDE { return true; }

  // True if the selected entrypoint performs the instantiable/access checks itself.
  bool NeedsChecks() const {
    return entrypoint_ == kQuickAllocObjectWithChecks;
  }

  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }

  // A freshly allocated object is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  // Returns the HLoadClass input, looking through an HClinitCheck if present.
  HLoadClass* GetLoadClass() const {
    HInstruction* input = InputAt(0);
    if (input->IsClinitCheck()) {
      input = input->InputAt(0);
    }
    DCHECK(input->IsLoadClass());
    return input->AsLoadClass();
  }

  bool IsStringAlloc() const;

  DECLARE_INSTRUCTION(NewInstance);

 private:
  // Packed flag recording whether the allocated class is finalizable.
  static constexpr size_t kFlagFinalizable = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const dex::TypeIndex type_index_;
  const DexFile& dex_file_;
  QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
};
3963
// Whether a recognized intrinsic requires an environment or a dex cache;
// passed to HInvoke::SetIntrinsic().
enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};
3968
// Heap side effects of a recognized intrinsic; passed to HInvoke::SetIntrinsic().
enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};
3975
// Exception behavior of a recognized intrinsic; passed to HInvoke::SetIntrinsic().
enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
3980
// Abstract base class for all invoke instructions. Stores the arguments (and
// possibly extra non-argument inputs) as variable-size inputs, together with
// the dex method index, the resolved ArtMethod* (may be null) and intrinsic
// information.
class HInvoke : public HVariableInputSizeInstruction {
 public:
  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  InvokeType GetInvokeType() const {
    return GetPackedField<InvokeTypeField>();
  }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  // Marks this invoke as a recognized intrinsic and records its environment
  // needs, heap side effects and throwing behavior.
  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }

  // Only intrinsics that do not write to the heap may be moved.
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic() && !DoesAnyWrite(); }

  // Two invokes compare equal only when both are the same (non-kNone) intrinsic.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  ArtMethod* GetResolvedMethod() const { return resolved_method_; }
  void SetResolvedMethod(ArtMethod* method) { resolved_method_ = method; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed field layout: invoke type, return type and a can-throw flag,
  // stored after the generic packed bits.
  static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldReturnType =
      kFieldInvokeType + kFieldInvokeTypeSize;
  static constexpr size_t kFieldReturnTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
  using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>;

  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          Primitive::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          ArtMethod* resolved_method,
          InvokeType invoke_type)
    : HVariableInputSizeInstruction(
          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
          dex_pc,
          arena,
          number_of_arguments + number_of_other_inputs,
          kArenaAllocInvokeInputs),
      number_of_arguments_(number_of_arguments),
      resolved_method_(resolved_method),
      dex_method_index_(dex_method_index),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
    SetPackedField<ReturnTypeField>(return_type);
    SetPackedField<InvokeTypeField>(invoke_type);
    // Conservatively assume any invoke can throw until proven otherwise.
    SetPackedFlag<kFlagCanThrow>(true);
  }

  uint32_t number_of_arguments_;
  // May be null, e.g. for unresolved invokes.
  ArtMethod* resolved_method_;
  const uint32_t dex_method_index_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};
4090
// Invoke of a method that was not resolved at compile time; the resolved
// ArtMethod* passed to the base class is null.
class HInvokeUnresolved FINAL : public HInvoke {
 public:
  HInvokeUnresolved(ArenaAllocator* arena,
                    uint32_t number_of_arguments,
                    Primitive::Type return_type,
                    uint32_t dex_pc,
                    uint32_t dex_method_index,
                    InvokeType invoke_type)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                nullptr,  // No resolved method.
                invoke_type) {
  }

  DECLARE_INSTRUCTION(InvokeUnresolved);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
};
4114
// Invoke of a signature-polymorphic method. Dispatched as a virtual call
// (kVirtual) with no resolved ArtMethod*.
class HInvokePolymorphic FINAL : public HInvoke {
 public:
  HInvokePolymorphic(ArenaAllocator* arena,
                     uint32_t number_of_arguments,
                     Primitive::Type return_type,
                     uint32_t dex_pc,
                     uint32_t dex_method_index)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                nullptr,  // No resolved method.
                kVirtual) {}

  DECLARE_INSTRUCTION(InvokePolymorphic);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokePolymorphic);
};
4136
// Invoke of a method with a statically known target: either a static method
// or a direct (non-virtual) call. Carries dispatch information describing how
// to load the target ArtMethod* and where its code pointer lives.
class HInvokeStaticOrDirect FINAL : public HInvoke {
 public:
  // Requirements of this method call regarding the class
  // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement {
    kNone,      // Class already initialized.
    kExplicit,  // Static call having explicit clinit check as last input.
    kImplicit,  // Static call implicitly requiring a clinit check.
    kLast = kImplicit
  };

  // Determines how to load the target ArtMethod*.
  enum class MethodLoadKind {
    // Use a String init ArtMethod* loaded from Thread entrypoints.
    kStringInit,

    // Use the method's own ArtMethod* loaded by the register allocator.
    kRecursive,

    // Use PC-relative boot image ArtMethod* address that will be known at link time.
    // Used for boot image methods referenced by boot image code.
    kBootImageLinkTimePcRelative,

    // Use ArtMethod* at a known address, embed the direct address in the code.
    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
    kDirectAddress,

    // Load from resolved methods array in the dex cache using a PC-relative load.
    // Used when we need to use the dex cache, for example for invoke-static that
    // may cause class initialization (the entry may point to a resolution method),
    // and we know that we can access the dex cache arrays using a PC-relative load.
    kDexCachePcRelative,

    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
    // or unimplemented or impractical (i.e. slow) on a particular architecture.
    kDexCacheViaMethod,
  };

  // Determines the location of the code pointer.
  enum class CodePtrLocation {
    // Recursive call, use local PC-relative call instruction.
    kCallSelf,

    // Use code pointer from the ArtMethod*.
    // Used when we don't know the target code. This is also the last-resort-kind used when
    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
    kCallArtMethod,
  };

  struct DispatchInfo {
    MethodLoadKind method_load_kind;
    CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
    //     Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kDirectAddress
    //   - the dex cache arrays offset for kDexCachePcRelative.
    uint64_t method_load_data;
  };

  HInvokeStaticOrDirect(ArenaAllocator* arena,
                        uint32_t number_of_arguments,
                        Primitive::Type return_type,
                        uint32_t dex_pc,
                        uint32_t method_index,
                        ArtMethod* resolved_method,
                        DispatchInfo dispatch_info,
                        InvokeType invoke_type,
                        MethodReference target_method,
                        ClinitCheckRequirement clinit_check_requirement)
      : HInvoke(arena,
                number_of_arguments,
                // There is potentially one extra argument for the HCurrentMethod node, and
                // potentially one other if the clinit check is explicit, and potentially
                // one other if the method is a string factory.
                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
                return_type,
                dex_pc,
                method_index,
                resolved_method,
                invoke_type),
        target_method_(target_method),
        dispatch_info_(dispatch_info) {
    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
  }

  // Replaces the dispatch info, removing the now-unneeded current-method
  // input if the new method load kind no longer requires it.
  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
    bool had_current_method_input = HasCurrentMethodInput();
    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);

    // Using the current method is the default and once we find a better
    // method load kind, we should not go back to using the current method.
    DCHECK(had_current_method_input || !needs_current_method_input);

    if (had_current_method_input && !needs_current_method_input) {
      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
      RemoveInputAt(GetSpecialInputIndex());
    }
    dispatch_info_ = dispatch_info;
  }

  DispatchInfo GetDispatchInfo() const {
    return dispatch_info_;
  }

  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
    DCHECK(InputCount() == GetSpecialInputIndex() ||
           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
    InsertInputAt(GetSpecialInputIndex(), input);
  }

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // In debug builds, verifies that the trailing input of a static invoke with
  // an explicit clinit check is an HClinitCheck or HLoadClass.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
      DCHECK(!input_records.empty());
      DCHECK_GT(input_records.size(), GetNumberOfArguments());
      HInstruction* last_input = input_records.back().GetInstruction();
      // Note: `last_input` may be null during arguments setup.
      if (last_input != nullptr) {
        // `last_input` is the last input of a static invoke marked as having
        // an explicit clinit check. It must either be:
        // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
        // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
        DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
      }
    }
    return input_records;
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  bool CanBeNull() const OVERRIDE {
    return GetPackedField<ReturnTypeField>() == Primitive::kPrimNot && !IsStringInit();
  }

  // Get the index of the special input, if any.
  //
  // If the invoke HasCurrentMethodInput(), the "special input" is the current
  // method pointer; otherwise there may be one platform-specific special input,
  // such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }

  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeDexCache() const {
    return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
  }
  bool HasPcRelativeMethodLoadKind() const {
    return GetMethodLoadKind() == MethodLoadKind::kBootImageLinkTimePcRelative ||
           GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
  }
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }

  QuickEntrypointEnum GetStringInitEntryPoint() const {
    DCHECK(IsStringInit());
    return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  uint32_t GetDexCacheArrayOffset() const {
    DCHECK(HasPcRelativeDexCache());
    return dispatch_info_.method_load_data;
  }

  const DexFile& GetDexFileForPcRelativeDexCache() const;

  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetInvokeType() == kStatic;
  }

  MethodReference GetTargetMethod() const {
    return target_method_;
  }

  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = inputs_.size() - 1u;
    HInstruction* last_input = inputs_.back().GetInstruction();
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 private:
  // Packed field holding the ClinitCheckRequirement, after the bits used by HInvoke.
  static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
  static constexpr size_t kFieldClinitCheckRequirementSize =
      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
                                               kFieldClinitCheckRequirement,
                                               kFieldClinitCheckRequirementSize>;

  // Cached values of the resolved method, to avoid needing the mutator lock.
  MethodReference target_method_;
  DispatchInfo dispatch_info_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
4397std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
4398std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4399
// Invoke of a virtual method, dispatched through the receiver's vtable.
class HInvokeVirtual FINAL : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* arena,
                 uint32_t number_of_arguments,
                 Primitive::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 ArtMethod* resolved_method,
                 uint32_t vtable_index)
      : HInvoke(arena,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kVirtual),
        vtable_index_(vtable_index) {}

  // These intrinsics are known to never return null; otherwise defer to
  // the generic HInvoke behavior.
  bool CanBeNull() const OVERRIDE {
    switch (GetIntrinsic()) {
      case Intrinsics::kThreadCurrentThread:
      case Intrinsics::kStringBufferAppend:
      case Intrinsics::kStringBufferToString:
      case Intrinsics::kStringBuilderAppend:
      case Intrinsics::kStringBuilderToString:
        return false;
      default:
        return HInvoke::CanBeNull();
    }
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 private:
  // Cached vtable index of the resolved method, to avoid needing the mutator lock.
  const uint32_t vtable_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
};
4447
// Invoke of an interface method, dispatched via the interface method table (IMT).
class HInvokeInterface FINAL : public HInvoke {
 public:
  HInvokeInterface(ArenaAllocator* arena,
                   uint32_t number_of_arguments,
                   Primitive::Type return_type,
                   uint32_t dex_pc,
                   uint32_t dex_method_index,
                   ArtMethod* resolved_method,
                   uint32_t imt_index)
      : HInvoke(arena,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kInterface),
        imt_index_(imt_index) {}

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    // The assembly stub currently needs it.
    return true;
  }

  uint32_t GetImtIndex() const { return imt_index_; }
  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  DECLARE_INSTRUCTION(InvokeInterface);

 private:
  // Cached IMT index of the resolved method, to avoid needing the mutator lock.
  const uint32_t imt_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
};
4488
4489class HNeg FINAL : public HUnaryOperation {
4490 public:
4491  HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4492      : HUnaryOperation(result_type, input, dex_pc) {
4493    DCHECK_EQ(result_type, Primitive::PrimitiveKind(input->GetType()));
4494  }
4495
4496  template <typename T> static T Compute(T x) { return -x; }
4497
4498  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4499    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4500  }
4501  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4502    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4503  }
4504  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4505    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4506  }
4507  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4508    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4509  }
4510
4511  DECLARE_INSTRUCTION(Neg);
4512
4513 private:
4514  DISALLOW_COPY_AND_ASSIGN(HNeg);
4515};
4516
4517class HNewArray FINAL : public HExpression<2> {
4518 public:
4519  HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc)
4520      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc) {
4521    SetRawInputAt(0, cls);
4522    SetRawInputAt(1, length);
4523  }
4524
4525  // Calls runtime so needs an environment.
4526  bool NeedsEnvironment() const OVERRIDE { return true; }
4527
4528  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
4529  bool CanThrow() const OVERRIDE { return true; }
4530
4531  bool CanBeNull() const OVERRIDE { return false; }
4532
4533  HLoadClass* GetLoadClass() const {
4534    DCHECK(InputAt(0)->IsLoadClass());
4535    return InputAt(0)->AsLoadClass();
4536  }
4537
4538  HInstruction* GetLength() const {
4539    return InputAt(1);
4540  }
4541
4542  DECLARE_INSTRUCTION(NewArray);
4543
4544 private:
4545  DISALLOW_COPY_AND_ASSIGN(HNewArray);
4546};
4547
4548class HAdd FINAL : public HBinaryOperation {
4549 public:
4550  HAdd(Primitive::Type result_type,
4551       HInstruction* left,
4552       HInstruction* right,
4553       uint32_t dex_pc = kNoDexPc)
4554      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4555
4556  bool IsCommutative() const OVERRIDE { return true; }
4557
4558  template <typename T> static T Compute(T x, T y) { return x + y; }
4559
4560  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4561    return GetBlock()->GetGraph()->GetIntConstant(
4562        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4563  }
4564  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4565    return GetBlock()->GetGraph()->GetLongConstant(
4566        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4567  }
4568  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4569    return GetBlock()->GetGraph()->GetFloatConstant(
4570        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4571  }
4572  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4573    return GetBlock()->GetGraph()->GetDoubleConstant(
4574        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4575  }
4576
4577  DECLARE_INSTRUCTION(Add);
4578
4579 private:
4580  DISALLOW_COPY_AND_ASSIGN(HAdd);
4581};
4582
4583class HSub FINAL : public HBinaryOperation {
4584 public:
4585  HSub(Primitive::Type result_type,
4586       HInstruction* left,
4587       HInstruction* right,
4588       uint32_t dex_pc = kNoDexPc)
4589      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4590
4591  template <typename T> static T Compute(T x, T y) { return x - y; }
4592
4593  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4594    return GetBlock()->GetGraph()->GetIntConstant(
4595        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4596  }
4597  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4598    return GetBlock()->GetGraph()->GetLongConstant(
4599        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4600  }
4601  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4602    return GetBlock()->GetGraph()->GetFloatConstant(
4603        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4604  }
4605  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4606    return GetBlock()->GetGraph()->GetDoubleConstant(
4607        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4608  }
4609
4610  DECLARE_INSTRUCTION(Sub);
4611
4612 private:
4613  DISALLOW_COPY_AND_ASSIGN(HSub);
4614};
4615
4616class HMul FINAL : public HBinaryOperation {
4617 public:
4618  HMul(Primitive::Type result_type,
4619       HInstruction* left,
4620       HInstruction* right,
4621       uint32_t dex_pc = kNoDexPc)
4622      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4623
4624  bool IsCommutative() const OVERRIDE { return true; }
4625
4626  template <typename T> static T Compute(T x, T y) { return x * y; }
4627
4628  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4629    return GetBlock()->GetGraph()->GetIntConstant(
4630        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4631  }
4632  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4633    return GetBlock()->GetGraph()->GetLongConstant(
4634        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4635  }
4636  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4637    return GetBlock()->GetGraph()->GetFloatConstant(
4638        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4639  }
4640  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4641    return GetBlock()->GetGraph()->GetDoubleConstant(
4642        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4643  }
4644
4645  DECLARE_INSTRUCTION(Mul);
4646
4647 private:
4648  DISALLOW_COPY_AND_ASSIGN(HMul);
4649};
4650
4651class HDiv FINAL : public HBinaryOperation {
4652 public:
4653  HDiv(Primitive::Type result_type,
4654       HInstruction* left,
4655       HInstruction* right,
4656       uint32_t dex_pc)
4657      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4658
4659  template <typename T>
4660  T ComputeIntegral(T x, T y) const {
4661    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
4662    // Our graph structure ensures we never have 0 for `y` during
4663    // constant folding.
4664    DCHECK_NE(y, 0);
4665    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4666    return (y == -1) ? -x : x / y;
4667  }
4668
4669  template <typename T>
4670  T ComputeFP(T x, T y) const {
4671    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
4672    return x / y;
4673  }
4674
4675  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4676    return GetBlock()->GetGraph()->GetIntConstant(
4677        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4678  }
4679  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4680    return GetBlock()->GetGraph()->GetLongConstant(
4681        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4682  }
4683  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4684    return GetBlock()->GetGraph()->GetFloatConstant(
4685        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4686  }
4687  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4688    return GetBlock()->GetGraph()->GetDoubleConstant(
4689        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4690  }
4691
4692  DECLARE_INSTRUCTION(Div);
4693
4694 private:
4695  DISALLOW_COPY_AND_ASSIGN(HDiv);
4696};
4697
// Remainder with Java semantics: the integral result takes the sign of the
// dividend (C++ truncating `%`), and MIN_VALUE % -1 == 0. Floating-point
// remainder uses fmod (Java's `%` operator), not IEEE `remainder`.
class HRem FINAL : public HBinaryOperation {
 public:
  HRem(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T>
  T ComputeIntegral(T x, T y) const {
    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? 0 : x % y;
  }

  template <typename T>
  T ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
    return std::fmod(x, y);
  }

  // Constant-folding entry points, one per supported operand type.
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Rem);

 private:
  DISALLOW_COPY_AND_ASSIGN(HRem);
};
4744
// Guards a division/remainder: passes its input through and throws
// ArithmeticException at runtime when the value is zero.
class HDivZeroCheck FINAL : public HExpression<1> {
 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
  // constructor.
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // The check's type always mirrors the (possibly updated) input type.
  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Two checks on the same input are interchangeable; no extra state.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Needs an environment (and can throw) because it may deoptimize into the
  // runtime to raise the exception.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};
4770
4771class HShl FINAL : public HBinaryOperation {
4772 public:
4773  HShl(Primitive::Type result_type,
4774       HInstruction* value,
4775       HInstruction* distance,
4776       uint32_t dex_pc = kNoDexPc)
4777      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4778    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4779    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4780  }
4781
4782  template <typename T>
4783  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4784    return value << (distance & max_shift_distance);
4785  }
4786
4787  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4788    return GetBlock()->GetGraph()->GetIntConstant(
4789        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4790  }
4791  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4792    return GetBlock()->GetGraph()->GetLongConstant(
4793        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4794  }
4795  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4796                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4797    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4798    UNREACHABLE();
4799  }
4800  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4801                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4802    LOG(FATAL) << DebugName() << " is not defined for float values";
4803    UNREACHABLE();
4804  }
4805  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4806                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4807    LOG(FATAL) << DebugName() << " is not defined for double values";
4808    UNREACHABLE();
4809  }
4810
4811  DECLARE_INSTRUCTION(Shl);
4812
4813 private:
4814  DISALLOW_COPY_AND_ASSIGN(HShl);
4815};
4816
// Arithmetic (sign-extending) shift right. Only the low bits of the distance
// are used (5 bits for int, 6 for long).
class HShr FINAL : public HBinaryOperation {
 public:
  HShr(Primitive::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    // NOTE(review): `>>` on a negative signed value is implementation-defined
    // in C++; this relies on the compiler emitting an arithmetic shift.
    return value >> (distance & max_shift_distance);
  }

  // Constant folding; the distance operand is always an int.
  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};
4862
// Logical (zero-extending) shift right, performed in the unsigned domain.
// Only the low bits of the distance are used (5 bits for int, 6 for long).
class HUShr FINAL : public HBinaryOperation {
 public:
  HUShr(Primitive::Type result_type,
        HInstruction* value,
        HInstruction* distance,
        uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    // Cast to unsigned so the shift inserts zero bits rather than copies of
    // the sign bit.
    typedef typename std::make_unsigned<T>::type V;
    V ux = static_cast<V>(value);
    return static_cast<T>(ux >> (distance & max_shift_distance));
  }

  // Constant folding; the distance operand is always an int.
  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(UShr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUShr);
};
4910
// Bitwise AND; defined only for integral types — the float/double Evaluate
// overloads abort.
class HAnd FINAL : public HBinaryOperation {
 public:
  HAnd(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  // Commutativity lets the simplifier/GVN canonicalize operand order.
  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> static T Compute(T x, T y) { return x & y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(And);

 private:
  DISALLOW_COPY_AND_ASSIGN(HAnd);
};
4947
// Bitwise inclusive OR; defined only for integral types — the float/double
// Evaluate overloads abort.
class HOr FINAL : public HBinaryOperation {
 public:
  HOr(Primitive::Type result_type,
      HInstruction* left,
      HInstruction* right,
      uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  // Commutativity lets the simplifier/GVN canonicalize operand order.
  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> static T Compute(T x, T y) { return x | y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Or);

 private:
  DISALLOW_COPY_AND_ASSIGN(HOr);
};
4984
// Bitwise exclusive OR; defined only for integral types — the float/double
// Evaluate overloads abort.
class HXor FINAL : public HBinaryOperation {
 public:
  HXor(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  // Commutativity lets the simplifier/GVN canonicalize operand order.
  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> static T Compute(T x, T y) { return x ^ y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Xor);

 private:
  DISALLOW_COPY_AND_ASSIGN(HXor);
};
5021
5022class HRor FINAL : public HBinaryOperation {
5023 public:
5024  HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance)
5025    : HBinaryOperation(result_type, value, distance) {
5026    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
5027    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
5028  }
5029
5030  template <typename T>
5031  static T Compute(T value, int32_t distance, int32_t max_shift_value) {
5032    typedef typename std::make_unsigned<T>::type V;
5033    V ux = static_cast<V>(value);
5034    if ((distance & max_shift_value) == 0) {
5035      return static_cast<T>(ux);
5036    } else {
5037      const V reg_bits = sizeof(T) * 8;
5038      return static_cast<T>(ux >> (distance & max_shift_value)) |
5039                           (value << (reg_bits - (distance & max_shift_value)));
5040    }
5041  }
5042
5043  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
5044    return GetBlock()->GetGraph()->GetIntConstant(
5045        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5046  }
5047  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
5048    return GetBlock()->GetGraph()->GetLongConstant(
5049        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5050  }
5051  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5052                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5053    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5054    UNREACHABLE();
5055  }
5056  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5057                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5058    LOG(FATAL) << DebugName() << " is not defined for float values";
5059    UNREACHABLE();
5060  }
5061  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5062                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5063    LOG(FATAL) << DebugName() << " is not defined for double values";
5064    UNREACHABLE();
5065  }
5066
5067  DECLARE_INSTRUCTION(Ror);
5068
5069 private:
5070  DISALLOW_COPY_AND_ASSIGN(HRor);
5071};
5072
// The value of a parameter in this method. Its location depends on
// the calling convention.
class HParameterValue FINAL : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  dex::TypeIndex type_index,
                  uint8_t index,
                  Primitive::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    SetPackedFlag<kFlagIsThis>(is_this);
    // The implicit `this` argument can never be null; any other reference
    // parameter is conservatively assumed nullable until analysis refines it.
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const dex::TypeIndex type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};
5116
// Bitwise complement (~x); defined only for integral types — the float and
// double Evaluate overloads abort.
class HNot FINAL : public HUnaryOperation {
 public:
  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  // Carries no extra state, so two HNots on the same input are equal.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> static T Compute(T x) { return ~x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Not);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNot);
};
5149
// Logical negation of a boolean value; the input must be 0 or 1 (enforced by
// the DCHECK in Compute). Only the int overload of Evaluate is defined.
class HBooleanNot FINAL : public HUnaryOperation {
 public:
  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  // Carries no extra state, so two HBooleanNots on the same input are equal.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> static bool Compute(T x) {
    // Booleans are represented as 0/1 ints in the HIR.
    DCHECK(IsUint<1>(x)) << x;
    return !x;
  }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for long values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BooleanNot);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
};
5186
// Primitive type conversion (e.g. int-to-long, double-to-float) of a single
// input value.
class HTypeConversion FINAL : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
    // Invariant: We should never generate a conversion to a Boolean value.
    DCHECK_NE(Primitive::kPrimBoolean, result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
  Primitive::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  // The conversion is fully described by its input and result types, so no
  // per-instance data needs comparing.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result.  If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};
5215
// Sentinel for "no register assigned"; -1 converts to the maximum uint32_t.
static constexpr uint32_t kNoRegNumber = -1;
5217
// Guards a reference: passes its input through and throws
// NullPointerException at runtime when the value is null. Downstream users
// of the check's result can rely on it being non-null.
class HNullCheck FINAL : public HExpression<1> {
 public:
  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
  // constructor.
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Two checks on the same input are interchangeable; no extra state.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  // The check's result is known non-null: a null input throws instead.
  bool CanBeNull() const OVERRIDE { return false; }


  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};
5244
// Embeds an ArtField and all the information required by the compiler. We cache
// that information to avoid requiring the mutator lock every time we need it.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(ArtField* field,
            MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file)
      : field_(field),
        field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file) {}

  // Simple accessors; all state is immutable after construction.
  ArtField* GetField() const { return field_; }
  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }

 private:
  const ArtField* const field_;
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
};
5281
// Reads an instance field from the object given as input 0.
class HInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    ArtField* field,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, value);
  }

  // Volatile reads have ordering constraints and must stay put.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Two gets are equal when they read the same offset; together with equal
  // inputs this identifies the same field.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  // Mix the field offset into the hash so gets of different fields land in
  // different GVN buckets.
  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};
5331
// Writes `value` (input 1) into an instance field of `object` (input 0).
class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    ArtField* field,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Conservatively assume the stored value may be null until an analysis
    // clears the flag (used to skip GC write-barrier work).
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};
5381
// Reads an element of type `type` from `array` (input 0) at `index`
// (input 1). Also used for String.charAt via the kFlagIsStringCharAt flag.
class HArrayGet FINAL : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc,
            bool is_string_char_at = false)
      : HExpression(type, SideEffects::ArrayReadOfType(type), dex_pc) {
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Fully described by its inputs and type; no per-instance data to compare.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, unless the array is the result of NewArray, the array access is always
    // preceded by some form of null NullCheck necessary for the bounds check, usually
    // implicit null check on the ArrayLength input to BoundsCheck or Deoptimize for
    // dynamic BCE. There are cases when these could be removed to produce better code.
    // If we ever add optimizations to do so we should allow an implicit check here
    // (as long as the address falls in the first page).
    //
    // As an example of such fancy optimization, we could eliminate BoundsCheck for
    //     a = cond ? new int[1] : null;
    //     a[0];  // The Phi does not need bounds check for either input.
    return false;
  }

  // Whether `other` is the int/long vs float/double "type equivalent" of this
  // get: same block, array, index and dex pc, but one of the pair is an
  // integral type and the other a floating-point type (checked in debug
  // builds).
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      if (Primitive::IsIntOrLongType(GetType())) {
        DCHECK(Primitive::IsFloatingPointType(other->GetType())) << other->GetType();
      } else {
        DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
        DCHECK(Primitive::IsIntOrLongType(other->GetType())) << other->GetType();
      }
    }
    return result;
  }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  // We treat a String as an array, creating the HArrayGet from String.charAt()
  // intrinsic in the instruction simplifier. We can always determine whether
  // a particular HArrayGet is actually a String.charAt() by looking at the type
  // of the input but that requires holding the mutator lock, so we prefer to use
  // a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};
5450
// Stores `value` into `array` at `index`. A store of a reference may require
// a runtime type check and can therefore throw ArrayStoreException.
class HArraySet FINAL : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
    // Only reference stores can require a runtime type check.
    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == Primitive::kPrimNot);
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
    // Make a best guess now, may be refined during SSA building.
    ComputeSideEffects();
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We call a runtime method to throw ArrayStoreException.
    return NeedsTypeCheck();
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  // Clears the type-check requirement; the caller has established that this
  // store cannot hit an ArrayStoreException.
  void ClearNeedsTypeCheck() {
    SetPackedFlag<kFlagNeedsTypeCheck>(false);
  }

  // Clears the value-may-be-null assumption made in the constructor.
  void ClearValueCanBeNull() {
    SetPackedFlag<kFlagValueCanBeNull>(false);
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
  }

  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
  bool StaticTypeOfArrayIsObjectArray() const {
    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  Primitive::Type GetComponentType() const {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type_` is set during building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    Primitive::Type value_type = GetValue()->GetType();
    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
        ? value_type
        : GetRawExpectedComponentType();
  }

  // The component type recorded at graph-building time, without the
  // floating-point correction applied by GetComponentType().
  Primitive::Type GetRawExpectedComponentType() const {
    return GetPackedField<ExpectedComponentTypeField>();
  }

  // Recomputes the side effects from the current component type; called from
  // the constructor and again when the component type is refined.
  void ComputeSideEffects() {
    Primitive::Type type = GetComponentType();
    SetSideEffects(SideEffects::ArrayWriteOfType(type).Union(
        SideEffectsForArchRuntimeCalls(type)));
  }

  // Reference stores may call into the runtime, which can trigger GC.
  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  // Packed-field layout: the expected component type, then three boolean flags.
  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldExpectedComponentTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagNeedsTypeCheck =
      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
  static constexpr size_t kNumberOfArraySetPackedBits =
      kFlagStaticTypeOfArrayIsObjectArray + 1;
  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ExpectedComponentTypeField =
      BitField<Primitive::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};
5550
// Loads the length of `array` as an int. Also used for String.length() /
// String.isEmpty() after intrinsic simplification (see flag below).
class HArrayLength FINAL : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
    SetPackedFlag<kFlagIsStringLength>(is_string_length);
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // Dereferencing the array input faults on null, so codegen may fold the
  // null check into this instruction.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }

  DECLARE_INSTRUCTION(ArrayLength);

 private:
  // We treat a String as an array, creating the HArrayLength from String.length()
  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
  // determine whether a particular HArrayLength is actually a String.length() by
  // looking at the type of the input but that requires holding the mutator lock, so
  // we prefer to use a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringLength = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
  static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
};
5586
5587class HBoundsCheck FINAL : public HExpression<2> {
5588 public:
5589  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
5590  // constructor.
5591  HBoundsCheck(HInstruction* index,
5592               HInstruction* length,
5593               uint32_t dex_pc,
5594               bool string_char_at = false)
5595      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
5596    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(index->GetType()));
5597    SetPackedFlag<kFlagIsStringCharAt>(string_char_at);
5598    SetRawInputAt(0, index);
5599    SetRawInputAt(1, length);
5600  }
5601
5602  bool CanBeMoved() const OVERRIDE { return true; }
5603  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5604    return true;
5605  }
5606
5607  bool NeedsEnvironment() const OVERRIDE { return true; }
5608
5609  bool CanThrow() const OVERRIDE { return true; }
5610
5611  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
5612
5613  HInstruction* GetIndex() const { return InputAt(0); }
5614
5615  DECLARE_INSTRUCTION(BoundsCheck);
5616
5617 private:
5618  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
5619
5620  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
5621};
5622
// Point at which the executing thread may be suspended by the runtime;
// modeled as possibly triggering GC, hence it requires an environment.
class HSuspendCheck FINAL : public HTemplateInstruction<0> {
 public:
  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}

  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  // Slow path accessors; see the comment on slow_path_ below.
  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
  SlowPathCode* GetSlowPath() const { return slow_path_; }

  DECLARE_INSTRUCTION(SuspendCheck);

 private:
  // Only used for code generation, in order to share the same slow path between back edges
  // of a same loop.
  SlowPathCode* slow_path_;

  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
};
5644
5645// Pseudo-instruction which provides the native debugger with mapping information.
5646// It ensures that we can generate line number and local variables at this point.
class HNativeDebugInfo : public HTemplateInstruction<0> {
 public:
  explicit HNativeDebugInfo(uint32_t dex_pc)
      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}

  // An environment is required so the debugger can reconstruct dex register
  // values at this point.
  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  DECLARE_INSTRUCTION(NativeDebugInfo);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
};
5661
5662/**
5663 * Instruction to load a Class object.
5664 */
5665class HLoadClass FINAL : public HInstruction {
5666 public:
5667  // Determines how to load the Class.
5668  enum class LoadKind {
5669    // We cannot load this class. See HSharpening::SharpenLoadClass.
5670    kInvalid = -1,
5671
5672    // Use the Class* from the method's own ArtMethod*.
5673    kReferrersClass,
5674
5675    // Use PC-relative boot image Class* address that will be known at link time.
5676    // Used for boot image classes referenced by boot image code.
5677    kBootImageLinkTimePcRelative,
5678
5679    // Use a known boot image Class* address, embedded in the code by the codegen.
5680    // Used for boot image classes referenced by apps in AOT- and JIT-compiled code.
5681    kBootImageAddress,
5682
5683    // Load from an entry in the .bss section using a PC-relative load.
5684    // Used for classes outside boot image when .bss is accessible with a PC-relative load.
5685    kBssEntry,
5686
5687    // Load from the root table associated with the JIT compiled method.
5688    kJitTableAddress,
5689
5690    // Load using a simple runtime call. This is the fall-back load kind when
5691    // the codegen is unable to use another appropriate kind.
5692    kRuntimeCall,
5693
5694    kLast = kRuntimeCall
5695  };
5696
5697  HLoadClass(HCurrentMethod* current_method,
5698             dex::TypeIndex type_index,
5699             const DexFile& dex_file,
5700             Handle<mirror::Class> klass,
5701             bool is_referrers_class,
5702             uint32_t dex_pc,
5703             bool needs_access_check)
5704      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
5705        special_input_(HUserRecord<HInstruction*>(current_method)),
5706        type_index_(type_index),
5707        dex_file_(dex_file),
5708        klass_(klass),
5709        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
5710    // Referrers class should not need access check. We never inline unverified
5711    // methods so we can't possibly end up in this situation.
5712    DCHECK(!is_referrers_class || !needs_access_check);
5713
5714    SetPackedField<LoadKindField>(
5715        is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kRuntimeCall);
5716    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
5717    SetPackedFlag<kFlagIsInBootImage>(false);
5718    SetPackedFlag<kFlagGenerateClInitCheck>(false);
5719  }
5720
5721  void SetLoadKind(LoadKind load_kind);
5722
5723  LoadKind GetLoadKind() const {
5724    return GetPackedField<LoadKindField>();
5725  }
5726
5727  bool CanBeMoved() const OVERRIDE { return true; }
5728
5729  bool InstructionDataEquals(const HInstruction* other) const;
5730
5731  size_t ComputeHashCode() const OVERRIDE { return type_index_.index_; }
5732
5733  bool CanBeNull() const OVERRIDE { return false; }
5734
5735  bool NeedsEnvironment() const OVERRIDE {
5736    return CanCallRuntime();
5737  }
5738
5739  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
5740    // The entrypoint the code generator is going to call does not do
5741    // clinit of the class.
5742    DCHECK(!NeedsAccessCheck());
5743    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
5744  }
5745
5746  bool CanCallRuntime() const {
5747    return NeedsAccessCheck() ||
5748           MustGenerateClinitCheck() ||
5749           GetLoadKind() == LoadKind::kRuntimeCall ||
5750           GetLoadKind() == LoadKind::kBssEntry;
5751  }
5752
5753  bool CanThrow() const OVERRIDE {
5754    return NeedsAccessCheck() ||
5755           MustGenerateClinitCheck() ||
5756           // If the class is in the boot image, the lookup in the runtime call cannot throw.
5757           // This keeps CanThrow() consistent between non-PIC (using kBootImageAddress) and
5758           // PIC and subsequently avoids a DCE behavior dependency on the PIC option.
5759           ((GetLoadKind() == LoadKind::kRuntimeCall ||
5760             GetLoadKind() == LoadKind::kBssEntry) &&
5761            !IsInBootImage());
5762  }
5763
5764  ReferenceTypeInfo GetLoadedClassRTI() {
5765    return loaded_class_rti_;
5766  }
5767
5768  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
5769    // Make sure we only set exact types (the loaded class should never be merged).
5770    DCHECK(rti.IsExact());
5771    loaded_class_rti_ = rti;
5772  }
5773
5774  dex::TypeIndex GetTypeIndex() const { return type_index_; }
5775  const DexFile& GetDexFile() const { return dex_file_; }
5776
5777  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
5778    return GetLoadKind() == LoadKind::kRuntimeCall;
5779  }
5780
5781  static SideEffects SideEffectsForArchRuntimeCalls() {
5782    return SideEffects::CanTriggerGC();
5783  }
5784
5785  bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
5786  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
5787  bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
5788  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
5789
5790  void MarkInBootImage() {
5791    SetPackedFlag<kFlagIsInBootImage>(true);
5792  }
5793
5794  void AddSpecialInput(HInstruction* special_input);
5795
5796  using HInstruction::GetInputRecords;  // Keep the const version visible.
5797  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
5798    return ArrayRef<HUserRecord<HInstruction*>>(
5799        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
5800  }
5801
5802  Primitive::Type GetType() const OVERRIDE {
5803    return Primitive::kPrimNot;
5804  }
5805
5806  Handle<mirror::Class> GetClass() const {
5807    return klass_;
5808  }
5809
5810  DECLARE_INSTRUCTION(LoadClass);
5811
5812 private:
5813  static constexpr size_t kFlagNeedsAccessCheck    = kNumberOfGenericPackedBits;
5814  static constexpr size_t kFlagIsInBootImage       = kFlagNeedsAccessCheck + 1;
5815  // Whether this instruction must generate the initialization check.
5816  // Used for code generation.
5817  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
5818  static constexpr size_t kFieldLoadKind           = kFlagGenerateClInitCheck + 1;
5819  static constexpr size_t kFieldLoadKindSize =
5820      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
5821  static constexpr size_t kNumberOfLoadClassPackedBits = kFieldLoadKind + kFieldLoadKindSize;
5822  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
5823  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
5824
5825  static bool HasTypeReference(LoadKind load_kind) {
5826    return load_kind == LoadKind::kReferrersClass ||
5827        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5828        load_kind == LoadKind::kBssEntry ||
5829        load_kind == LoadKind::kRuntimeCall;
5830  }
5831
5832  void SetLoadKindInternal(LoadKind load_kind);
5833
5834  // The special input is the HCurrentMethod for kRuntimeCall or kReferrersClass.
5835  // For other load kinds it's empty or possibly some architecture-specific instruction
5836  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
5837  HUserRecord<HInstruction*> special_input_;
5838
5839  // A type index and dex file where the class can be accessed. The dex file can be:
5840  // - The compiling method's dex file if the class is defined there too.
5841  // - The compiling method's dex file if the class is referenced there.
5842  // - The dex file where the class is defined. When the load kind can only be
5843  //   kBssEntry or kRuntimeCall, we cannot emit code for this `HLoadClass`.
5844  const dex::TypeIndex type_index_;
5845  const DexFile& dex_file_;
5846
5847  Handle<mirror::Class> klass_;
5848
5849  ReferenceTypeInfo loaded_class_rti_;
5850
5851  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
5852};
5853std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
5854
5855// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
  // The special input is used for PC-relative loads on some architectures,
  // including literal pool loads, which are PC-relative too.
  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
         GetLoadKind() == LoadKind::kBootImageAddress ||
         GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
  // Only one special input may ever be attached.
  DCHECK(special_input_.GetInstruction() == nullptr);
  special_input_ = HUserRecord<HInstruction*>(special_input);
  // Record this instruction as a user of the special input at index 0.
  special_input->AddUseAt(this, 0);
}
5866
// Instruction to load a String object by its dex string index.
class HLoadString FINAL : public HInstruction {
 public:
  // Determines how to load the String.
  enum class LoadKind {
    // Use PC-relative boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code.
    kBootImageLinkTimePcRelative,

    // Use a known boot image String* address, embedded in the code by the codegen.
    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
    kBootImageAddress,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for strings outside boot image when .bss is accessible with a PC-relative load.
    kBssEntry,

    // Load from the root table associated with the JIT compiled method.
    kJitTableAddress,

    // Load using a simple runtime call. This is the fall-back load kind when
    // the codegen is unable to use another appropriate kind.
    kRuntimeCall,

    kLast = kRuntimeCall,
  };

  HLoadString(HCurrentMethod* current_method,
              dex::StringIndex string_index,
              const DexFile& dex_file,
              uint32_t dex_pc)
      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        string_index_(string_index),
        dex_file_(dex_file) {
    // Start with the fall-back kind; may be sharpened later via SetLoadKind().
    SetPackedField<LoadKindField>(LoadKind::kRuntimeCall);
  }

  void SetLoadKind(LoadKind load_kind);

  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  dex::StringIndex GetStringIndex() const {
    return string_index_;
  }

  Handle<mirror::String> GetString() const {
    return string_;
  }

  void SetString(Handle<mirror::String> str) {
    string_ = str;
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE;

  size_t ComputeHashCode() const OVERRIDE { return string_index_.index_; }

  // Will call the runtime if we need to load the string through
  // the dex cache and the string is not guaranteed to be there yet.
  bool NeedsEnvironment() const OVERRIDE {
    LoadKind load_kind = GetLoadKind();
    if (load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kBootImageAddress ||
        load_kind == LoadKind::kJitTableAddress) {
      return false;
    }
    return true;
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    return GetLoadKind() == LoadKind::kRuntimeCall;
  }

  bool CanBeNull() const OVERRIDE { return false; }
  // Only load kinds that may call the runtime can throw.
  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  void AddSpecialInput(HInstruction* special_input);

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    // Expose the special input (if any) as the sole input of this instruction.
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  Primitive::Type GetType() const OVERRIDE {
    return Primitive::kPrimNot;
  }

  DECLARE_INSTRUCTION(LoadString);

 private:
  static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;

  void SetLoadKindInternal(LoadKind load_kind);

  // The special input is the HCurrentMethod for kRuntimeCall.
  // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
  HUserRecord<HInstruction*> special_input_;

  dex::StringIndex string_index_;
  const DexFile& dex_file_;

  Handle<mirror::String> string_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};
5991std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
5992
5993// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
  // The special input is used for PC-relative loads on some architectures,
  // including literal pool loads, which are PC-relative too.
  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
         GetLoadKind() == LoadKind::kBssEntry ||
         GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
  // HLoadString::GetInputRecords() returns an empty array at this point,
  // so use the GetInputRecords() from the base class to set the input record.
  DCHECK(special_input_.GetInstruction() == nullptr);
  special_input_ = HUserRecord<HInstruction*>(special_input);
  // Record this instruction as a user of the special input at index 0.
  special_input->AddUseAt(this, 0);
}
6006
6007/**
6008 * Performs an initialization check on its Class object input.
6009 */
class HClinitCheck FINAL : public HExpression<1> {
 public:
  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
      : HExpression(
            Primitive::kPrimNot,
            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
            dex_pc) {
    SetRawInputAt(0, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // May call runtime to initialize the class.
    return true;
  }

  // Class initialization can throw (it runs arbitrary <clinit> code).
  bool CanThrow() const OVERRIDE { return true; }

  // The sole input is the HLoadClass whose class is being checked.
  HLoadClass* GetLoadClass() const {
    DCHECK(InputAt(0)->IsLoadClass());
    return InputAt(0)->AsLoadClass();
  }

  DECLARE_INSTRUCTION(ClinitCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
};
6042
// Reads a resolved static field; input 0 is the class holding the field.
class HStaticFieldGet FINAL : public HExpression<1> {
 public:
  HStaticFieldGet(HInstruction* cls,
                  ArtField* field,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, cls);
  }


  // Volatile reads must not be reordered/eliminated, so only non-volatile
  // gets are movable.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Two static field gets are interchangeable when they read the same offset.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HStaticFieldGet* other_get = other->AsStaticFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(StaticFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
};
6089
// Writes `value` into a resolved static field; input 0 is the class holding
// the field, input 1 is the value.
class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  ArtField* field,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Conservatively assume the stored value may be null until proven otherwise.
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(StaticFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};
6136
// Reads an instance field that could not be resolved at compile time;
// lowered to a runtime call, hence the conservative side effects.
class HUnresolvedInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HUnresolvedInstanceFieldGet(HInstruction* obj,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetRawInputAt(0, obj);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  // The runtime field access can throw (e.g. if resolution fails).
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);

 private:
  // Dex field index used by the runtime to resolve the field.
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
};
6161
6162class HUnresolvedInstanceFieldSet FINAL : public HTemplateInstruction<2> {
6163 public:
6164  HUnresolvedInstanceFieldSet(HInstruction* obj,
6165                              HInstruction* value,
6166                              Primitive::Type field_type,
6167                              uint32_t field_index,
6168                              uint32_t dex_pc)
6169      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6170        field_index_(field_index) {
6171    SetPackedField<FieldTypeField>(field_type);
6172    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
6173    SetRawInputAt(0, obj);
6174    SetRawInputAt(1, value);
6175  }
6176
6177  bool NeedsEnvironment() const OVERRIDE { return true; }
6178  bool CanThrow() const OVERRIDE { return true; }
6179
6180  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6181  uint32_t GetFieldIndex() const { return field_index_; }
6182
6183  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
6184
6185 private:
6186  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6187  static constexpr size_t kFieldFieldTypeSize =
6188      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
6189  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
6190      kFieldFieldType + kFieldFieldTypeSize;
6191  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6192                "Too many packed fields.");
6193  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
6194
6195  const uint32_t field_index_;
6196
6197  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
6198};
6199
// Reads a static field that could not be resolved at compile time;
// lowered to a runtime call, hence the conservative side effects.
class HUnresolvedStaticFieldGet FINAL : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  // The runtime field access can throw (e.g. if resolution fails).
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 private:
  // Dex field index used by the runtime to resolve the field.
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
};
6222
// Writes `value` into a static field that could not be resolved at compile
// time; lowered to a runtime call, hence the conservative side effects.
class HUnresolvedStaticFieldSet FINAL : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetPackedField<FieldTypeField>(field_type);
    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
    SetRawInputAt(0, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  // The runtime field access can throw (e.g. if resolution fails).
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 private:
  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
      kFieldFieldType + kFieldFieldTypeSize;
  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;

  // Dex field index used by the runtime to resolve the field.
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
};
6258
// Implement the move-exception DEX instruction: loads the pending exception
// reference (see HClearException for the companion that clears the storage).
class HLoadException FINAL : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}

  // An exception is expected to be pending here, so the result is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadException);
};
6272
// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
class HClearException FINAL : public HTemplateInstruction<0> {
 public:
  // Modeled with AllWrites side effects so it is conservatively ordered
  // with respect to other writes and never considered removable.
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}

  DECLARE_INSTRUCTION(ClearException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClearException);
};
6285
// Implements the throw DEX instruction: throws the `exception` input.
class HThrow FINAL : public HTemplateInstruction<1> {
 public:
  HThrow(HInstruction* exception, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, exception);
  }

  // Throwing terminates the basic block.
  bool IsControlFlow() const OVERRIDE { return true; }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }


  DECLARE_INSTRUCTION(Throw);

 private:
  DISALLOW_COPY_AND_ASSIGN(HThrow);
};
6305
6306/**
6307 * Implementation strategies for the code generator of a HInstanceOf
6308 * or `HCheckCast`.
6309 */
6310enum class TypeCheckKind {
6311  kUnresolvedCheck,       // Check against an unresolved type.
6312  kExactCheck,            // Can do a single class compare.
6313  kClassHierarchyCheck,   // Can just walk the super class chain.
6314  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
6315  kInterfaceCheck,        // No optimization yet when checking against an interface.
6316  kArrayObjectCheck,      // Can just check if the array is not primitive.
6317  kArrayCheck,            // No optimization yet when checking against a generic array.
6318  kLast = kArrayCheck
6319};
6320
6321std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
6322
// Implements the instance-of DEX instruction: yields a boolean telling whether
// `object` is an instance of the class loaded by `constant`.
class HInstanceOf FINAL : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(Primitive::kPrimBoolean,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Equality is fully determined by the inputs and the packed fields.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // An environment is only needed when the check may call into the runtime.
  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime(GetTypeCheckKind());
  }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  static bool CanCallRuntime(TypeCheckKind check_kind) {
    // Mips currently does runtime calls for any other checks.
    return check_kind != TypeCheckKind::kExactCheck;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  // Packed layout: the type-check kind, then the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};
6376
// Narrows the type information of its reference input; used by reference type
// propagation to record a tighter bound (and nullability) for a value.
class HBoundType FINAL : public HExpression<1> {
 public:
  explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
    SetPackedFlag<kFlagUpperCanBeNull>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
    SetRawInputAt(0, input);
  }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  void SetCanBeNull(bool can_be_null) {
    // Nullability can never be relaxed beyond what the upper bound allows.
    DCHECK(GetUpperCanBeNull() || !can_be_null);
    SetPackedFlag<kFlagCanBeNull>(can_be_null);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then CanBeNull() cannot be true).
  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  ReferenceTypeInfo upper_bound_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};
6420
// Implements the check-cast DEX instruction: verifies that `object` can be
// cast to the class loaded by `constant`, throwing otherwise.
class HCheckCast FINAL : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Equality is fully determined by the inputs and the packed fields.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  // Packed layout: the type-check kind, then the must-do-null-check flag
  // (mirrors HInstanceOf).
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};
6465
6466/**
6467 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
6468 * @details We define the combined barrier types that are actually required
6469 * by the Java Memory Model, rather than using exactly the terminology from
6470 * the JSR-133 cookbook.  These should, in many cases, be replaced by acquire/release
6471 * primitives.  Note that the JSR-133 cookbook generally does not deal with
6472 * store atomicity issues, and the recipes there are not always entirely sufficient.
6473 * The current recipe is as follows:
6474 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
6475 * -# Use AnyAny barrier after volatile store.  (StoreLoad is as expensive.)
6476 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
6477 * -# Use StoreStore barrier after all stores but before return from any constructor whose
6478 *    class has final fields.
6479 * -# Use NTStoreStore to order non-temporal stores with respect to all later
6480 *    store-to-memory instructions.  Only generated together with non-temporal stores.
6481 */
6482enum MemBarrierKind {
6483  kAnyStore,
6484  kLoadAny,
6485  kStoreStore,
6486  kAnyAny,
6487  kNTStoreStore,
6488  kLastBarrierKind = kNTStoreStore
6489};
6490std::ostream& operator<<(std::ostream& os, const MemBarrierKind& kind);
6491
// Explicit memory barrier of the given MemBarrierKind.
class HMemoryBarrier FINAL : public HTemplateInstruction<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(
            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
    SetPackedField<BarrierKindField>(barrier_kind);
  }

  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }

  DECLARE_INSTRUCTION(MemoryBarrier);

 private:
  // The barrier kind is kept in the packed fields.
  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBarrierKindSize =
      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
      kFieldBarrierKind + kFieldBarrierKindSize;
  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
};
6516
6517// A constructor fence orders all prior stores to fields that could be accessed via a final field of
6518// the specified object(s), with respect to any subsequent store that might "publish"
6519// (i.e. make visible) the specified object to another thread.
6520//
6521// JLS 17.5.1 "Semantics of final fields" states that a freeze action happens
6522// for all final fields (that were set) at the end of the invoked constructor.
6523//
6524// The constructor fence models the freeze actions for the final fields of an object
6525// being constructed (semantically at the end of the constructor). Constructor fences
6526// have a per-object affinity; two separate objects being constructed get two separate
6527// constructor fences.
6528//
// (Note that if calling a super-constructor or forwarding to another constructor,
// the freezes would happen at the end of *that* constructor being invoked).
6531//
6532// The memory model guarantees that when the object being constructed is "published" after
6533// constructor completion (i.e. escapes the current thread via a store), then any final field
6534// writes must be observable on other threads (once they observe that publication).
6535//
6536// Further, anything written before the freeze, and read by dereferencing through the final field,
6537// must also be visible (so final object field could itself have an object with non-final fields;
6538// yet the freeze must also extend to them).
6539//
6540// Constructor example:
6541//
6542//     class HasFinal {
6543//        final int field;                              Optimizing IR for <init>()V:
6544//        HasFinal() {
6545//          field = 123;                                HInstanceFieldSet(this, HasFinal.field, 123)
6546//          // freeze(this.field);                      HConstructorFence(this)
6547//        }                                             HReturn
6548//     }
6549//
6550// HConstructorFence can serve double duty as a fence for new-instance/new-array allocations of
6551// already-initialized classes; in that case the allocation must act as a "default-initializer"
6552// of the object which effectively writes the class pointer "final field".
6553//
// For example, we can model default-initialization as roughly the equivalent of the following:
6555//
6556//     class Object {
6557//       private final Class header;
6558//     }
6559//
6560//  Java code:                                           Optimizing IR:
6561//
6562//     T new_instance<T>() {
6563//       Object obj = allocate_memory(T.class.size);     obj = HInvoke(art_quick_alloc_object, T)
6564//       obj.header = T.class;                           // header write is done by above call.
6565//       // freeze(obj.header)                           HConstructorFence(obj)
6566//       return (T)obj;
6567//     }
6568//
6569// See also:
6570// * CompilerDriver::RequiresConstructorBarrier
6571// * QuasiAtomic::ThreadFenceForConstructor
6572//
class HConstructorFence FINAL : public HVariableInputSizeInstruction {
                                  // A fence has variable inputs because the inputs can be removed
                                  // after prepare_for_register_allocation phase.
                                  // (TODO: In the future a fence could freeze multiple objects
                                  //        after merging two fences together.)
 public:
  // `fence_object` is the reference that needs to be protected for correct publication.
  //
  // It makes sense in the following situations:
  // * <init> constructors, it's the "this" parameter (i.e. HParameterValue, s.t. IsThis() == true).
  // * new-instance-like instructions, it's the return value (i.e. HNewInstance).
  //
  // After construction the `fence_object` becomes the 0th input.
  // This is not an input in a real sense, but just a convenient place to stash the information
  // about the associated object.
  HConstructorFence(HInstruction* fence_object,
                    uint32_t dex_pc,
                    ArenaAllocator* arena)
    // We strongly suspect there is not a more accurate way to describe the fine-grained reordering
    // constraints described in the class header. We claim that these SideEffects constraints
    // enforce a superset of the real constraints.
    //
    // The ordering described above is conservatively modeled with SideEffects as follows:
    //
    // * To prevent reordering of the publication stores:
    // ----> "Reads of objects" is the initial SideEffect.
    // * For every primitive final field store in the constructor:
    // ----> Union that field's type as a read (e.g. "Read of T") into the SideEffect.
    // * If there are any stores to reference final fields in the constructor:
    // ----> Use a more conservative "AllReads" SideEffect because any stores to any references
    //       that are reachable from `fence_object` also need to be prevented for reordering
    //       (and we do not want to do alias analysis to figure out what those stores are).
    //
    // In the implementation, this initially starts out as an "all reads" side effect; this is an
    // even more conservative approach than the one described above, and prevents all of the
    // above reordering without analyzing any of the instructions in the constructor.
    //
    // If in a later phase we discover that there are no writes to reference final fields,
    // we can refine the side effect to a smaller set of type reads (see above constraints).
      : HVariableInputSizeInstruction(SideEffects::AllReads(),
                                      dex_pc,
                                      arena,
                                      /* number_of_inputs */ 1,
                                      kArenaAllocConstructorFenceInputs) {
    // A fence is always created with its associated object.
    DCHECK(fence_object != nullptr);
    SetRawInputAt(0, fence_object);
  }

  // The object associated with this constructor fence.
  //
  // (Note: This will be null after the prepare_for_register_allocation phase,
  // as all constructor fence inputs are removed there).
  HInstruction* GetFenceObject() const {
    return InputAt(0);
  }

  // Find all the HConstructorFence uses (`fence_use`) for `this` and:
  // - Delete `fence_use` from `this`'s use list.
  // - Delete `this` from `fence_use`'s inputs list.
  // - If the `fence_use` is dead, remove it from the graph.
  //
  // A fence is considered dead once it no longer has any uses
  // and all of the inputs are dead.
  //
  // This must *not* be called during/after prepare_for_register_allocation,
  // because that removes all the inputs to the fences but the fence is actually
  // still considered live.
  static void RemoveConstructorFences(HInstruction* instruction);

  // Check if this constructor fence is protecting
  // an HNewInstance or HNewArray that is also the immediate
  // predecessor of `this`.
  //
  // Returns the associated HNewArray or HNewInstance,
  // or null otherwise.
  HInstruction* GetAssociatedAllocation();

  DECLARE_INSTRUCTION(ConstructorFence);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstructorFence);
};
6655
6656class HMonitorOperation FINAL : public HTemplateInstruction<1> {
6657 public:
6658  enum class OperationKind {
6659    kEnter,
6660    kExit,
6661    kLast = kExit
6662  };
6663
6664  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
6665    : HTemplateInstruction(
6666          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6667          dex_pc) {
6668    SetPackedField<OperationKindField>(kind);
6669    SetRawInputAt(0, object);
6670  }
6671
6672  // Instruction may go into runtime, so we need an environment.
6673  bool NeedsEnvironment() const OVERRIDE { return true; }
6674
6675  bool CanThrow() const OVERRIDE {
6676    // Verifier guarantees that monitor-exit cannot throw.
6677    // This is important because it allows the HGraphBuilder to remove
6678    // a dead throw-catch loop generated for `synchronized` blocks/methods.
6679    return IsEnter();
6680  }
6681
6682  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
6683  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
6684
6685  DECLARE_INSTRUCTION(MonitorOperation);
6686
6687 private:
6688  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
6689  static constexpr size_t kFieldOperationKindSize =
6690      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
6691  static constexpr size_t kNumberOfMonitorOperationPackedBits =
6692      kFieldOperationKind + kFieldOperationKindSize;
6693  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6694                "Too many packed fields.");
6695  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
6696
6697 private:
6698  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
6699};
6700
// Yields `true_value` or `false_value` depending on `condition`
// (i.e. the equivalent of `condition ? true_value : false_value`).
class HSelect FINAL : public HExpression<3> {
 public:
  HSelect(HInstruction* condition,
          HInstruction* true_value,
          HInstruction* false_value,
          uint32_t dex_pc)
      : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
    // Both branch values must agree on their (phi-normalized) type.
    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));

    // First input must be `true_value` or `false_value` to allow codegens to
    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
    // that architectures which implement HSelect as a conditional move also
    // will not need to invert the condition.
    SetRawInputAt(0, false_value);
    SetRawInputAt(1, true_value);
    SetRawInputAt(2, condition);
  }

  HInstruction* GetFalseValue() const { return InputAt(0); }
  HInstruction* GetTrueValue() const { return InputAt(1); }
  HInstruction* GetCondition() const { return InputAt(2); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // The result may be null if either branch value may be null.
  bool CanBeNull() const OVERRIDE {
    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
  }

  DECLARE_INSTRUCTION(Select);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSelect);
};
6737
// A single (source -> destination) move of a value of the given type,
// as processed by the parallel move resolver.
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  // Restores the destination cleared by MarkPending().
  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  // A move is pending while being processed: destination cleared, source valid.
  bool IsPending() const {
    DCHECK(source_.IsValid() || destination_.IsInvalid());
    return destination_.IsInvalid() && source_.IsValid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  Primitive::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return Primitive::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  Primitive::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};
6811
6812std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
6813
6814static constexpr size_t kDefaultNumberOfMoves = 4;
6815
// A group of moves to be performed as one parallel move: in debug builds the
// destinations are checked to be pairwise non-overlapping.
class HParallelMove FINAL : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  // Appends a move. Debug builds verify that this parallel move does not
  // already contain a conflicting move for the same instruction, and that
  // the new destination does not overlap any existing destination.
  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};
6871
6872}  // namespace art
6873
6874#include "nodes_vector.h"
6875
6876#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
6877#include "nodes_shared.h"
6878#endif
6879#ifdef ART_ENABLE_CODEGEN_arm
6880#include "nodes_arm.h"
6881#endif
6882#ifdef ART_ENABLE_CODEGEN_mips
6883#include "nodes_mips.h"
6884#endif
6885#ifdef ART_ENABLE_CODEGEN_x86
6886#include "nodes_x86.h"
6887#endif
6888
6889namespace art {
6890
// Base class for visitors over the instructions of a graph. Subclasses
// override Visit##name for the instruction kinds they care about; by default
// every Visit##name falls through to VisitInstruction.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  // Fallback called by the generated Visit##name methods below.
  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
6920
// Visitor whose generated Visit##name methods delegate to the visit method of
// the instruction's super class rather than directly to VisitInstruction.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
6937
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is on their
// block id.
class HBlocksInLoopIterator : public ValueObject {
 public:
  explicit HBlocksInLoopIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
        index_(0) {
    // Position on the first block that belongs to the loop.
    if (!blocks_in_loop_.IsBitSet(index_)) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    // Skip ahead to the next block id that is in the loop, or to the end.
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(index_)) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
};
6970
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is reverse
// post order.
class HBlocksInLoopReversePostOrderIterator : public ValueObject {
 public:
  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
        index_(0) {
    // Position on the first RPO block that belongs to the loop.
    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    // Skip ahead to the next RPO block that is in the loop, or to the end.
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
};
7003
7004// Returns int64_t value of a properly typed constant.
7005inline int64_t Int64FromConstant(HConstant* constant) {
7006  if (constant->IsIntConstant()) {
7007    return constant->AsIntConstant()->GetValue();
7008  } else if (constant->IsLongConstant()) {
7009    return constant->AsLongConstant()->GetValue();
7010  } else {
7011    DCHECK(constant->IsNullConstant()) << constant->DebugName();
7012    return 0;
7013  }
7014}
7015
7016// Returns true iff instruction is an integral constant (and sets value on success).
7017inline bool IsInt64AndGet(HInstruction* instruction, /*out*/ int64_t* value) {
7018  if (instruction->IsIntConstant()) {
7019    *value = instruction->AsIntConstant()->GetValue();
7020    return true;
7021  } else if (instruction->IsLongConstant()) {
7022    *value = instruction->AsLongConstant()->GetValue();
7023    return true;
7024  } else if (instruction->IsNullConstant()) {
7025    *value = 0;
7026    return true;
7027  }
7028  return false;
7029}
7030
7031#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
7032  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
7033  inline const H##type* HInstruction::As##type() const {                       \
7034    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
7035  }                                                                            \
7036  inline H##type* HInstruction::As##type() {                                   \
7037    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
7038  }
7039
7040  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
7041#undef INSTRUCTION_TYPE_CHECK
7042
// Create space in `blocks` for adding `number_of_new_blocks` entries
// starting just after location `after`. Blocks after `after` are moved
// accordingly.
inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
                        size_t number_of_new_blocks,
                        size_t after) {
  DCHECK_LT(after, blocks->size());
  size_t old_size = blocks->size();
  size_t new_size = old_size + number_of_new_blocks;
  blocks->resize(new_size);
  // Shift the tail [after + 1, old_size) to the end of the enlarged vector.
  // The `number_of_new_blocks` slots following `after` keep stale entries
  // which the caller is expected to overwrite.
  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
}
7054
7055/*
7056 * Hunt "under the hood" of array lengths (leading to array references),
7057 * null checks (also leading to array references), and new arrays
7058 * (leading to the actual length). This makes it more likely related
7059 * instructions become actually comparable.
7060 */
7061inline HInstruction* HuntForDeclaration(HInstruction* instruction) {
7062  while (instruction->IsArrayLength() ||
7063         instruction->IsNullCheck() ||
7064         instruction->IsNewArray()) {
7065    instruction = instruction->IsNewArray()
7066        ? instruction->AsNewArray()->GetLength()
7067        : instruction->InputAt(0);
7068  }
7069  return instruction;
7070}
7071
7072}  // namespace art
7073
7074#endif  // ART_COMPILER_OPTIMIZING_NODES_H_
7075