nodes.h revision c8fb211482e27ead6f015faf7e2b02225f728e99
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18#define ART_COMPILER_OPTIMIZING_NODES_H_
19
20#include <algorithm>
21#include <array>
22#include <type_traits>
23
24#include "base/arena_bit_vector.h"
25#include "base/arena_containers.h"
26#include "base/arena_object.h"
27#include "base/array_ref.h"
28#include "base/iteration_range.h"
29#include "base/stl_util.h"
30#include "base/transform_array_ref.h"
31#include "data_type.h"
32#include "deoptimization_kind.h"
33#include "dex_file.h"
34#include "dex_file_types.h"
35#include "entrypoints/quick/quick_entrypoints_enum.h"
36#include "handle.h"
37#include "handle_scope.h"
38#include "intrinsics_enum.h"
39#include "invoke_type.h"
40#include "locations.h"
41#include "method_reference.h"
42#include "mirror/class.h"
43#include "offsets.h"
44#include "utils/intrusive_forward_list.h"
45
46namespace art {
47
48class GraphChecker;
49class HBasicBlock;
50class HConstructorFence;
51class HCurrentMethod;
52class HDoubleConstant;
53class HEnvironment;
54class HFloatConstant;
55class HGraphBuilder;
56class HGraphVisitor;
57class HInstruction;
58class HIntConstant;
59class HInvoke;
60class HLongConstant;
61class HNullConstant;
62class HParameterValue;
63class HPhi;
64class HSuspendCheck;
65class HTryBoundary;
66class LiveInterval;
67class LocationSummary;
68class SlowPathCode;
69class SsaBuilder;
70
71namespace mirror {
72class DexCache;
73}  // namespace mirror
74
75static const int kDefaultNumberOfBlocks = 8;
76static const int kDefaultNumberOfSuccessors = 2;
77static const int kDefaultNumberOfPredecessors = 2;
78static const int kDefaultNumberOfExceptionalPredecessors = 0;
79static const int kDefaultNumberOfDominatedBlocks = 1;
80static const int kDefaultNumberOfBackEdges = 1;
81
82// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
83static constexpr int32_t kMaxIntShiftDistance = 0x1f;
84// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
85static constexpr int32_t kMaxLongShiftDistance = 0x3f;
86
87static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
88static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);
89
90static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);
91
92static constexpr uint32_t kNoDexPc = -1;
93
94inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
95  // For the purposes of the compiler, the dex files must actually be the same object
96  // if we want to safely treat them as the same. This is especially important for JIT
97  // as custom class loaders can open the same underlying file (or memory) multiple
98  // times and provide different class resolution but no two class loaders should ever
99  // use the same DexFile object - doing so is an unsupported hack that can lead to
100  // all sorts of weird failures.
101  return &lhs == &rhs;
102}
103
// Kinds of comparison conditions. Grouped by the operand types they apply to;
// kCondFirst/kCondLast alias the first and last enumerators so the full range
// can be iterated.
enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers (B/BE/A/AE = below / below-or-equal / above / above-or-equal).
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
  // First and last aliases.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};
122
// Result of building or analyzing an HGraph (see HGraph::BuildDominatorTree()
// and HGraph::AnalyzeLoops()).
enum GraphAnalysisResult {
  kAnalysisSkipped,              // The analysis was not performed.
  kAnalysisInvalidBytecode,      // The input bytecode could not be handled.
  kAnalysisFailThrowCatchLoop,   // A loop header is a catch block (throw-catch loop).
  kAnalysisFailAmbiguousArrayOp, // An ambiguous array operation was encountered.
  kAnalysisSuccess,              // Analysis completed successfully.
};
130
// Reinterprets an integral value as its unsigned counterpart type.
template <typename T>
static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
  using UnsignedT = typename std::make_unsigned<T>::type;
  return static_cast<UnsignedT>(x);
}
135
// An ordered list of HInstructions. Supports insertion/removal at arbitrary
// positions, merging in whole lists (Add/AddBefore/AddAfter), and
// containment/ordering queries (Contains/FoundBefore).
class HInstructionList : public ValueObject {
 public:
  // Creates an empty list.
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  // Append `instruction` / remove `instruction` from this list.
  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise.  Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  // An empty list is represented by a null first instruction (see Clear()).
  bool IsEmpty() const { return first_instruction_ == nullptr; }
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  // Add the instructions of `instruction_list` after/before `cursor`, or at
  // the end of this list.
  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  // Head and tail of the list; both are null when the list is empty.
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HInstructionIteratorHandleChanges;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};
182
183class ReferenceTypeInfo : ValueObject {
184 public:
185  typedef Handle<mirror::Class> TypeHandle;
186
187  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);
188
189  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
190    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
191  }
192
193  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
194    return ReferenceTypeInfo(type_handle, is_exact);
195  }
196
197  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }
198
199  static bool IsValidHandle(TypeHandle handle) {
200    return handle.GetReference() != nullptr;
201  }
202
203  bool IsValid() const {
204    return IsValidHandle(type_handle_);
205  }
206
207  bool IsExact() const { return is_exact_; }
208
209  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
210    DCHECK(IsValid());
211    return GetTypeHandle()->IsObjectClass();
212  }
213
214  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
215    DCHECK(IsValid());
216    return GetTypeHandle()->IsStringClass();
217  }
218
219  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
220    DCHECK(IsValid());
221    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
222  }
223
224  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
225    DCHECK(IsValid());
226    return GetTypeHandle()->IsInterface();
227  }
228
229  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
230    DCHECK(IsValid());
231    return GetTypeHandle()->IsArrayClass();
232  }
233
234  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
235    DCHECK(IsValid());
236    return GetTypeHandle()->IsPrimitiveArray();
237  }
238
239  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
240    DCHECK(IsValid());
241    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
242  }
243
244  bool CanArrayHold(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
245    DCHECK(IsValid());
246    if (!IsExact()) return false;
247    if (!IsArrayClass()) return false;
248    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
249  }
250
251  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
252    DCHECK(IsValid());
253    if (!IsExact()) return false;
254    if (!IsArrayClass()) return false;
255    if (!rti.IsArrayClass()) return false;
256    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
257        rti.GetTypeHandle()->GetComponentType());
258  }
259
260  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }
261
262  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
263    DCHECK(IsValid());
264    DCHECK(rti.IsValid());
265    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
266  }
267
268  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
269    DCHECK(IsValid());
270    DCHECK(rti.IsValid());
271    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
272        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
273  }
274
275  // Returns true if the type information provide the same amount of details.
276  // Note that it does not mean that the instructions have the same actual type
277  // (because the type can be the result of a merge).
278  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
279    if (!IsValid() && !rti.IsValid()) {
280      // Invalid types are equal.
281      return true;
282    }
283    if (!IsValid() || !rti.IsValid()) {
284      // One is valid, the other not.
285      return false;
286    }
287    return IsExact() == rti.IsExact()
288        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
289  }
290
291 private:
292  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
293  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
294      : type_handle_(type_handle), is_exact_(is_exact) { }
295
296  // The class of the object.
297  TypeHandle type_handle_;
298  // Whether or not the type is exact or a superclass of the actual type.
299  // Whether or not we have any information about this type.
300  bool is_exact_;
301};
302
303std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
304
305// Control-flow graph of a method. Contains a list of basic blocks.
306class HGraph : public ArenaObject<kArenaAllocGraph> {
307 public:
308  HGraph(ArenaAllocator* arena,
309         const DexFile& dex_file,
310         uint32_t method_idx,
311         InstructionSet instruction_set,
312         InvokeType invoke_type = kInvalidInvokeType,
313         bool debuggable = false,
314         bool osr = false,
315         int start_instruction_id = 0)
316      : arena_(arena),
317        blocks_(arena->Adapter(kArenaAllocBlockList)),
318        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
319        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
320        entry_block_(nullptr),
321        exit_block_(nullptr),
322        maximum_number_of_out_vregs_(0),
323        number_of_vregs_(0),
324        number_of_in_vregs_(0),
325        temporaries_vreg_slots_(0),
326        has_bounds_checks_(false),
327        has_try_catch_(false),
328        has_simd_(false),
329        has_loops_(false),
330        has_irreducible_loops_(false),
331        debuggable_(debuggable),
332        current_instruction_id_(start_instruction_id),
333        dex_file_(dex_file),
334        method_idx_(method_idx),
335        invoke_type_(invoke_type),
336        in_ssa_form_(false),
337        number_of_cha_guards_(0),
338        instruction_set_(instruction_set),
339        cached_null_constant_(nullptr),
340        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
341        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
342        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
343        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
344        cached_current_method_(nullptr),
345        art_method_(nullptr),
346        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
347        osr_(osr),
348        cha_single_implementation_list_(arena->Adapter(kArenaAllocCHA)) {
349    blocks_.reserve(kDefaultNumberOfBlocks);
350  }
351
352  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
353  void InitializeInexactObjectRTI(VariableSizedHandleScope* handles);
354
355  ArenaAllocator* GetArena() const { return arena_; }
356  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }
357
358  bool IsInSsaForm() const { return in_ssa_form_; }
359  void SetInSsaForm() { in_ssa_form_ = true; }
360
361  HBasicBlock* GetEntryBlock() const { return entry_block_; }
362  HBasicBlock* GetExitBlock() const { return exit_block_; }
363  bool HasExitBlock() const { return exit_block_ != nullptr; }
364
365  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
366  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }
367
368  void AddBlock(HBasicBlock* block);
369
370  void ComputeDominanceInformation();
371  void ClearDominanceInformation();
372  void ClearLoopInformation();
373  void FindBackEdges(ArenaBitVector* visited);
374  GraphAnalysisResult BuildDominatorTree();
375  void SimplifyCFG();
376  void SimplifyCatchBlocks();
377
378  // Analyze all natural loops in this graph. Returns a code specifying that it
379  // was successful or the reason for failure. The method will fail if a loop
380  // is a throw-catch loop, i.e. the header is a catch block.
381  GraphAnalysisResult AnalyzeLoops() const;
382
383  // Iterate over blocks to compute try block membership. Needs reverse post
384  // order and loop information.
385  void ComputeTryBlockInformation();
386
387  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
388  // Returns the instruction to replace the invoke expression or null if the
389  // invoke is for a void method. Note that the caller is responsible for replacing
390  // and removing the invoke instruction.
391  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);
392
393  // Update the loop and try membership of `block`, which was spawned from `reference`.
394  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
395  // should be the new back edge.
396  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
397                                             HBasicBlock* reference,
398                                             bool replace_if_back_edge);
399
400  // Need to add a couple of blocks to test if the loop body is entered and
401  // put deoptimization instructions, etc.
402  void TransformLoopHeaderForBCE(HBasicBlock* header);
403
404  // Adds a new loop directly after the loop with the given header and exit.
405  // Returns the new preheader.
406  HBasicBlock* TransformLoopForVectorization(HBasicBlock* header,
407                                             HBasicBlock* body,
408                                             HBasicBlock* exit);
409
410  // Removes `block` from the graph. Assumes `block` has been disconnected from
411  // other blocks and has no instructions or phis.
412  void DeleteDeadEmptyBlock(HBasicBlock* block);
413
414  // Splits the edge between `block` and `successor` while preserving the
415  // indices in the predecessor/successor lists. If there are multiple edges
416  // between the blocks, the lowest indices are used.
417  // Returns the new block which is empty and has the same dex pc as `successor`.
418  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);
419
420  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
421  void OrderLoopHeaderPredecessors(HBasicBlock* header);
422  void SimplifyLoop(HBasicBlock* header);
423
424  int32_t GetNextInstructionId() {
425    CHECK_NE(current_instruction_id_, INT32_MAX);
426    return current_instruction_id_++;
427  }
428
429  int32_t GetCurrentInstructionId() const {
430    return current_instruction_id_;
431  }
432
433  void SetCurrentInstructionId(int32_t id) {
434    CHECK_GE(id, current_instruction_id_);
435    current_instruction_id_ = id;
436  }
437
438  uint16_t GetMaximumNumberOfOutVRegs() const {
439    return maximum_number_of_out_vregs_;
440  }
441
442  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
443    maximum_number_of_out_vregs_ = new_value;
444  }
445
446  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
447    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
448  }
449
450  void UpdateTemporariesVRegSlots(size_t slots) {
451    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
452  }
453
454  size_t GetTemporariesVRegSlots() const {
455    DCHECK(!in_ssa_form_);
456    return temporaries_vreg_slots_;
457  }
458
459  void SetNumberOfVRegs(uint16_t number_of_vregs) {
460    number_of_vregs_ = number_of_vregs;
461  }
462
463  uint16_t GetNumberOfVRegs() const {
464    return number_of_vregs_;
465  }
466
467  void SetNumberOfInVRegs(uint16_t value) {
468    number_of_in_vregs_ = value;
469  }
470
471  uint16_t GetNumberOfInVRegs() const {
472    return number_of_in_vregs_;
473  }
474
475  uint16_t GetNumberOfLocalVRegs() const {
476    DCHECK(!in_ssa_form_);
477    return number_of_vregs_ - number_of_in_vregs_;
478  }
479
480  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
481    return reverse_post_order_;
482  }
483
484  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() {
485    DCHECK(GetReversePostOrder()[0] == entry_block_);
486    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
487  }
488
489  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
490    return ReverseRange(GetReversePostOrder());
491  }
492
493  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
494    return linear_order_;
495  }
496
497  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
498    return ReverseRange(GetLinearOrder());
499  }
500
501  bool HasBoundsChecks() const {
502    return has_bounds_checks_;
503  }
504
505  void SetHasBoundsChecks(bool value) {
506    has_bounds_checks_ = value;
507  }
508
509  bool IsDebuggable() const { return debuggable_; }
510
511  // Returns a constant of the given type and value. If it does not exist
512  // already, it is created and inserted into the graph. This method is only for
513  // integral types.
514  HConstant* GetConstant(DataType::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);
515
516  // TODO: This is problematic for the consistency of reference type propagation
517  // because it can be created anytime after the pass and thus it will be left
518  // with an invalid type.
519  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);
520
521  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
522    return CreateConstant(value, &cached_int_constants_, dex_pc);
523  }
524  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
525    return CreateConstant(value, &cached_long_constants_, dex_pc);
526  }
527  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
528    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
529  }
530  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
531    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
532  }
533
534  HCurrentMethod* GetCurrentMethod();
535
536  const DexFile& GetDexFile() const {
537    return dex_file_;
538  }
539
540  uint32_t GetMethodIdx() const {
541    return method_idx_;
542  }
543
544  // Get the method name (without the signature), e.g. "<init>"
545  const char* GetMethodName() const;
546
547  // Get the pretty method name (class + name + optionally signature).
548  std::string PrettyMethod(bool with_signature = true) const;
549
550  InvokeType GetInvokeType() const {
551    return invoke_type_;
552  }
553
554  InstructionSet GetInstructionSet() const {
555    return instruction_set_;
556  }
557
558  bool IsCompilingOsr() const { return osr_; }
559
560  ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
561    return cha_single_implementation_list_;
562  }
563
564  void AddCHASingleImplementationDependency(ArtMethod* method) {
565    cha_single_implementation_list_.insert(method);
566  }
567
568  bool HasShouldDeoptimizeFlag() const {
569    return number_of_cha_guards_ != 0;
570  }
571
572  bool HasTryCatch() const { return has_try_catch_; }
573  void SetHasTryCatch(bool value) { has_try_catch_ = value; }
574
575  bool HasSIMD() const { return has_simd_; }
576  void SetHasSIMD(bool value) { has_simd_ = value; }
577
578  bool HasLoops() const { return has_loops_; }
579  void SetHasLoops(bool value) { has_loops_ = value; }
580
581  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
582  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }
583
584  ArtMethod* GetArtMethod() const { return art_method_; }
585  void SetArtMethod(ArtMethod* method) { art_method_ = method; }
586
587  // Returns an instruction with the opposite Boolean value from 'cond'.
588  // The instruction has been inserted into the graph, either as a constant, or
589  // before cursor.
590  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);
591
592  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }
593
594  uint32_t GetNumberOfCHAGuards() { return number_of_cha_guards_; }
595  void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
596  void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }
597
598 private:
599  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
600  void RemoveDeadBlocks(const ArenaBitVector& visited);
601
602  template <class InstructionType, typename ValueType>
603  InstructionType* CreateConstant(ValueType value,
604                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
605                                  uint32_t dex_pc = kNoDexPc) {
606    // Try to find an existing constant of the given value.
607    InstructionType* constant = nullptr;
608    auto cached_constant = cache->find(value);
609    if (cached_constant != cache->end()) {
610      constant = cached_constant->second;
611    }
612
613    // If not found or previously deleted, create and cache a new instruction.
614    // Don't bother reviving a previously deleted instruction, for simplicity.
615    if (constant == nullptr || constant->GetBlock() == nullptr) {
616      constant = new (arena_) InstructionType(value, dex_pc);
617      cache->Overwrite(value, constant);
618      InsertConstant(constant);
619    }
620    return constant;
621  }
622
623  void InsertConstant(HConstant* instruction);
624
625  // Cache a float constant into the graph. This method should only be
626  // called by the SsaBuilder when creating "equivalent" instructions.
627  void CacheFloatConstant(HFloatConstant* constant);
628
629  // See CacheFloatConstant comment.
630  void CacheDoubleConstant(HDoubleConstant* constant);
631
632  ArenaAllocator* const arena_;
633
634  // List of blocks in insertion order.
635  ArenaVector<HBasicBlock*> blocks_;
636
637  // List of blocks to perform a reverse post order tree traversal.
638  ArenaVector<HBasicBlock*> reverse_post_order_;
639
640  // List of blocks to perform a linear order tree traversal. Unlike the reverse
641  // post order, this order is not incrementally kept up-to-date.
642  ArenaVector<HBasicBlock*> linear_order_;
643
644  HBasicBlock* entry_block_;
645  HBasicBlock* exit_block_;
646
647  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
648  uint16_t maximum_number_of_out_vregs_;
649
650  // The number of virtual registers in this method. Contains the parameters.
651  uint16_t number_of_vregs_;
652
653  // The number of virtual registers used by parameters of this method.
654  uint16_t number_of_in_vregs_;
655
656  // Number of vreg size slots that the temporaries use (used in baseline compiler).
657  size_t temporaries_vreg_slots_;
658
659  // Flag whether there are bounds checks in the graph. We can skip
660  // BCE if it's false. It's only best effort to keep it up to date in
661  // the presence of code elimination so there might be false positives.
662  bool has_bounds_checks_;
663
664  // Flag whether there are try/catch blocks in the graph. We will skip
665  // try/catch-related passes if it's false. It's only best effort to keep
666  // it up to date in the presence of code elimination so there might be
667  // false positives.
668  bool has_try_catch_;
669
670  // Flag whether SIMD instructions appear in the graph. If true, the
671  // code generators may have to be more careful spilling the wider
672  // contents of SIMD registers.
673  bool has_simd_;
674
675  // Flag whether there are any loops in the graph. We can skip loop
676  // optimization if it's false. It's only best effort to keep it up
677  // to date in the presence of code elimination so there might be false
678  // positives.
679  bool has_loops_;
680
681  // Flag whether there are any irreducible loops in the graph. It's only
682  // best effort to keep it up to date in the presence of code elimination
683  // so there might be false positives.
684  bool has_irreducible_loops_;
685
686  // Indicates whether the graph should be compiled in a way that
687  // ensures full debuggability. If false, we can apply more
688  // aggressive optimizations that may limit the level of debugging.
689  const bool debuggable_;
690
691  // The current id to assign to a newly added instruction. See HInstruction.id_.
692  int32_t current_instruction_id_;
693
694  // The dex file from which the method is from.
695  const DexFile& dex_file_;
696
697  // The method index in the dex file.
698  const uint32_t method_idx_;
699
700  // If inlined, this encodes how the callee is being invoked.
701  const InvokeType invoke_type_;
702
703  // Whether the graph has been transformed to SSA form. Only used
704  // in debug mode to ensure we are not using properties only valid
705  // for non-SSA form (like the number of temporaries).
706  bool in_ssa_form_;
707
708  // Number of CHA guards in the graph. Used to short-circuit the
709  // CHA guard optimization pass when there is no CHA guard left.
710  uint32_t number_of_cha_guards_;
711
712  const InstructionSet instruction_set_;
713
714  // Cached constants.
715  HNullConstant* cached_null_constant_;
716  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
717  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
718  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
719  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;
720
721  HCurrentMethod* cached_current_method_;
722
723  // The ArtMethod this graph is for. Note that for AOT, it may be null,
724  // for example for methods whose declaring class could not be resolved
725  // (such as when the superclass could not be found).
726  ArtMethod* art_method_;
727
728  // Keep the RTI of inexact Object to avoid having to pass stack handle
729  // collection pointer to passes which may create NullConstant.
730  ReferenceTypeInfo inexact_object_rti_;
731
732  // Whether we are compiling this graph for on stack replacement: this will
733  // make all loops seen as irreducible and emit special stack maps to mark
734  // compiled code entries which the interpreter can directly jump to.
735  const bool osr_;
736
737  // List of methods that are assumed to have single implementation.
738  ArenaSet<ArtMethod*> cha_single_implementation_list_;
739
740  friend class SsaBuilder;           // For caching constants.
741  friend class SsaLivenessAnalysis;  // For the linear order.
742  friend class HInliner;             // For the reverse post order.
743  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
744  DISALLOW_COPY_AND_ASSIGN(HGraph);
745};
746
// Per-loop metadata associated with a loop's header block: the header, the
// back edges, the bit vector of blocks belonging to the loop, irreducibility
// flags, and the loop's HSuspendCheck.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        contains_irreducible_loop_(false),
        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetArena(), graph->GetBlocks().size(), true, kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  // Whether this loop is irreducible, i.e. can be entered through a block
  // other than its header.
  bool IsIrreducible() const { return irreducible_; }
  // Whether an irreducible loop is contained within this loop.
  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  // Replaces back edge `existing` with `new_back_edge`, keeping its position.
  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  // Bit vector of the ids of the blocks that belong to this loop.
  const ArenaBitVector& GetBlocks() const { return blocks_; }

  // Add/remove `block` to/from the set of blocks of this loop.
  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  // A loop is populated once at least one block has been marked as belonging to it.
  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

  bool HasExitEdge() const;

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  bool contains_irreducible_loop_;
  ArenaVector<HBasicBlock*> back_edges_;
  // Growable bit vector indexed by block id; see constructor.
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};
854
855// Stores try/catch information for basic blocks.
856// Note that HGraph is constructed so that catch blocks cannot simultaneously
857// be try blocks.
858class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
859 public:
860  // Try block information constructor.
861  explicit TryCatchInformation(const HTryBoundary& try_entry)
862      : try_entry_(&try_entry),
863        catch_dex_file_(nullptr),
864        catch_type_index_(DexFile::kDexNoIndex16) {
865    DCHECK(try_entry_ != nullptr);
866  }
867
868  // Catch block information constructor.
869  TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
870      : try_entry_(nullptr),
871        catch_dex_file_(&dex_file),
872        catch_type_index_(catch_type_index) {}
873
874  bool IsTryBlock() const { return try_entry_ != nullptr; }
875
876  const HTryBoundary& GetTryEntry() const {
877    DCHECK(IsTryBlock());
878    return *try_entry_;
879  }
880
881  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }
882
883  bool IsCatchAllTypeIndex() const {
884    DCHECK(IsCatchBlock());
885    return !catch_type_index_.IsValid();
886  }
887
888  dex::TypeIndex GetCatchTypeIndex() const {
889    DCHECK(IsCatchBlock());
890    return catch_type_index_;
891  }
892
893  const DexFile& GetCatchDexFile() const {
894    DCHECK(IsCatchBlock());
895    return *catch_dex_file_;
896  }
897
898 private:
899  // One of possibly several TryBoundary instructions entering the block's try.
900  // Only set for try blocks.
901  const HTryBoundary* try_entry_;
902
903  // Exception type information. Only set for catch blocks.
904  const DexFile* catch_dex_file_;
905  const dex::TypeIndex catch_type_index_;
906};
907
// Sentinel lifetime position: -1 wraps around to the maximum size_t value.
static constexpr size_t kNoLifetime = -1;
// Sentinel id held by a block until the graph assigns it a real id.
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
910
911// A block in a method. Contains the list of instructions represented
912// as a double linked list. Each block knows its predecessors and
913// successors.
914
class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  // Creates a block owned by `graph`. The block initially has no id,
  // predecessors, successors, dominator, loop or try/catch information.
  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    // Reserve typical capacities up front to limit arena reallocations.
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  // Views over subsets of the successors; defined in the .cc file.
  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  // Returns whether `block` appears among the successors, searching from
  // index `start_from` onwards.
  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleReturn() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  // Registers `back_edge` as a back edge to this block, lazily creating
  // loop information with `this` as the loop header.
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  // NOTE(review): takes an int but stores into a uint32_t member; relies on
  // callers passing non-negative ids — confirm before widening use.
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  // Number of back edges if this block is a loop header, 0 otherwise.
  // Note the size_t count is narrowed to int here.
  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  // Appends `block` as a successor and records `this` as its predecessor.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  // Replaces successor `existing` with `new_block`, keeping the successor
  // index and updating the predecessor lists of both blocks.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  // Replaces predecessor `existing` with `new_block`, keeping the
  // predecessor index and updating the successor lists of both blocks.
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  // Appends `block` as a predecessor and records `this` as its successor.
  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  // Swaps the two predecessors; only valid on blocks with exactly two.
  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  // Swaps the two successors; only valid on blocks with exactly two.
  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merges the instructions of `other` at the end of `this`.
  void MergeInstructionsWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  // A block is a loop header when it owns loop information naming it header.
  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated.
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  // Lifetime positions; kNoLifetime until assigned via the setters above.
  size_t lifetime_start_;
  size_t lifetime_end_;
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};
1267
1268// Iterates over the LoopInformation of all loops which contain 'block'
1269// from the innermost to the outermost.
1270class HLoopInformationOutwardIterator : public ValueObject {
1271 public:
1272  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1273      : current_(block.GetLoopInformation()) {}
1274
1275  bool Done() const { return current_ == nullptr; }
1276
1277  void Advance() {
1278    DCHECK(!Done());
1279    current_ = current_->GetPreHeader()->GetLoopInformation();
1280  }
1281
1282  HLoopInformation* Current() const {
1283    DCHECK(!Done());
1284    return current_;
1285  }
1286
1287 private:
1288  HLoopInformation* current_;
1289
1290  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1291};
1292
// X-macro listing every concrete instruction class common to all target
// architectures, as M(instruction, superclass) pairs.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClassTableGet, Instruction)                                         \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(ConstructorFence, Instruction)                                      \
  M(CurrentMethod, Instruction)                                         \
  M(ShouldDeoptimizeFlag, Instruction)                                  \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(InvokePolymorphic, Invoke)                                          \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadString, Instruction)                                            \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(NativeDebugInfo, Instruction)                                       \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Ror, BinaryOperation)                                               \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(Select, Instruction)                                                \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \
  M(VecReplicateScalar, VecUnaryOperation)                              \
  M(VecExtractScalar, VecUnaryOperation)                                \
  M(VecReduce, VecUnaryOperation)                                       \
  M(VecCnv, VecUnaryOperation)                                          \
  M(VecNeg, VecUnaryOperation)                                          \
  M(VecAbs, VecUnaryOperation)                                          \
  M(VecNot, VecUnaryOperation)                                          \
  M(VecAdd, VecBinaryOperation)                                         \
  M(VecHalvingAdd, VecBinaryOperation)                                  \
  M(VecSub, VecBinaryOperation)                                         \
  M(VecMul, VecBinaryOperation)                                         \
  M(VecDiv, VecBinaryOperation)                                         \
  M(VecMin, VecBinaryOperation)                                         \
  M(VecMax, VecBinaryOperation)                                         \
  M(VecAnd, VecBinaryOperation)                                         \
  M(VecAndNot, VecBinaryOperation)                                      \
  M(VecOr, VecBinaryOperation)                                          \
  M(VecXor, VecBinaryOperation)                                         \
  M(VecShl, VecBinaryOperation)                                         \
  M(VecShr, VecBinaryOperation)                                         \
  M(VecUShr, VecBinaryOperation)                                        \
  M(VecSetScalars, VecOperation)                                        \
  M(VecMultiplyAccumulate, VecOperation)                                \
  M(VecSADAccumulate, VecOperation)                                     \
  M(VecLoad, VecMemoryOperation)                                        \
  M(VecStore, VecMemoryOperation)                                       \

/*
 * Instructions, shared across several (not all) architectures.
 */
#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
  M(BitwiseNegatedRight, Instruction)                                   \
  M(DataProcWithShifterOp, Instruction)                                 \
  M(MultiplyAccumulate, Instruction)                                    \
  M(IntermediateAddress, Instruction)                                   \
  M(IntermediateAddressIndex, Instruction)
#endif

// No ARM-only instructions at the moment (beyond the shared list above).
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)

// No ARM64-only instructions at the moment (beyond the shared list above).
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)

#ifndef ART_ENABLE_CODEGEN_mips
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                           \
  M(MipsComputeBaseMethodAddress, Instruction)                          \
  M(MipsPackedSwitch, Instruction)
#endif

// No MIPS64-only instructions at the moment.
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86FPNeg, Instruction)                                              \
  M(X86PackedSwitch, Instruction)
#endif

// No x86-64-only instructions at the moment.
#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// All concrete instructions enabled for the current build: the common,
// shared and per-architecture lists combined.
#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// X-macro listing the abstract instruction base classes.
#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
  M(Condition, BinaryOperation)                                         \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)                                                \
  M(VecOperation, Instruction)                                          \
  M(VecUnaryOperation, VecOperation)                                    \
  M(VecBinaryOperation, VecOperation)                                   \
  M(VecMemoryOperation, VecOperation)

// All instruction classes, concrete and abstract.
#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

// Forward-declare the H<type> class for every instruction.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION

// Boilerplate placed in each concrete instruction class: kind accessor,
// debug name, type equality and visitor dispatch.
#define DECLARE_INSTRUCTION(type)                                         \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }    \
  const char* DebugName() const OVERRIDE { return #type; }                \
  bool InstructionTypeEquals(const HInstruction* other) const OVERRIDE {  \
    return other->Is##type();                                             \
  }                                                                       \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// Boilerplate placed in each abstract instruction class: Is/As casts
// that are trivially true/`this` for the class itself.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }
1484
1485template <typename T>
1486class HUseListNode : public ArenaObject<kArenaAllocUseListNode>,
1487                     public IntrusiveForwardListNode<HUseListNode<T>> {
1488 public:
1489  // Get the instruction which has this use as one of the inputs.
1490  T GetUser() const { return user_; }
1491  // Get the position of the input record that this use corresponds to.
1492  size_t GetIndex() const { return index_; }
1493  // Set the position of the input record that this use corresponds to.
1494  void SetIndex(size_t index) { index_ = index; }
1495
1496 private:
1497  HUseListNode(T user, size_t index)
1498      : user_(user), index_(index) {}
1499
1500  T const user_;
1501  size_t index_;
1502
1503  friend class HInstruction;
1504
1505  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
1506};
1507
// A use list: an intrusive singly-linked list of HUseListNode<T>.
template <typename T>
using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1510
1511// This class is used by HEnvironment and HInstruction classes to record the
1512// instructions they use and pointers to the corresponding HUseListNodes kept
1513// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  // Empty record: no used instruction, default-constructed iterator.
  HUserRecord() : instruction_(nullptr), before_use_node_() {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}

  // Copy of `old_record` with the use-list position replaced.
  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
      : HUserRecord(old_record.instruction_, before_use_node) {}
  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
      : instruction_(instruction), before_use_node_(before_use_node) {
    DCHECK(instruction_ != nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  // The iterator is stored as "the node before ours" so the singly-linked use
  // list can be updated with erase_after()-style operations.
  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
  typename HUseList<T>::iterator before_use_node_;
};
1538
1539// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
1540// This is used for HInstruction::GetInputs() to return a container wrapper providing
1541// HInstruction* values even though the underlying container has HUserRecord<>s.
1542struct HInputExtractor {
1543  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
1544    return record.GetInstruction();
1545  }
1546  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
1547    return record.GetInstruction();
1548  }
1549};
1550
1551using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
1552using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
1553
1554/**
1555 * Side-effects representation.
1556 *
1557 * For write/read dependences on fields/arrays, the dependence analysis uses
1558 * type disambiguation (e.g. a float field write cannot modify the value of an
1559 * integer field read) and the access type (e.g.  a reference array write cannot
1560 * modify the value of a reference field read [although it may modify the
1561 * reference fetch prior to reading the field, which is represented by its own
1562 * write/read dependence]). The analysis makes conservative points-to
1563 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1564 * the same, and any reference read depends on any reference read without
1565 * further regard of its type).
1566 *
1567 * The internal representation uses 38-bit and is described in the table below.
1568 * The first line indicates the side effect, and for field/array accesses the
1569 * second line indicates the type of the access (in the order of the
1570 * DataType::Type enum).
1571 * The two numbered lines below indicate the bit position in the bitfield (read
1572 * vertically).
1573 *
1574 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1575 *   +-------------+---------+---------+--------------+---------+---------+
1576 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1577 *   |      3      |333333322|222222221|       1      |111111110|000000000|
1578 *   |      7      |654321098|765432109|       8      |765432109|876543210|
1579 *
1580 * Note that, to ease the implementation, 'changes' bits are least significant
1581 * bits, while 'dependency' bits are most significant bits.
1582 */
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  // No changes and no dependencies.
  static SideEffects None() {
    return SideEffects(0);
  }

  // Every change bit and every dependency bit set.
  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  // All write/read bits plus "can trigger GC"; only "depends on GC" is left out.
  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // A volatile access is conservatively treated as all writes and all reads
  // (it acts as a memory barrier), rather than a single typed bit.
  static SideEffects FieldWriteOfType(DataType::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(DataType::Type type) {
    return SideEffects(TypeFlag(type, kArrayWriteOffset));
  }

  // See FieldWriteOfType() for the treatment of volatile accesses.
  static SideEffects FieldReadOfType(DataType::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(DataType::Type type) {
    return SideEffects(TypeFlag(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Returns this side-effect set with all of `other`'s bits removed.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  // In-place union with `other`.
  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Returns true if `other` is a subset of this side-effect set.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  // Returns true if any 'change' bit is set (writes or can-trigger-GC).
  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  // Returns true if any 'dependency' bit is set (reads or depends-on-GC).
  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  // Returns true if every change and dependency bit is set (equals All()).
  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  // Dependency bits mirror change bits shifted up by kChangeBits, so shifting
  // them down aligns this instruction's reads with `other`'s writes.
  bool MayDependOn(SideEffects other) const {
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    // Walk the bits from most significant (kLastBit) down to 0, mirroring the
    // layout diagram in the class comment above.
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // Nine bits per field/array access group: one bit per data type
  // (see TypeFlag() for the type-to-bit mapping).
  static constexpr int kFieldArrayAnalysisBits = 9;

  // 'Change' half (bits 0-18): field writes, array writes, can-trigger-GC.
  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  // 'Dependency' half (bits 19-37): field reads, array reads, depends-on-GC.
  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  // MayDependOn() relies on the two halves being the same width.
  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Translates type to bit flag. The type must correspond to a Java type.
  static uint64_t TypeFlag(DataType::Type type, int offset) {
    int shift;
    switch (type) {
      case DataType::Type::kReference: shift = 0; break;
      case DataType::Type::kBool:      shift = 1; break;
      case DataType::Type::kInt8:      shift = 2; break;
      case DataType::Type::kUint16:    shift = 3; break;
      case DataType::Type::kInt16:     shift = 4; break;
      case DataType::Type::kInt32:     shift = 5; break;
      case DataType::Type::kInt64:     shift = 6; break;
      case DataType::Type::kFloat32:   shift = 7; break;
      case DataType::Type::kFloat64:   shift = 8; break;
      default:
        LOG(FATAL) << "Unexpected data type " << type;
        UNREACHABLE();
    }
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return UINT64_C(1) << (shift + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};
1790
1791// A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  // Creates an environment with `number_of_vregs` empty slots; locations_ is
  // left empty until AllocateLocations() is called.
  ALWAYS_INLINE HEnvironment(ArenaAllocator* arena,
                             size_t number_of_vregs,
                             ArtMethod* method,
                             uint32_t dex_pc,
                             HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       method_(method),
       dex_pc_(dex_pc),
       holder_(holder) {
  }

  // Copies only the shape of `to_copy` (size, method, dex pc); the vreg values
  // themselves are copied separately via CopyFrom().
  ALWAYS_INLINE HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetMethod(),
                     to_copy.GetDexPc(),
                     holder) {}

  // Sizes locations_ to match vregs_; must be called at most once.
  void AllocateLocations() {
    DCHECK(locations_.empty());
    locations_.resize(vregs_.size());
  }

  // Deep-copies `parent` (and its own parent chain) and attaches it as the
  // outermost parent of this environment's chain.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      // Not the outermost environment yet; recurse up the chain.
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Raw: records the instruction without updating its environment-use list.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  // Number of vreg slots in this environment (not counting parents).
  size_t Size() const { return vregs_.size(); }

  // Parent environment, i.e. the caller's environment when this one belongs
  // to an inlined method; nullptr otherwise.
  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  ArtMethod* GetMethod() const {
    return method_;
  }

  // The instruction that holds this environment.
  HInstruction* GetHolder() const {
    return holder_;
  }


  // An environment with a parent chain belongs to an inlined invocation.
  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  HEnvironment* parent_;
  ArtMethod* method_;
  const uint32_t dex_pc_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
1892
1893class HInstruction : public ArenaObject<kArenaAllocInstruction> {
1894 public:
1895  HInstruction(SideEffects side_effects, uint32_t dex_pc)
1896      : previous_(nullptr),
1897        next_(nullptr),
1898        block_(nullptr),
1899        dex_pc_(dex_pc),
1900        id_(-1),
1901        ssa_index_(-1),
1902        packed_fields_(0u),
1903        environment_(nullptr),
1904        locations_(nullptr),
1905        live_interval_(nullptr),
1906        lifetime_position_(kNoLifetime),
1907        side_effects_(side_effects),
1908        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
1909    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
1910  }
1911
1912  virtual ~HInstruction() {}
1913
1914#define DECLARE_KIND(type, super) k##type,
1915  enum InstructionKind {
1916    FOR_EACH_INSTRUCTION(DECLARE_KIND)
1917  };
1918#undef DECLARE_KIND
1919
1920  HInstruction* GetNext() const { return next_; }
1921  HInstruction* GetPrevious() const { return previous_; }
1922
1923  HInstruction* GetNextDisregardingMoves() const;
1924  HInstruction* GetPreviousDisregardingMoves() const;
1925
1926  HBasicBlock* GetBlock() const { return block_; }
1927  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
1928  void SetBlock(HBasicBlock* block) { block_ = block; }
1929  bool IsInBlock() const { return block_ != nullptr; }
1930  bool IsInLoop() const { return block_->IsInLoop(); }
1931  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
1932  bool IsIrreducibleLoopHeaderPhi() const {
1933    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
1934  }
1935
1936  virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;
1937
1938  ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
1939    // One virtual method is enough, just const_cast<> and then re-add the const.
1940    return ArrayRef<const HUserRecord<HInstruction*>>(
1941        const_cast<HInstruction*>(this)->GetInputRecords());
1942  }
1943
1944  HInputsRef GetInputs() {
1945    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
1946  }
1947
1948  HConstInputsRef GetInputs() const {
1949    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
1950  }
1951
1952  size_t InputCount() const { return GetInputRecords().size(); }
1953  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }
1954
1955  bool HasInput(HInstruction* input) const {
1956    for (const HInstruction* i : GetInputs()) {
1957      if (i == input) {
1958        return true;
1959      }
1960    }
1961    return false;
1962  }
1963
1964  void SetRawInputAt(size_t index, HInstruction* input) {
1965    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
1966  }
1967
1968  virtual void Accept(HGraphVisitor* visitor) = 0;
1969  virtual const char* DebugName() const = 0;
1970
1971  virtual DataType::Type GetType() const { return DataType::Type::kVoid; }
1972
1973  virtual bool NeedsEnvironment() const { return false; }
1974
1975  uint32_t GetDexPc() const { return dex_pc_; }
1976
1977  virtual bool IsControlFlow() const { return false; }
1978
1979  // Can the instruction throw?
1980  // TODO: We should rename to CanVisiblyThrow, as some instructions (like HNewInstance),
1981  // could throw OOME, but it is still OK to remove them if they are unused.
1982  virtual bool CanThrow() const { return false; }
1983  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }
1984
1985  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
1986  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }
1987
1988  // Does not apply for all instructions, but having this at top level greatly
1989  // simplifies the null check elimination.
1990  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
1991  virtual bool CanBeNull() const {
1992    DCHECK_EQ(GetType(), DataType::Type::kReference) << "CanBeNull only applies to reference types";
1993    return true;
1994  }
1995
1996  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
1997    return false;
1998  }
1999
2000  virtual bool IsActualObject() const {
2001    return GetType() == DataType::Type::kReference;
2002  }
2003
2004  void SetReferenceTypeInfo(ReferenceTypeInfo rti);
2005
2006  ReferenceTypeInfo GetReferenceTypeInfo() const {
2007    DCHECK_EQ(GetType(), DataType::Type::kReference);
2008    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
2009                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
2010  }
2011
2012  void AddUseAt(HInstruction* user, size_t index) {
2013    DCHECK(user != nullptr);
2014    // Note: fixup_end remains valid across push_front().
2015    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
2016    HUseListNode<HInstruction*>* new_node =
2017        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HInstruction*>(user, index);
2018    uses_.push_front(*new_node);
2019    FixUpUserRecordsAfterUseInsertion(fixup_end);
2020  }
2021
2022  void AddEnvUseAt(HEnvironment* user, size_t index) {
2023    DCHECK(user != nullptr);
2024    // Note: env_fixup_end remains valid across push_front().
2025    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
2026    HUseListNode<HEnvironment*>* new_node =
2027        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HEnvironment*>(user, index);
2028    env_uses_.push_front(*new_node);
2029    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
2030  }
2031
2032  void RemoveAsUserOfInput(size_t input) {
2033    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
2034    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
2035    input_use.GetInstruction()->uses_.erase_after(before_use_node);
2036    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
2037  }
2038
2039  void RemoveAsUserOfAllInputs() {
2040    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
2041      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
2042      input_use.GetInstruction()->uses_.erase_after(before_use_node);
2043      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
2044    }
2045  }
2046
2047  const HUseList<HInstruction*>& GetUses() const { return uses_; }
2048  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }
2049
2050  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
2051  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
2052  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
2053  bool HasOnlyOneNonEnvironmentUse() const {
2054    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
2055  }
2056
2057  bool IsRemovable() const {
2058    return
2059        !DoesAnyWrite() &&
2060        !CanThrow() &&
2061        !IsSuspendCheck() &&
2062        !IsControlFlow() &&
2063        !IsNativeDebugInfo() &&
2064        !IsParameterValue() &&
2065        // If we added an explicit barrier then we should keep it.
2066        !IsMemoryBarrier() &&
2067        !IsConstructorFence();
2068  }
2069
2070  bool IsDeadAndRemovable() const {
2071    return IsRemovable() && !HasUses();
2072  }
2073
2074  // Does this instruction strictly dominate `other_instruction`?
2075  // Returns false if this instruction and `other_instruction` are the same.
2076  // Aborts if this instruction and `other_instruction` are both phis.
2077  bool StrictlyDominates(HInstruction* other_instruction) const;
2078
2079  int GetId() const { return id_; }
2080  void SetId(int id) { id_ = id; }
2081
2082  int GetSsaIndex() const { return ssa_index_; }
2083  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
2084  bool HasSsaIndex() const { return ssa_index_ != -1; }
2085
2086  bool HasEnvironment() const { return environment_ != nullptr; }
2087  HEnvironment* GetEnvironment() const { return environment_; }
2088  // Set the `environment_` field. Raw because this method does not
2089  // update the uses lists.
2090  void SetRawEnvironment(HEnvironment* environment) {
2091    DCHECK(environment_ == nullptr);
2092    DCHECK_EQ(environment->GetHolder(), this);
2093    environment_ = environment;
2094  }
2095
2096  void InsertRawEnvironment(HEnvironment* environment) {
2097    DCHECK(environment_ != nullptr);
2098    DCHECK_EQ(environment->GetHolder(), this);
2099    DCHECK(environment->GetParent() == nullptr);
2100    environment->parent_ = environment_;
2101    environment_ = environment;
2102  }
2103
2104  void RemoveEnvironment();
2105
2106  // Set the environment of this instruction, copying it from `environment`. While
2107  // copying, the uses lists are being updated.
2108  void CopyEnvironmentFrom(HEnvironment* environment) {
2109    DCHECK(environment_ == nullptr);
2110    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
2111    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
2112    environment_->CopyFrom(environment);
2113    if (environment->GetParent() != nullptr) {
2114      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
2115    }
2116  }
2117
2118  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
2119                                                HBasicBlock* block) {
2120    DCHECK(environment_ == nullptr);
2121    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
2122    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
2123    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
2124    if (environment->GetParent() != nullptr) {
2125      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
2126    }
2127  }
2128
2129  // Returns the number of entries in the environment. Typically, that is the
2130  // number of dex registers in a method. It could be more in case of inlining.
2131  size_t EnvironmentSize() const;
2132
2133  LocationSummary* GetLocations() const { return locations_; }
2134  void SetLocations(LocationSummary* locations) { locations_ = locations; }
2135
2136  void ReplaceWith(HInstruction* instruction);
2137  void ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
2138  void ReplaceInput(HInstruction* replacement, size_t index);
2139
2140  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
2141  // uses of this instruction by `other` are *not* updated.
2142  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
2143    ReplaceWith(other);
2144    other->ReplaceInput(this, use_index);
2145  }
2146
2147  // Move `this` instruction before `cursor`
2148  void MoveBefore(HInstruction* cursor, bool do_checks = true);
2149
2150  // Move `this` before its first user and out of any loops. If there is no
2151  // out-of-loop user that dominates all other users, move the instruction
2152  // to the end of the out-of-loop common dominator of the user's blocks.
2153  //
2154  // This can be used only on non-throwing instructions with no side effects that
2155  // have at least one use but no environment uses.
2156  void MoveBeforeFirstUserAndOutOfLoops();
2157
2158#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2159  bool Is##type() const;                                                       \
2160  const H##type* As##type() const;                                             \
2161  H##type* As##type();
2162
2163  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2164#undef INSTRUCTION_TYPE_CHECK
2165
2166#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2167  bool Is##type() const { return (As##type() != nullptr); }                    \
2168  virtual const H##type* As##type() const { return nullptr; }                  \
2169  virtual H##type* As##type() { return nullptr; }
2170  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2171#undef INSTRUCTION_TYPE_CHECK
2172
2173  // Returns whether the instruction can be moved within the graph.
2174  // TODO: this method is used by LICM and GVN with possibly different
2175  //       meanings? split and rename?
2176  virtual bool CanBeMoved() const { return false; }
2177
2178  // Returns whether the two instructions are of the same kind.
2179  virtual bool InstructionTypeEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
2180    return false;
2181  }
2182
2183  // Returns whether any data encoded in the two instructions is equal.
2184  // This method does not look at the inputs. Both instructions must be
2185  // of the same type, otherwise the method has undefined behavior.
2186  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
2187    return false;
2188  }
2189
2190  // Returns whether two instructions are equal, that is:
2191  // 1) They have the same type and contain the same data (InstructionDataEquals).
2192  // 2) Their inputs are identical.
2193  bool Equals(const HInstruction* other) const;
2194
2195  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
2196  // is adopted and implemented by our C++ compiler(s). Fow now, we need to hide
2197  // the virtual function because the __attribute__((__pure__)) doesn't really
2198  // apply the strong requirement for virtual functions, preventing optimizations.
2199  InstructionKind GetKind() const PURE;
2200  virtual InstructionKind GetKindInternal() const = 0;
2201
2202  virtual size_t ComputeHashCode() const {
2203    size_t result = GetKind();
2204    for (const HInstruction* input : GetInputs()) {
2205      result = (result * 31) + input->GetId();
2206    }
2207    return result;
2208  }
2209
2210  SideEffects GetSideEffects() const { return side_effects_; }
2211  void SetSideEffects(SideEffects other) { side_effects_ = other; }
2212  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }
2213
2214  size_t GetLifetimePosition() const { return lifetime_position_; }
2215  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
2216  LiveInterval* GetLiveInterval() const { return live_interval_; }
2217  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
2218  bool HasLiveInterval() const { return live_interval_ != nullptr; }
2219
2220  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }
2221
2222  // Returns whether the code generation of the instruction will require to have access
2223  // to the current method. Such instructions are:
2224  // (1): Instructions that require an environment, as calling the runtime requires
2225  //      to walk the stack and have the current method stored at a specific stack address.
2226  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
2227  //      to access the dex cache.
2228  bool NeedsCurrentMethod() const {
2229    return NeedsEnvironment() || IsCurrentMethod();
2230  }
2231
2232  // Returns whether the code generation of the instruction will require to have access
2233  // to the dex cache of the current method's declaring class via the current method.
2234  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }
2235
2236  // Does this instruction have any use in an environment before
2237  // control flow hits 'other'?
2238  bool HasAnyEnvironmentUseBefore(HInstruction* other);
2239
2240  // Remove all references to environment uses of this instruction.
2241  // The caller must ensure that this is safe to do.
2242  void RemoveEnvironmentUsers();
2243
2244  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
2245  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }
2246
2247 protected:
2248  // If set, the machine code for this instruction is assumed to be generated by
2249  // its users. Used by liveness analysis to compute use positions accordingly.
2250  static constexpr size_t kFlagEmittedAtUseSite = 0u;
2251  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
2252  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
2253  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;
2254
2255  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
2256    return GetInputRecords()[i];
2257  }
2258
2259  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
2260    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
2261    input_records[index] = input;
2262  }
2263
  // Raw access to all packed fields as a single 32-bit value.
  uint32_t GetPackedFields() const {
    return packed_fields_;
  }

  // Tests the single bit at position `flag` of packed_fields_.
  template <size_t flag>
  bool GetPackedFlag() const {
    return (packed_fields_ & (1u << flag)) != 0u;
  }

  // Sets or clears the single bit at position `flag` of packed_fields_.
  template <size_t flag>
  void SetPackedFlag(bool value = true) {
    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
  }

  // Decodes the multi-bit field described by `BitFieldType` from packed_fields_.
  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }

  // Encodes `value` into the multi-bit field described by `BitFieldType`.
  // DCHECKs that the value fits in the field's bit width.
  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }
2288
 private:
  // After new use nodes have been prepended to `uses_` (all nodes up to, but
  // not including, `fixup_end`), update each affected user's input record to
  // cache an iterator to the node *preceding* its use node. The cached
  // "before" iterator is what allows later removal from the singly-linked
  // use list without a linear search.
  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }
2299
  // After the node following `before_use_node` has been removed from `uses_`,
  // the new successor of `before_use_node` (if any) must have its user's
  // cached "before" iterator re-pointed at `before_use_node`.
  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }
2309
  // Environment-use analogue of FixUpUserRecordsAfterUseInsertion(): after
  // prepending nodes to `env_uses_` (up to `env_fixup_end`), refresh each
  // environment's vreg record with an iterator to the preceding use node.
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }
2319
  // Environment-use analogue of FixUpUserRecordsAfterUseRemoval(): after a
  // removal from `env_uses_`, re-point the successor environment's cached
  // "before" iterator at `before_env_use_node`.
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }
2329
  // Intrusive links to the previous/next instruction in the owning block's
  // instruction list.
  HInstruction* previous_;
  HInstruction* next_;
  // The basic block this instruction belongs to; null until added to one.
  HBasicBlock* block_;
  // Dex program counter this instruction was built from.
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // Packed fields.
  uint32_t packed_fields_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  // Side effects (memory dependencies) of this instruction.
  SideEffects side_effects_;

  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
2380};
2381std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2382
2383// Iterates over the instructions, while preserving the next instruction
2384// in case the current instruction gets removed from the list by the user
2385// of this iterator.
2386class HInstructionIterator : public ValueObject {
2387 public:
2388  explicit HInstructionIterator(const HInstructionList& instructions)
2389      : instruction_(instructions.first_instruction_) {
2390    next_ = Done() ? nullptr : instruction_->GetNext();
2391  }
2392
2393  bool Done() const { return instruction_ == nullptr; }
2394  HInstruction* Current() const { return instruction_; }
2395  void Advance() {
2396    instruction_ = next_;
2397    next_ = Done() ? nullptr : instruction_->GetNext();
2398  }
2399
2400 private:
2401  HInstruction* instruction_;
2402  HInstruction* next_;
2403
2404  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2405};
2406
2407// Iterates over the instructions without saving the next instruction,
2408// therefore handling changes in the graph potentially made by the user
2409// of this iterator.
2410class HInstructionIteratorHandleChanges : public ValueObject {
2411 public:
2412  explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
2413      : instruction_(instructions.first_instruction_) {
2414  }
2415
2416  bool Done() const { return instruction_ == nullptr; }
2417  HInstruction* Current() const { return instruction_; }
2418  void Advance() {
2419    instruction_ = instruction_->GetNext();
2420  }
2421
2422 private:
2423  HInstruction* instruction_;
2424
2425  DISALLOW_COPY_AND_ASSIGN(HInstructionIteratorHandleChanges);
2426};
2427
2428
2429class HBackwardInstructionIterator : public ValueObject {
2430 public:
2431  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2432      : instruction_(instructions.last_instruction_) {
2433    next_ = Done() ? nullptr : instruction_->GetPrevious();
2434  }
2435
2436  bool Done() const { return instruction_ == nullptr; }
2437  HInstruction* Current() const { return instruction_; }
2438  void Advance() {
2439    instruction_ = next_;
2440    next_ = Done() ? nullptr : instruction_->GetPrevious();
2441  }
2442
2443 private:
2444  HInstruction* instruction_;
2445  HInstruction* next_;
2446
2447  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2448};
2449
// Base class for instructions whose number of inputs is only known at
// construction time (e.g. phis, invokes). Inputs are kept in an
// arena-allocated vector rather than a fixed-size array.
class HVariableInputSizeInstruction : public HInstruction {
 public:
  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  void AddInput(HInstruction* input);
  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

  // Removes all the inputs.
  // Also removes this instructions from each input's use list
  // (for non-environment uses only).
  void RemoveAllInputs();

 protected:
  HVariableInputSizeInstruction(SideEffects side_effects,
                                uint32_t dex_pc,
                                ArenaAllocator* arena,
                                size_t number_of_inputs,
                                ArenaAllocKind kind)
      : HInstruction(side_effects, dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kind)) {}

  // Input records, sized at construction, resizable by the methods above.
  ArenaVector<HUserRecord<HInstruction*>> inputs_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HVariableInputSizeInstruction);
};
2480
// Base class for instructions with a fixed number of inputs (N), stored
// inline in a std::array.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

 private:
  // Fixed-size input storage, value-initialized in the constructor.
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};
2498
// HTemplateInstruction specialization for N=0.
// Needed because std::array<T, 0> would still require T to be complete;
// this specialization carries no input storage and returns an empty ArrayRef.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>();
  }

 private:
  friend class SsaBuilder;
};
2516
// Base class for fixed-input instructions that produce a value. The result
// type is stored in the packed fields, starting right after the generic bits.
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(DataType::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc) {
    this->template SetPackedField<TypeField>(type);
  }
  virtual ~HExpression() {}

  DataType::Type GetType() const OVERRIDE {
    return TypeField::Decode(this->GetPackedFields());
  }

 protected:
  // Packed-field layout: the type field occupies the first bits after the
  // generic instruction bits; subclasses continue at kNumberOfExpressionPackedBits.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using TypeField = BitField<DataType::Type, kFieldType, kFieldTypeSize>;
};
2539
// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block. It has no inputs and no
// side effects.
class HReturnVoid FINAL : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};
2554
// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block. Its single input
// is the returned value.
class HReturn FINAL : public HTemplateInstruction<1> {
 public:
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};
2571
// SSA phi node: merges the values of a dex register (`reg_number_`) coming
// from each predecessor of its block, one input per predecessor.
class HPhi FINAL : public HVariableInputSizeInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       DataType::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HVariableInputSizeInstruction(
            SideEffects::None(),
            dex_pc,
            arena,
            number_of_inputs,
            kArenaAllocPhiInputs),
        reg_number_(reg_number) {
    SetPackedField<TypeField>(ToPhiType(type));
    DCHECK_NE(GetType(), DataType::Type::kVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static DataType::Type ToPhiType(DataType::Type type) {
    return DataType::Kind(type);
  }

  // A phi placed in a catch block merges values along exceptional edges.
  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  DataType::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
  void SetType(DataType::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    //  (1) int  -> float/ref (primitive type propagation),
    //  (2) long -> double (primitive type propagation).
    DCHECK(GetType() == new_type ||
           (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kFloat32) ||
           (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kReference) ||
           (GetType() == DataType::Type::kInt64 && new_type == DataType::Type::kFloat64));
    SetPackedField<TypeField>(new_type);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  // The dex register this phi merges.
  uint32_t GetRegNumber() const { return reg_number_; }

  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
  bool IsDead() const { return !IsLive(); }
  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }

  // True if `other` is a phi for the same dex register in the same block.
  bool IsVRegEquivalentOf(const HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Checks the immediate neighbors only: relies on equivalent phis (same dex
  // register) being kept adjacent in the block's phi list, and on neighbors
  // in that list always being phis (AsPhi() is used unchecked).
  bool HasEquivalentPhi() const {
    if (GetPrevious() != nullptr && GetPrevious()->AsPhi()->GetRegNumber() == GetRegNumber()) {
      return true;
    }
    if (GetNext() != nullptr && GetNext()->AsPhi()->GetRegNumber() == GetRegNumber()) {
      return true;
    }
    return false;
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 private:
  // Packed-field layout: type field first, then the liveness and nullability flags.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeField = BitField<DataType::Type, kFieldType, kFieldTypeSize>;

  // Dex register this phi stands for; fixed at construction.
  const uint32_t reg_number_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};
2671
// The exit instruction is the only instruction of the exit block.
// Instructions aborting the method (HThrow and HReturn) must branch to the
// exit block.
class HExit FINAL : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};
2686
// Jumps from one block to another. The block must have a single successor.
class HGoto FINAL : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // The jump target, i.e. the block's single successor.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};
2703
// Base class for compile-time constants. Constants have no inputs, no side
// effects, and are always movable (candidates for GVN/code motion).
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(DataType::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern?
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  // The constant's raw bits, zero-extended to 64 bits.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};
2727
2728class HNullConstant FINAL : public HConstant {
2729 public:
2730  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2731    return true;
2732  }
2733
2734  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2735
2736  size_t ComputeHashCode() const OVERRIDE { return 0; }
2737
2738  // The null constant representation is a 0-bit pattern.
2739  virtual bool IsZeroBitPattern() const { return true; }
2740
2741  DECLARE_INSTRUCTION(NullConstant);
2742
2743 private:
2744  explicit HNullConstant(uint32_t dex_pc = kNoDexPc)
2745      : HConstant(DataType::Type::kReference, dex_pc) {}
2746
2747  friend class HGraph;
2748  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
2749};
2750
// Constants of the type int. Those can be from Dex instructions, or
// synthesized (for example with the if-eqz instruction).
class HIntConstant FINAL : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  // Zero-extends the 32-bit value (no sign extension).
  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant()) << other->DebugName();
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  // Integer constants are used to encode Boolean values as well,
  // where 1 means true and 0 means false.
  bool IsTrue() const { return GetValue() == 1; }
  bool IsFalse() const { return GetValue() == 0; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  // Only HGraph (and listed tests) may create int constants; HGraph caches them.
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(DataType::Type::kInt32, dex_pc), value_(value) {}
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(DataType::Type::kInt32, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};
2793
// Constants of the type long (64-bit).
class HLongConstant FINAL : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant()) << other->DebugName();
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  // Only HGraph may create long constants; HGraph caches them.
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(DataType::Type::kInt64, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};
2823
2824class HFloatConstant FINAL : public HConstant {
2825 public:
2826  float GetValue() const { return value_; }
2827
2828  uint64_t GetValueAsUint64() const OVERRIDE {
2829    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
2830  }
2831
2832  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2833    DCHECK(other->IsFloatConstant()) << other->DebugName();
2834    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
2835  }
2836
2837  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2838
2839  bool IsMinusOne() const OVERRIDE {
2840    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
2841  }
2842  bool IsArithmeticZero() const OVERRIDE {
2843    return std::fpclassify(value_) == FP_ZERO;
2844  }
2845  bool IsArithmeticPositiveZero() const {
2846    return IsArithmeticZero() && !std::signbit(value_);
2847  }
2848  bool IsArithmeticNegativeZero() const {
2849    return IsArithmeticZero() && std::signbit(value_);
2850  }
2851  bool IsZeroBitPattern() const OVERRIDE {
2852    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
2853  }
2854  bool IsOne() const OVERRIDE {
2855    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
2856  }
2857  bool IsNaN() const {
2858    return std::isnan(value_);
2859  }
2860
2861  DECLARE_INSTRUCTION(FloatConstant);
2862
2863 private:
2864  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
2865      : HConstant(DataType::Type::kFloat32, dex_pc), value_(value) {}
2866  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
2867      : HConstant(DataType::Type::kFloat32, dex_pc), value_(bit_cast<float, int32_t>(value)) {}
2868
2869  const float value_;
2870
2871  // Only the SsaBuilder and HGraph can create floating-point constants.
2872  friend class SsaBuilder;
2873  friend class HGraph;
2874  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
2875};
2876
2877class HDoubleConstant FINAL : public HConstant {
2878 public:
2879  double GetValue() const { return value_; }
2880
2881  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }
2882
2883  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2884    DCHECK(other->IsDoubleConstant()) << other->DebugName();
2885    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
2886  }
2887
2888  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2889
2890  bool IsMinusOne() const OVERRIDE {
2891    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
2892  }
2893  bool IsArithmeticZero() const OVERRIDE {
2894    return std::fpclassify(value_) == FP_ZERO;
2895  }
2896  bool IsArithmeticPositiveZero() const {
2897    return IsArithmeticZero() && !std::signbit(value_);
2898  }
2899  bool IsArithmeticNegativeZero() const {
2900    return IsArithmeticZero() && std::signbit(value_);
2901  }
2902  bool IsZeroBitPattern() const OVERRIDE {
2903    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
2904  }
2905  bool IsOne() const OVERRIDE {
2906    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
2907  }
2908  bool IsNaN() const {
2909    return std::isnan(value_);
2910  }
2911
2912  DECLARE_INSTRUCTION(DoubleConstant);
2913
2914 private:
2915  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
2916      : HConstant(DataType::Type::kFloat64, dex_pc), value_(value) {}
2917  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
2918      : HConstant(DataType::Type::kFloat64, dex_pc), value_(bit_cast<double, int64_t>(value)) {}
2919
2920  const double value_;
2921
2922  // Only the SsaBuilder and HGraph can create floating-point constants.
2923  friend class SsaBuilder;
2924  friend class HGraph;
2925  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
2926};
2927
// Conditional branch. A block ending with an HIf instruction must have
// two successors: the true successor at index 0 and the false successor
// at index 1.
class HIf FINAL : public HTemplateInstruction<1> {
 public:
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};
2952
2953
2954// Abstract instruction which marks the beginning and/or end of a try block and
2955// links it to the respective exception handlers. Behaves the same as a Goto in
2956// non-exceptional control flow.
2957// Normal-flow successor is stored at index zero, exception handlers under
2958// higher indices in no particular order.
2959class HTryBoundary FINAL : public HTemplateInstruction<0> {
2960 public:
2961  enum class BoundaryKind {
2962    kEntry,
2963    kExit,
2964    kLast = kExit
2965  };
2966
2967  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
2968      : HTemplateInstruction(SideEffects::None(), dex_pc) {
2969    SetPackedField<BoundaryKindField>(kind);
2970  }
2971
2972  bool IsControlFlow() const OVERRIDE { return true; }
2973
2974  // Returns the block's non-exceptional successor (index zero).
2975  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }
2976
2977  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
2978    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
2979  }
2980
2981  // Returns whether `handler` is among its exception handlers (non-zero index
2982  // successors).
2983  bool HasExceptionHandler(const HBasicBlock& handler) const {
2984    DCHECK(handler.IsCatchBlock());
2985    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
2986  }
2987
2988  // If not present already, adds `handler` to its block's list of exception
2989  // handlers.
2990  void AddExceptionHandler(HBasicBlock* handler) {
2991    if (!HasExceptionHandler(*handler)) {
2992      GetBlock()->AddSuccessor(handler);
2993    }
2994  }
2995
2996  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
2997  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }
2998
2999  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;
3000
3001  DECLARE_INSTRUCTION(TryBoundary);
3002
3003 private:
3004  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
3005  static constexpr size_t kFieldBoundaryKindSize =
3006      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
3007  static constexpr size_t kNumberOfTryBoundaryPackedBits =
3008      kFieldBoundaryKind + kFieldBoundaryKindSize;
3009  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
3010                "Too many packed fields.");
3011  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;
3012
3013  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
3014};
3015
3016// Deoptimize to interpreter, upon checking a condition.
3017class HDeoptimize FINAL : public HVariableInputSizeInstruction {
3018 public:
3019  // Use this constructor when the `HDeoptimize` acts as a barrier, where no code can move
3020  // across.
3021  HDeoptimize(ArenaAllocator* arena, HInstruction* cond, DeoptimizationKind kind, uint32_t dex_pc)
3022      : HVariableInputSizeInstruction(
3023            SideEffects::All(),
3024            dex_pc,
3025            arena,
3026            /* number_of_inputs */ 1,
3027            kArenaAllocMisc) {
3028    SetPackedFlag<kFieldCanBeMoved>(false);
3029    SetPackedField<DeoptimizeKindField>(kind);
3030    SetRawInputAt(0, cond);
3031  }
3032
3033  // Use this constructor when the `HDeoptimize` guards an instruction, and any user
3034  // that relies on the deoptimization to pass should have its input be the `HDeoptimize`
3035  // instead of `guard`.
3036  // We set CanTriggerGC to prevent any intermediate address to be live
3037  // at the point of the `HDeoptimize`.
3038  HDeoptimize(ArenaAllocator* arena,
3039              HInstruction* cond,
3040              HInstruction* guard,
3041              DeoptimizationKind kind,
3042              uint32_t dex_pc)
3043      : HVariableInputSizeInstruction(
3044            SideEffects::CanTriggerGC(),
3045            dex_pc,
3046            arena,
3047            /* number_of_inputs */ 2,
3048            kArenaAllocMisc) {
3049    SetPackedFlag<kFieldCanBeMoved>(true);
3050    SetPackedField<DeoptimizeKindField>(kind);
3051    SetRawInputAt(0, cond);
3052    SetRawInputAt(1, guard);
3053  }
3054
3055  bool CanBeMoved() const OVERRIDE { return GetPackedFlag<kFieldCanBeMoved>(); }
3056
3057  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3058    return (other->CanBeMoved() == CanBeMoved()) && (other->AsDeoptimize()->GetKind() == GetKind());
3059  }
3060
3061  bool NeedsEnvironment() const OVERRIDE { return true; }
3062
3063  bool CanThrow() const OVERRIDE { return true; }
3064
3065  DeoptimizationKind GetDeoptimizationKind() const { return GetPackedField<DeoptimizeKindField>(); }
3066
3067  DataType::Type GetType() const OVERRIDE {
3068    return GuardsAnInput() ? GuardedInput()->GetType() : DataType::Type::kVoid;
3069  }
3070
3071  bool GuardsAnInput() const {
3072    return InputCount() == 2;
3073  }
3074
3075  HInstruction* GuardedInput() const {
3076    DCHECK(GuardsAnInput());
3077    return InputAt(1);
3078  }
3079
3080  void RemoveGuard() {
3081    RemoveInputAt(1);
3082  }
3083
3084  DECLARE_INSTRUCTION(Deoptimize);
3085
3086 private:
3087  static constexpr size_t kFieldCanBeMoved = kNumberOfGenericPackedBits;
3088  static constexpr size_t kFieldDeoptimizeKind = kNumberOfGenericPackedBits + 1;
3089  static constexpr size_t kFieldDeoptimizeKindSize =
3090      MinimumBitsToStore(static_cast<size_t>(DeoptimizationKind::kLast));
3091  static constexpr size_t kNumberOfDeoptimizePackedBits =
3092      kFieldDeoptimizeKind + kFieldDeoptimizeKindSize;
3093  static_assert(kNumberOfDeoptimizePackedBits <= kMaxNumberOfPackedBits,
3094                "Too many packed fields.");
3095  using DeoptimizeKindField =
3096      BitField<DeoptimizationKind, kFieldDeoptimizeKind, kFieldDeoptimizeKindSize>;
3097
3098  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
3099};
3100
// Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
// The compiled code checks this flag value in a guard before devirtualized call and
// if it's true, starts to do deoptimization.
// It has a 4-byte slot on stack.
// TODO: allocate a register for this flag.
class HShouldDeoptimizeFlag FINAL : public HVariableInputSizeInstruction {
 public:
  // CHA guards are only optimized in a separate pass and it has no side effects
  // with regard to other passes.
  HShouldDeoptimizeFlag(ArenaAllocator* arena, uint32_t dex_pc)
      : HVariableInputSizeInstruction(SideEffects::None(), dex_pc, arena, 0, kArenaAllocCHA) {
  }

  // The flag is read as a 32-bit integer.
  DataType::Type GetType() const OVERRIDE { return DataType::Type::kInt32; }

  // We do all CHA guard elimination/motion in a single pass, after which there is no
  // further guard elimination/motion since a guard might have been used for justification
  // of the elimination of another guard. Therefore, we pretend this guard cannot be moved
  // to avoid other optimizations trying to move it.
  bool CanBeMoved() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShouldDeoptimizeFlag);
};
3127
// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod FINAL : public HExpression<0> {
 public:
  // `type` is the representation type of the method pointer on the target
  // (passed in by the caller rather than fixed here).
  explicit HCurrentMethod(DataType::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};
3141
3142// Fetches an ArtMethod from the virtual table or the interface method table
3143// of a class.
3144class HClassTableGet FINAL : public HExpression<1> {
3145 public:
3146  enum class TableKind {
3147    kVTable,
3148    kIMTable,
3149    kLast = kIMTable
3150  };
3151  HClassTableGet(HInstruction* cls,
3152                 DataType::Type type,
3153                 TableKind kind,
3154                 size_t index,
3155                 uint32_t dex_pc)
3156      : HExpression(type, SideEffects::None(), dex_pc),
3157        index_(index) {
3158    SetPackedField<TableKindField>(kind);
3159    SetRawInputAt(0, cls);
3160  }
3161
3162  bool CanBeMoved() const OVERRIDE { return true; }
3163  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3164    return other->AsClassTableGet()->GetIndex() == index_ &&
3165        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
3166  }
3167
3168  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
3169  size_t GetIndex() const { return index_; }
3170
3171  DECLARE_INSTRUCTION(ClassTableGet);
3172
3173 private:
3174  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
3175  static constexpr size_t kFieldTableKindSize =
3176      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
3177  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
3178  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
3179                "Too many packed fields.");
3180  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
3181
3182  // The index of the ArtMethod in the table.
3183  const size_t index_;
3184
3185  DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
3186};
3187
// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
class HPackedSwitch FINAL : public HTemplateInstruction<1> {
 public:
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
    : HTemplateInstruction(SideEffects::None(), dex_pc),
      start_value_(start_value),
      num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  // Terminates its block; successors are the case targets plus the default block.
  bool IsControlFlow() const OVERRIDE { return true; }

  // Case value handled by the first table entry (entries cover consecutive values).
  int32_t GetStartValue() const { return start_value_; }

  // Number of case entries, excluding the default successor.
  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};
3221
// Abstract base class for operations with a single input (e.g. neg, not).
// Unary operations are side-effect free and movable; subclasses provide
// constant folding via the Evaluate() overloads.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  DataType::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Unary operations carry no extra data; equality of opcode and input suffices.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};
3253
// Abstract base class for operations with two inputs. Subclasses provide
// constant folding via the Evaluate() overloads and may declare themselves
// commutative to enable input canonicalization for GVN.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(DataType::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  DataType::Type GetResultType() const { return GetType(); }

  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    // Only swap when exactly the left side is constant; two constants (or none)
    // are left untouched.
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    // Already canonical: identical inputs, or the only constant is on the right.
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Binary operations carry no extra data; equality of opcode and inputs suffices.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`.
  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                              HNullConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};
3343
// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias  // sentinel used for packed-field sizing (MinimumBitsToStore)
};

std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
3354
// Abstract base class for boolean-valued comparisons (==, !=, <, <=, >, >=,
// and the unsigned below/above variants). Stores a ComparisonBias in a packed
// field so that an HCompare merged into the condition keeps its NaN semantics.
class HCondition : public HBinaryOperation {
 public:
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(DataType::Type::kBool, first, second, SideEffects::None(), dex_pc) {
    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
  }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_ABSTRACT_INSTRUCTION(Condition);

  // The condition kind this instruction tests (e.g. kCondEQ for HEqual).
  virtual IfCondition GetCondition() const = 0;

  // The negation of GetCondition() (e.g. kCondNE for HEqual).
  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCondition()->GetPackedFields();
  }

  // Whether this floating-point condition evaluates to true when an input is
  // NaN, given the condition kind and the comparison bias.
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondNE) {
      // NaN compares unequal to everything, including itself.
      return true;
    } else if (if_cond == kCondEQ) {
      return false;
    }
    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
  }

  // Whether this floating-point condition evaluates to false when an input is
  // NaN, given the condition kind and the comparison bias.
  bool IsFPConditionFalseIfNaN() const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondEQ) {
      return true;
    } else if (if_cond == kCondNE) {
      return false;
    }
    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
  }

 protected:
  // Needed if we merge a HCompare into a HCondition.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfConditionPackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Three-way comparison: 1 if x > y, -1 if x < y, 0 otherwise.
  template <typename T>
  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way floating-point comparison honoring the NaN bias: unordered
  // operands yield 1 under gt bias, -1 under lt bias.
  template <typename T>
  int32_t CompareFP(T x, T y) const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
  }

  // Return an integer constant containing the result of a condition evaluated at compile time.
  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCondition);
};
3434
3435// Instruction to check if two inputs are equal to each other.
3436class HEqual FINAL : public HCondition {
3437 public:
3438  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3439      : HCondition(first, second, dex_pc) {}
3440
3441  bool IsCommutative() const OVERRIDE { return true; }
3442
3443  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3444                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3445    return MakeConstantCondition(true, GetDexPc());
3446  }
3447  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3448    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3449  }
3450  // In the following Evaluate methods, a HCompare instruction has
3451  // been merged into this HEqual instruction; evaluate it as
3452  // `Compare(x, y) == 0`.
3453  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3454    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3455                                 GetDexPc());
3456  }
3457  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3458    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3459  }
3460  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3461    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3462  }
3463
3464  DECLARE_INSTRUCTION(Equal);
3465
3466  IfCondition GetCondition() const OVERRIDE {
3467    return kCondEQ;
3468  }
3469
3470  IfCondition GetOppositeCondition() const OVERRIDE {
3471    return kCondNE;
3472  }
3473
3474 private:
3475  template <typename T> static bool Compute(T x, T y) { return x == y; }
3476
3477  DISALLOW_COPY_AND_ASSIGN(HEqual);
3478};
3479
3480class HNotEqual FINAL : public HCondition {
3481 public:
3482  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3483      : HCondition(first, second, dex_pc) {}
3484
3485  bool IsCommutative() const OVERRIDE { return true; }
3486
3487  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3488                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3489    return MakeConstantCondition(false, GetDexPc());
3490  }
3491  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3492    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3493  }
3494  // In the following Evaluate methods, a HCompare instruction has
3495  // been merged into this HNotEqual instruction; evaluate it as
3496  // `Compare(x, y) != 0`.
3497  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3498    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3499  }
3500  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3501    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3502  }
3503  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3504    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3505  }
3506
3507  DECLARE_INSTRUCTION(NotEqual);
3508
3509  IfCondition GetCondition() const OVERRIDE {
3510    return kCondNE;
3511  }
3512
3513  IfCondition GetOppositeCondition() const OVERRIDE {
3514    return kCondEQ;
3515  }
3516
3517 private:
3518  template <typename T> static bool Compute(T x, T y) { return x != y; }
3519
3520  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
3521};
3522
3523class HLessThan FINAL : public HCondition {
3524 public:
3525  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3526      : HCondition(first, second, dex_pc) {}
3527
3528  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3529    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3530  }
3531  // In the following Evaluate methods, a HCompare instruction has
3532  // been merged into this HLessThan instruction; evaluate it as
3533  // `Compare(x, y) < 0`.
3534  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3535    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3536  }
3537  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3538    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3539  }
3540  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3541    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3542  }
3543
3544  DECLARE_INSTRUCTION(LessThan);
3545
3546  IfCondition GetCondition() const OVERRIDE {
3547    return kCondLT;
3548  }
3549
3550  IfCondition GetOppositeCondition() const OVERRIDE {
3551    return kCondGE;
3552  }
3553
3554 private:
3555  template <typename T> static bool Compute(T x, T y) { return x < y; }
3556
3557  DISALLOW_COPY_AND_ASSIGN(HLessThan);
3558};
3559
3560class HLessThanOrEqual FINAL : public HCondition {
3561 public:
3562  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3563      : HCondition(first, second, dex_pc) {}
3564
3565  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3566    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3567  }
3568  // In the following Evaluate methods, a HCompare instruction has
3569  // been merged into this HLessThanOrEqual instruction; evaluate it as
3570  // `Compare(x, y) <= 0`.
3571  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3572    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3573  }
3574  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3575    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3576  }
3577  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3578    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3579  }
3580
3581  DECLARE_INSTRUCTION(LessThanOrEqual);
3582
3583  IfCondition GetCondition() const OVERRIDE {
3584    return kCondLE;
3585  }
3586
3587  IfCondition GetOppositeCondition() const OVERRIDE {
3588    return kCondGT;
3589  }
3590
3591 private:
3592  template <typename T> static bool Compute(T x, T y) { return x <= y; }
3593
3594  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
3595};
3596
3597class HGreaterThan FINAL : public HCondition {
3598 public:
3599  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3600      : HCondition(first, second, dex_pc) {}
3601
3602  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3603    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3604  }
3605  // In the following Evaluate methods, a HCompare instruction has
3606  // been merged into this HGreaterThan instruction; evaluate it as
3607  // `Compare(x, y) > 0`.
3608  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3609    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3610  }
3611  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3612    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3613  }
3614  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3615    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3616  }
3617
3618  DECLARE_INSTRUCTION(GreaterThan);
3619
3620  IfCondition GetCondition() const OVERRIDE {
3621    return kCondGT;
3622  }
3623
3624  IfCondition GetOppositeCondition() const OVERRIDE {
3625    return kCondLE;
3626  }
3627
3628 private:
3629  template <typename T> static bool Compute(T x, T y) { return x > y; }
3630
3631  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
3632};
3633
3634class HGreaterThanOrEqual FINAL : public HCondition {
3635 public:
3636  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3637      : HCondition(first, second, dex_pc) {}
3638
3639  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3640    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3641  }
3642  // In the following Evaluate methods, a HCompare instruction has
3643  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3644  // `Compare(x, y) >= 0`.
3645  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3646    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3647  }
3648  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3649    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3650  }
3651  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3652    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3653  }
3654
3655  DECLARE_INSTRUCTION(GreaterThanOrEqual);
3656
3657  IfCondition GetCondition() const OVERRIDE {
3658    return kCondGE;
3659  }
3660
3661  IfCondition GetOppositeCondition() const OVERRIDE {
3662    return kCondLT;
3663  }
3664
3665 private:
3666  template <typename T> static bool Compute(T x, T y) { return x >= y; }
3667
3668  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
3669};
3670
3671class HBelow FINAL : public HCondition {
3672 public:
3673  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3674      : HCondition(first, second, dex_pc) {}
3675
3676  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3677    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3678  }
3679  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3680    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3681  }
3682  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3683                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3684    LOG(FATAL) << DebugName() << " is not defined for float values";
3685    UNREACHABLE();
3686  }
3687  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3688                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3689    LOG(FATAL) << DebugName() << " is not defined for double values";
3690    UNREACHABLE();
3691  }
3692
3693  DECLARE_INSTRUCTION(Below);
3694
3695  IfCondition GetCondition() const OVERRIDE {
3696    return kCondB;
3697  }
3698
3699  IfCondition GetOppositeCondition() const OVERRIDE {
3700    return kCondAE;
3701  }
3702
3703 private:
3704  template <typename T> static bool Compute(T x, T y) {
3705    return MakeUnsigned(x) < MakeUnsigned(y);
3706  }
3707
3708  DISALLOW_COPY_AND_ASSIGN(HBelow);
3709};
3710
3711class HBelowOrEqual FINAL : public HCondition {
3712 public:
3713  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3714      : HCondition(first, second, dex_pc) {}
3715
3716  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3717    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3718  }
3719  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3720    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3721  }
3722  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3723                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3724    LOG(FATAL) << DebugName() << " is not defined for float values";
3725    UNREACHABLE();
3726  }
3727  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3728                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3729    LOG(FATAL) << DebugName() << " is not defined for double values";
3730    UNREACHABLE();
3731  }
3732
3733  DECLARE_INSTRUCTION(BelowOrEqual);
3734
3735  IfCondition GetCondition() const OVERRIDE {
3736    return kCondBE;
3737  }
3738
3739  IfCondition GetOppositeCondition() const OVERRIDE {
3740    return kCondA;
3741  }
3742
3743 private:
3744  template <typename T> static bool Compute(T x, T y) {
3745    return MakeUnsigned(x) <= MakeUnsigned(y);
3746  }
3747
3748  DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
3749};
3750
3751class HAbove FINAL : public HCondition {
3752 public:
3753  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3754      : HCondition(first, second, dex_pc) {}
3755
3756  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3757    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3758  }
3759  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3760    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3761  }
3762  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3763                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3764    LOG(FATAL) << DebugName() << " is not defined for float values";
3765    UNREACHABLE();
3766  }
3767  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3768                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3769    LOG(FATAL) << DebugName() << " is not defined for double values";
3770    UNREACHABLE();
3771  }
3772
3773  DECLARE_INSTRUCTION(Above);
3774
3775  IfCondition GetCondition() const OVERRIDE {
3776    return kCondA;
3777  }
3778
3779  IfCondition GetOppositeCondition() const OVERRIDE {
3780    return kCondBE;
3781  }
3782
3783 private:
3784  template <typename T> static bool Compute(T x, T y) {
3785    return MakeUnsigned(x) > MakeUnsigned(y);
3786  }
3787
3788  DISALLOW_COPY_AND_ASSIGN(HAbove);
3789};
3790
3791class HAboveOrEqual FINAL : public HCondition {
3792 public:
3793  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3794      : HCondition(first, second, dex_pc) {}
3795
3796  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3797    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3798  }
3799  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3800    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3801  }
3802  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3803                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3804    LOG(FATAL) << DebugName() << " is not defined for float values";
3805    UNREACHABLE();
3806  }
3807  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3808                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3809    LOG(FATAL) << DebugName() << " is not defined for double values";
3810    UNREACHABLE();
3811  }
3812
3813  DECLARE_INSTRUCTION(AboveOrEqual);
3814
3815  IfCondition GetCondition() const OVERRIDE {
3816    return kCondAE;
3817  }
3818
3819  IfCondition GetOppositeCondition() const OVERRIDE {
3820    return kCondB;
3821  }
3822
3823 private:
3824  template <typename T> static bool Compute(T x, T y) {
3825    return MakeUnsigned(x) >= MakeUnsigned(y);
3826  }
3827
3828  DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
3829};
3830
// Instruction to check how two inputs compare to each other.
// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
class HCompare FINAL : public HBinaryOperation {
 public:
  // Note that `comparison_type` is the type of comparison performed
  // between the comparison's inputs, not the type of the instantiated
  // HCompare instruction (which is always DataType::Type::kInt32).
  HCompare(DataType::Type comparison_type,
           HInstruction* first,
           HInstruction* second,
           ComparisonBias bias,
           uint32_t dex_pc)
      : HBinaryOperation(DataType::Type::kInt32,
                         first,
                         second,
                         SideEffectsForArchRuntimeCalls(comparison_type),
                         dex_pc) {
    SetPackedField<ComparisonBiasField>(bias);
    DCHECK_EQ(comparison_type, DataType::Kind(first->GetType()));
    DCHECK_EQ(comparison_type, DataType::Kind(second->GetType()));
  }

  // Three-way comparison: 1 if x > y, -1 if x < y, 0 otherwise.
  template <typename T>
  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way floating-point comparison honoring the NaN bias: unordered
  // operands yield 1 under gt bias, -1 under lt bias.
  template <typename T>
  int32_t ComputeFP(T x, T y) const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    // Note that there is no "cmp-int" Dex instruction so we shouldn't
    // reach this code path when processing a freshly built HIR
    // graph. However HCompare integer instructions can be synthesized
    // by the instruction simplifier to implement IntegerCompare and
    // IntegerSignum intrinsics, so we have to handle this case.
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  // Two HCompare are interchangeable only when their bias (packed field) matches.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCompare()->GetPackedFields();
  }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }

  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
  // Only meaningful for floating-point comparisons.
  bool IsGtBias() const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    return GetBias() == ComparisonBias::kGtBias;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type type ATTRIBUTE_UNUSED) {
    // Comparisons do not require a runtime call in any back end.
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(Compare);

 protected:
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfComparePackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Return an integer constant containing the result of a comparison evaluated at compile time.
  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
    DCHECK(value == -1 || value == 0 || value == 1) << value;
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCompare);
};
3921
3922class HNewInstance FINAL : public HExpression<1> {
3923 public:
3924  HNewInstance(HInstruction* cls,
3925               uint32_t dex_pc,
3926               dex::TypeIndex type_index,
3927               const DexFile& dex_file,
3928               bool finalizable,
3929               QuickEntrypointEnum entrypoint)
3930      : HExpression(DataType::Type::kReference, SideEffects::CanTriggerGC(), dex_pc),
3931        type_index_(type_index),
3932        dex_file_(dex_file),
3933        entrypoint_(entrypoint) {
3934    SetPackedFlag<kFlagFinalizable>(finalizable);
3935    SetRawInputAt(0, cls);
3936  }
3937
3938  dex::TypeIndex GetTypeIndex() const { return type_index_; }
3939  const DexFile& GetDexFile() const { return dex_file_; }
3940
3941  // Calls runtime so needs an environment.
3942  bool NeedsEnvironment() const OVERRIDE { return true; }
3943
3944  // Can throw errors when out-of-memory or if it's not instantiable/accessible.
3945  bool CanThrow() const OVERRIDE { return true; }
3946
3947  bool NeedsChecks() const {
3948    return entrypoint_ == kQuickAllocObjectWithChecks;
3949  }
3950
3951  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }
3952
3953  bool CanBeNull() const OVERRIDE { return false; }
3954
3955  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }
3956
3957  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
3958    entrypoint_ = entrypoint;
3959  }
3960
3961  HLoadClass* GetLoadClass() const {
3962    HInstruction* input = InputAt(0);
3963    if (input->IsClinitCheck()) {
3964      input = input->InputAt(0);
3965    }
3966    DCHECK(input->IsLoadClass());
3967    return input->AsLoadClass();
3968  }
3969
3970  bool IsStringAlloc() const;
3971
3972  DECLARE_INSTRUCTION(NewInstance);
3973
3974 private:
3975  static constexpr size_t kFlagFinalizable = kNumberOfExpressionPackedBits;
3976  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
3977  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
3978                "Too many packed fields.");
3979
3980  const dex::TypeIndex type_index_;
3981  const DexFile& dex_file_;
3982  QuickEntrypointEnum entrypoint_;
3983
3984  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
3985};
3986
// Whether an intrinsic still requires an environment or a dex cache at
// code-generation time; passed to HInvoke::SetIntrinsic.
enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};
3991
// Coarse classification of an intrinsic's heap side effects; passed to
// HInvoke::SetIntrinsic.
enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};
3998
// Whether an intrinsic may throw; passed to HInvoke::SetIntrinsic.
enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
4003
// Base class for all invoke instructions. The inputs are the call arguments,
// possibly followed by instruction-specific extra inputs (see
// GetNumberOfArguments()).
class HInvoke : public HVariableInputSizeInstruction {
 public:
  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  DataType::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  InvokeType GetInvokeType() const {
    return GetPackedField<InvokeTypeField>();
  }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  // Marks this invoke as a recognized intrinsic and records its environment,
  // side-effect and exception properties (defined out of line).
  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }

  // Only intrinsics that do not write to memory may be moved.
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic() && !DoesAnyWrite(); }

  // Two invokes compare equal only when both are the same recognized intrinsic.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  ArtMethod* GetResolvedMethod() const { return resolved_method_; }
  void SetResolvedMethod(ArtMethod* method) { resolved_method_ = method; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed-field layout: invoke type, then return type, then the can-throw
  // flag, stored after the generic instruction bits.
  static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldReturnType =
      kFieldInvokeType + kFieldInvokeTypeSize;
  static constexpr size_t kFieldReturnTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
  using ReturnTypeField = BitField<DataType::Type, kFieldReturnType, kFieldReturnTypeSize>;

  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          DataType::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          ArtMethod* resolved_method,
          InvokeType invoke_type)
    : HVariableInputSizeInstruction(
          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
          dex_pc,
          arena,
          number_of_arguments + number_of_other_inputs,
          kArenaAllocInvokeInputs),
      number_of_arguments_(number_of_arguments),
      resolved_method_(resolved_method),
      dex_method_index_(dex_method_index),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
    SetPackedField<ReturnTypeField>(return_type);
    SetPackedField<InvokeTypeField>(invoke_type);
    // Conservatively assume any call may throw; SetCanThrow() may clear this.
    SetPackedFlag<kFlagCanThrow>(true);
  }

  uint32_t number_of_arguments_;
  ArtMethod* resolved_method_;
  const uint32_t dex_method_index_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};
4113
// Invoke of a method that could not be resolved at compile time (the
// resolved method passed to the base class is null).
class HInvokeUnresolved FINAL : public HInvoke {
 public:
  HInvokeUnresolved(ArenaAllocator* arena,
                    uint32_t number_of_arguments,
                    DataType::Type return_type,
                    uint32_t dex_pc,
                    uint32_t dex_method_index,
                    InvokeType invoke_type)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                nullptr,  // No resolved method.
                invoke_type) {
  }

  DECLARE_INSTRUCTION(InvokeUnresolved);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
};
4137
// Invoke for an invoke-polymorphic call site; always uses kVirtual as its
// invoke type and carries no resolved method.
class HInvokePolymorphic FINAL : public HInvoke {
 public:
  HInvokePolymorphic(ArenaAllocator* arena,
                     uint32_t number_of_arguments,
                     DataType::Type return_type,
                     uint32_t dex_pc,
                     uint32_t dex_method_index)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                nullptr,  // No resolved method.
                kVirtual) {}

  DECLARE_INSTRUCTION(InvokePolymorphic);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokePolymorphic);
};
4159
// Invocation of a static or direct (non-virtual) method with a known target.
class HInvokeStaticOrDirect FINAL : public HInvoke {
 public:
  // Requirements of this method call regarding the class
  // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement {
    kNone,      // Class already initialized.
    kExplicit,  // Static call having explicit clinit check as last input.
    kImplicit,  // Static call implicitly requiring a clinit check.
    kLast = kImplicit
  };

  // Determines how to load the target ArtMethod*.
  enum class MethodLoadKind {
    // Use a String init ArtMethod* loaded from Thread entrypoints.
    kStringInit,

    // Use the method's own ArtMethod* loaded by the register allocator.
    kRecursive,

    // Use PC-relative boot image ArtMethod* address that will be known at link time.
    // Used for boot image methods referenced by boot image code.
    kBootImageLinkTimePcRelative,

    // Use ArtMethod* at a known address, embed the direct address in the code.
    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
    kDirectAddress,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for classes outside boot image when .bss is accessible with a PC-relative load.
    kBssEntry,

    // Make a runtime call to resolve and call the method. This is the last-resort-kind
    // used when other kinds are unimplemented on a particular architecture.
    kRuntimeCall,
  };

  // Determines the location of the code pointer.
  enum class CodePtrLocation {
    // Recursive call, use local PC-relative call instruction.
    kCallSelf,

    // Use code pointer from the ArtMethod*.
    // Used when we don't know the target code. This is also the last-resort-kind used when
    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
    kCallArtMethod,
  };

  struct DispatchInfo {
    MethodLoadKind method_load_kind;
    CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
    //     Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kDirectAddress
    uint64_t method_load_data;
  };

  HInvokeStaticOrDirect(ArenaAllocator* arena,
                        uint32_t number_of_arguments,
                        DataType::Type return_type,
                        uint32_t dex_pc,
                        uint32_t method_index,
                        ArtMethod* resolved_method,
                        DispatchInfo dispatch_info,
                        InvokeType invoke_type,
                        MethodReference target_method,
                        ClinitCheckRequirement clinit_check_requirement)
      : HInvoke(arena,
                number_of_arguments,
                // There is potentially one extra argument for the HCurrentMethod node, and
                // potentially one other if the clinit check is explicit, and potentially
                // one other if the method is a string factory.
                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
                return_type,
                dex_pc,
                method_index,
                resolved_method,
                invoke_type),
        target_method_(target_method),
        dispatch_info_(dispatch_info) {
    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
  }

  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
    bool had_current_method_input = HasCurrentMethodInput();
    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);

    // Using the current method is the default and once we find a better
    // method load kind, we should not go back to using the current method.
    DCHECK(had_current_method_input || !needs_current_method_input);

    if (had_current_method_input && !needs_current_method_input) {
      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
      RemoveInputAt(GetSpecialInputIndex());
    }
    dispatch_info_ = dispatch_info;
  }

  DispatchInfo GetDispatchInfo() const {
    return dispatch_info_;
  }

  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
    DCHECK(InputCount() == GetSpecialInputIndex() ||
           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
    InsertInputAt(GetSpecialInputIndex(), input);
  }

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
      DCHECK(!input_records.empty());
      DCHECK_GT(input_records.size(), GetNumberOfArguments());
      HInstruction* last_input = input_records.back().GetInstruction();
      // Note: `last_input` may be null during arguments setup.
      if (last_input != nullptr) {
        // `last_input` is the last input of a static invoke marked as having
        // an explicit clinit check. It must either be:
        // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
        // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
        DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
      }
    }
    return input_records;
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  bool CanBeNull() const OVERRIDE {
    return GetPackedField<ReturnTypeField>() == DataType::Type::kReference && !IsStringInit();
  }

  // Get the index of the special input, if any.
  //
  // If the invoke HasCurrentMethodInput(), the "special input" is the current
  // method pointer; otherwise there may be one platform-specific special input,
  // such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }

  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeMethodLoadKind() const {
    return GetMethodLoadKind() == MethodLoadKind::kBootImageLinkTimePcRelative ||
           GetMethodLoadKind() == MethodLoadKind::kBssEntry;
  }
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }

  QuickEntrypointEnum GetStringInitEntryPoint() const {
    DCHECK(IsStringInit());
    return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  const DexFile& GetDexFileForPcRelativeDexCache() const;

  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetInvokeType() == kStatic;
  }

  MethodReference GetTargetMethod() const {
    return target_method_;
  }

  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = inputs_.size() - 1u;
    HInstruction* last_input = inputs_.back().GetInstruction();
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kRuntimeCall;
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 private:
  // Packed-field layout: the clinit check requirement is stored after the
  // bits used by HInvoke.
  static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
  static constexpr size_t kFieldClinitCheckRequirementSize =
      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
                                               kFieldClinitCheckRequirement,
                                               kFieldClinitCheckRequirementSize>;

  // Cached values of the resolved method, to avoid needing the mutator lock.
  MethodReference target_method_;
  DispatchInfo dispatch_info_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4409
// Invocation of a virtual method, dispatched through the vtable.
class HInvokeVirtual FINAL : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* arena,
                 uint32_t number_of_arguments,
                 DataType::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 ArtMethod* resolved_method,
                 uint32_t vtable_index)
      : HInvoke(arena,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kVirtual),
        vtable_index_(vtable_index) {}

  bool CanBeNull() const OVERRIDE {
    switch (GetIntrinsic()) {
      // These intrinsics are treated as never returning null.
      case Intrinsics::kThreadCurrentThread:
      case Intrinsics::kStringBufferAppend:
      case Intrinsics::kStringBufferToString:
      case Intrinsics::kStringBuilderAppend:
      case Intrinsics::kStringBuilderToString:
        return false;
      default:
        return HInvoke::CanBeNull();
    }
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 private:
  // Cached vtable index of the resolved method, to avoid needing the mutator lock.
  const uint32_t vtable_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
};
4457
4458class HInvokeInterface FINAL : public HInvoke {
4459 public:
4460  HInvokeInterface(ArenaAllocator* arena,
4461                   uint32_t number_of_arguments,
4462                   DataType::Type return_type,
4463                   uint32_t dex_pc,
4464                   uint32_t dex_method_index,
4465                   ArtMethod* resolved_method,
4466                   uint32_t imt_index)
4467      : HInvoke(arena,
4468                number_of_arguments,
4469                0u,
4470                return_type,
4471                dex_pc,
4472                dex_method_index,
4473                resolved_method,
4474                kInterface),
4475        imt_index_(imt_index) {}
4476
4477  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4478    // TODO: Add implicit null checks in intrinsics.
4479    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4480  }
4481
4482  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
4483    // The assembly stub currently needs it.
4484    return true;
4485  }
4486
4487  uint32_t GetImtIndex() const { return imt_index_; }
4488  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
4489
4490  DECLARE_INSTRUCTION(InvokeInterface);
4491
4492 private:
4493  // Cached value of the resolved method, to avoid needing the mutator lock.
4494  const uint32_t imt_index_;
4495
4496  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
4497};
4498
4499class HNeg FINAL : public HUnaryOperation {
4500 public:
4501  HNeg(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4502      : HUnaryOperation(result_type, input, dex_pc) {
4503    DCHECK_EQ(result_type, DataType::Kind(input->GetType()));
4504  }
4505
4506  template <typename T> static T Compute(T x) { return -x; }
4507
4508  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4509    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4510  }
4511  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4512    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4513  }
4514  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4515    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4516  }
4517  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4518    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4519  }
4520
4521  DECLARE_INSTRUCTION(Neg);
4522
4523 private:
4524  DISALLOW_COPY_AND_ASSIGN(HNeg);
4525};
4526
4527class HNewArray FINAL : public HExpression<2> {
4528 public:
4529  HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc)
4530      : HExpression(DataType::Type::kReference, SideEffects::CanTriggerGC(), dex_pc) {
4531    SetRawInputAt(0, cls);
4532    SetRawInputAt(1, length);
4533  }
4534
4535  // Calls runtime so needs an environment.
4536  bool NeedsEnvironment() const OVERRIDE { return true; }
4537
4538  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
4539  bool CanThrow() const OVERRIDE { return true; }
4540
4541  bool CanBeNull() const OVERRIDE { return false; }
4542
4543  HLoadClass* GetLoadClass() const {
4544    DCHECK(InputAt(0)->IsLoadClass());
4545    return InputAt(0)->AsLoadClass();
4546  }
4547
4548  HInstruction* GetLength() const {
4549    return InputAt(1);
4550  }
4551
4552  DECLARE_INSTRUCTION(NewArray);
4553
4554 private:
4555  DISALLOW_COPY_AND_ASSIGN(HNewArray);
4556};
4557
4558class HAdd FINAL : public HBinaryOperation {
4559 public:
4560  HAdd(DataType::Type result_type,
4561       HInstruction* left,
4562       HInstruction* right,
4563       uint32_t dex_pc = kNoDexPc)
4564      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4565
4566  bool IsCommutative() const OVERRIDE { return true; }
4567
4568  template <typename T> static T Compute(T x, T y) { return x + y; }
4569
4570  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4571    return GetBlock()->GetGraph()->GetIntConstant(
4572        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4573  }
4574  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4575    return GetBlock()->GetGraph()->GetLongConstant(
4576        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4577  }
4578  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4579    return GetBlock()->GetGraph()->GetFloatConstant(
4580        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4581  }
4582  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4583    return GetBlock()->GetGraph()->GetDoubleConstant(
4584        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4585  }
4586
4587  DECLARE_INSTRUCTION(Add);
4588
4589 private:
4590  DISALLOW_COPY_AND_ASSIGN(HAdd);
4591};
4592
4593class HSub FINAL : public HBinaryOperation {
4594 public:
4595  HSub(DataType::Type result_type,
4596       HInstruction* left,
4597       HInstruction* right,
4598       uint32_t dex_pc = kNoDexPc)
4599      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4600
4601  template <typename T> static T Compute(T x, T y) { return x - y; }
4602
4603  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4604    return GetBlock()->GetGraph()->GetIntConstant(
4605        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4606  }
4607  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4608    return GetBlock()->GetGraph()->GetLongConstant(
4609        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4610  }
4611  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4612    return GetBlock()->GetGraph()->GetFloatConstant(
4613        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4614  }
4615  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4616    return GetBlock()->GetGraph()->GetDoubleConstant(
4617        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4618  }
4619
4620  DECLARE_INSTRUCTION(Sub);
4621
4622 private:
4623  DISALLOW_COPY_AND_ASSIGN(HSub);
4624};
4625
4626class HMul FINAL : public HBinaryOperation {
4627 public:
4628  HMul(DataType::Type result_type,
4629       HInstruction* left,
4630       HInstruction* right,
4631       uint32_t dex_pc = kNoDexPc)
4632      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4633
4634  bool IsCommutative() const OVERRIDE { return true; }
4635
4636  template <typename T> static T Compute(T x, T y) { return x * y; }
4637
4638  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4639    return GetBlock()->GetGraph()->GetIntConstant(
4640        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4641  }
4642  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4643    return GetBlock()->GetGraph()->GetLongConstant(
4644        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4645  }
4646  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4647    return GetBlock()->GetGraph()->GetFloatConstant(
4648        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4649  }
4650  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4651    return GetBlock()->GetGraph()->GetDoubleConstant(
4652        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4653  }
4654
4655  DECLARE_INSTRUCTION(Mul);
4656
4657 private:
4658  DISALLOW_COPY_AND_ASSIGN(HMul);
4659};
4660
4661class HDiv FINAL : public HBinaryOperation {
4662 public:
4663  HDiv(DataType::Type result_type,
4664       HInstruction* left,
4665       HInstruction* right,
4666       uint32_t dex_pc)
4667      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4668
4669  template <typename T>
4670  T ComputeIntegral(T x, T y) const {
4671    DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
4672    // Our graph structure ensures we never have 0 for `y` during
4673    // constant folding.
4674    DCHECK_NE(y, 0);
4675    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4676    return (y == -1) ? -x : x / y;
4677  }
4678
4679  template <typename T>
4680  T ComputeFP(T x, T y) const {
4681    DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
4682    return x / y;
4683  }
4684
4685  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4686    return GetBlock()->GetGraph()->GetIntConstant(
4687        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4688  }
4689  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4690    return GetBlock()->GetGraph()->GetLongConstant(
4691        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4692  }
4693  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4694    return GetBlock()->GetGraph()->GetFloatConstant(
4695        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4696  }
4697  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4698    return GetBlock()->GetGraph()->GetDoubleConstant(
4699        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4700  }
4701
4702  DECLARE_INSTRUCTION(Div);
4703
4704 private:
4705  DISALLOW_COPY_AND_ASSIGN(HDiv);
4706};
4707
4708class HRem FINAL : public HBinaryOperation {
4709 public:
4710  HRem(DataType::Type result_type,
4711       HInstruction* left,
4712       HInstruction* right,
4713       uint32_t dex_pc)
4714      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4715
4716  template <typename T>
4717  T ComputeIntegral(T x, T y) const {
4718    DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
4719    // Our graph structure ensures we never have 0 for `y` during
4720    // constant folding.
4721    DCHECK_NE(y, 0);
4722    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4723    return (y == -1) ? 0 : x % y;
4724  }
4725
4726  template <typename T>
4727  T ComputeFP(T x, T y) const {
4728    DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
4729    return std::fmod(x, y);
4730  }
4731
4732  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4733    return GetBlock()->GetGraph()->GetIntConstant(
4734        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4735  }
4736  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4737    return GetBlock()->GetGraph()->GetLongConstant(
4738        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4739  }
4740  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4741    return GetBlock()->GetGraph()->GetFloatConstant(
4742        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4743  }
4744  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4745    return GetBlock()->GetGraph()->GetDoubleConstant(
4746        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4747  }
4748
4749  DECLARE_INSTRUCTION(Rem);
4750
4751 private:
4752  DISALLOW_COPY_AND_ASSIGN(HRem);
4753};
4754
class HDivZeroCheck FINAL : public HExpression<1> {
 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
  // constructor.
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // The check has the same type as the divisor it guards.
  DataType::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  // All the state lives in the input, so any two zero checks on the same
  // input are interchangeable for GVN purposes.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // An environment is required (and the instruction can throw) because a
  // zero divisor results in an ArithmeticException at runtime.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};
4780
4781class HShl FINAL : public HBinaryOperation {
4782 public:
4783  HShl(DataType::Type result_type,
4784       HInstruction* value,
4785       HInstruction* distance,
4786       uint32_t dex_pc = kNoDexPc)
4787      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4788    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
4789    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
4790  }
4791
4792  template <typename T>
4793  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4794    return value << (distance & max_shift_distance);
4795  }
4796
4797  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4798    return GetBlock()->GetGraph()->GetIntConstant(
4799        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4800  }
4801  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4802    return GetBlock()->GetGraph()->GetLongConstant(
4803        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4804  }
4805  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4806                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4807    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4808    UNREACHABLE();
4809  }
4810  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4811                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4812    LOG(FATAL) << DebugName() << " is not defined for float values";
4813    UNREACHABLE();
4814  }
4815  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4816                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4817    LOG(FATAL) << DebugName() << " is not defined for double values";
4818    UNREACHABLE();
4819  }
4820
4821  DECLARE_INSTRUCTION(Shl);
4822
4823 private:
4824  DISALLOW_COPY_AND_ASSIGN(HShl);
4825};
4826
// Signed (arithmetic) right shift. Only the low bits of the shift distance
// are used: the distance is masked with the type-appropriate maximum shift
// distance (kMaxIntShiftDistance / kMaxLongShiftDistance).
class HShr FINAL : public HBinaryOperation {
 public:
  HShr(DataType::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
  }

  // NOTE: `>>` on a negative signed value is implementation-defined in C++;
  // this code assumes the common arithmetic (sign-extending) behavior.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value >> (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};
4872
4873class HUShr FINAL : public HBinaryOperation {
4874 public:
4875  HUShr(DataType::Type result_type,
4876        HInstruction* value,
4877        HInstruction* distance,
4878        uint32_t dex_pc = kNoDexPc)
4879      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4880    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
4881    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
4882  }
4883
4884  template <typename T>
4885  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4886    typedef typename std::make_unsigned<T>::type V;
4887    V ux = static_cast<V>(value);
4888    return static_cast<T>(ux >> (distance & max_shift_distance));
4889  }
4890
4891  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4892    return GetBlock()->GetGraph()->GetIntConstant(
4893        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4894  }
4895  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4896    return GetBlock()->GetGraph()->GetLongConstant(
4897        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4898  }
4899  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4900                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4901    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4902    UNREACHABLE();
4903  }
4904  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4905                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4906    LOG(FATAL) << DebugName() << " is not defined for float values";
4907    UNREACHABLE();
4908  }
4909  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4910                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4911    LOG(FATAL) << DebugName() << " is not defined for double values";
4912    UNREACHABLE();
4913  }
4914
4915  DECLARE_INSTRUCTION(UShr);
4916
4917 private:
4918  DISALLOW_COPY_AND_ASSIGN(HUShr);
4919};
4920
// Bitwise AND. Commutative; only defined for integral inputs — the
// float/double Evaluate overloads abort.
class HAnd FINAL : public HBinaryOperation {
 public:
  HAnd(DataType::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> static T Compute(T x, T y) { return x & y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(And);

 private:
  DISALLOW_COPY_AND_ASSIGN(HAnd);
};
4957
// Bitwise OR. Commutative; only defined for integral inputs — the
// float/double Evaluate overloads abort.
class HOr FINAL : public HBinaryOperation {
 public:
  HOr(DataType::Type result_type,
      HInstruction* left,
      HInstruction* right,
      uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> static T Compute(T x, T y) { return x | y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Or);

 private:
  DISALLOW_COPY_AND_ASSIGN(HOr);
};
4994
// Bitwise XOR. Commutative; only defined for integral inputs — the
// float/double Evaluate overloads abort.
class HXor FINAL : public HBinaryOperation {
 public:
  HXor(DataType::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> static T Compute(T x, T y) { return x ^ y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Xor);

 private:
  DISALLOW_COPY_AND_ASSIGN(HXor);
};
5031
5032class HRor FINAL : public HBinaryOperation {
5033 public:
5034  HRor(DataType::Type result_type, HInstruction* value, HInstruction* distance)
5035    : HBinaryOperation(result_type, value, distance) {
5036    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5037    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5038  }
5039
5040  template <typename T>
5041  static T Compute(T value, int32_t distance, int32_t max_shift_value) {
5042    typedef typename std::make_unsigned<T>::type V;
5043    V ux = static_cast<V>(value);
5044    if ((distance & max_shift_value) == 0) {
5045      return static_cast<T>(ux);
5046    } else {
5047      const V reg_bits = sizeof(T) * 8;
5048      return static_cast<T>(ux >> (distance & max_shift_value)) |
5049                           (value << (reg_bits - (distance & max_shift_value)));
5050    }
5051  }
5052
5053  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
5054    return GetBlock()->GetGraph()->GetIntConstant(
5055        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5056  }
5057  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
5058    return GetBlock()->GetGraph()->GetLongConstant(
5059        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5060  }
5061  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5062                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5063    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5064    UNREACHABLE();
5065  }
5066  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5067                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5068    LOG(FATAL) << DebugName() << " is not defined for float values";
5069    UNREACHABLE();
5070  }
5071  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5072                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5073    LOG(FATAL) << DebugName() << " is not defined for double values";
5074    UNREACHABLE();
5075  }
5076
5077  DECLARE_INSTRUCTION(Ror);
5078
5079 private:
5080  DISALLOW_COPY_AND_ASSIGN(HRor);
5081};
5082
5083// The value of a parameter in this method. Its location depends on
5084// the calling convention.
class HParameterValue FINAL : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  dex::TypeIndex type_index,
                  uint8_t index,
                  DataType::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    // The 'this' parameter can never be null; any other parameter starts
    // out as possibly-null until SetCanBeNull() says otherwise.
    SetPackedFlag<kFlagIsThis>(is_this);
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const dex::TypeIndex type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};
5126
// Bitwise complement (~). Only defined for integral inputs — the
// float/double Evaluate overloads abort.
class HNot FINAL : public HUnaryOperation {
 public:
  HNot(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  // All the state lives in the input, so any two HNots on the same input
  // are interchangeable.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> static T Compute(T x) { return ~x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Not);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNot);
};
5159
// Logical negation of a boolean value. Only the int overload of Evaluate
// is defined, and the input is required to be 0 or 1 (see the DCHECK in
// Compute); long/float/double overloads abort.
class HBooleanNot FINAL : public HUnaryOperation {
 public:
  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(DataType::Type::kBool, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  // All the state lives in the input, so any two boolean-nots on the same
  // input are interchangeable.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Precondition: `x` must already be a canonical boolean (0 or 1).
  template <typename T> static bool Compute(T x) {
    DCHECK(IsUint<1>(x)) << x;
    return !x;
  }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for long values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BooleanNot);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
};
5196
// Primitive type conversion (e.g. int-to-long, float-to-int).
class HTypeConversion FINAL : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
    // Invariant: We should never generate a conversion to a Boolean value.
    DCHECK_NE(DataType::Type::kBool, result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  DataType::Type GetInputType() const { return GetInput()->GetType(); }
  DataType::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  // All the state lives in the input and the result type, so conversions
  // with the same input and type are interchangeable.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result.  If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};
5225
// Sentinel meaning "no register assigned"; -1 converts to the maximum
// uint32_t value.
static constexpr uint32_t kNoRegNumber = -1;
5227
class HNullCheck FINAL : public HExpression<1> {
 public:
  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
  // constructor.
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  // All the state lives in the input, so any two null checks on the same
  // input are interchangeable.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // An environment is required (and the instruction can throw) because a
  // null input results in a NullPointerException at runtime.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  // After the check has executed, its value is known non-null.
  bool CanBeNull() const OVERRIDE { return false; }


  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};
5254
5255// Embeds an ArtField and all the information required by the compiler. We cache
5256// that information to avoid requiring the mutator lock every time we need it.
class FieldInfo : public ValueObject {
 public:
  // Immutable snapshot of an ArtField's compiler-relevant properties,
  // captured at construction so later accesses need no mutator lock.
  FieldInfo(ArtField* field,
            MemberOffset field_offset,
            DataType::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file)
      : field_(field),
        field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file) {}

  ArtField* GetField() const { return field_; }
  MemberOffset GetFieldOffset() const { return field_offset_; }
  DataType::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }

 private:
  ArtField* const field_;
  const MemberOffset field_offset_;
  const DataType::Type field_type_;
  const bool is_volatile_;
  // Field index within `dex_file_`.
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
};
5291
// Load of an instance field. Input 0 is the object reference.
class HInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    ArtField* field,
                    DataType::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, value);
  }

  // Volatile loads are never moved or merged.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Two gets are considered equal when they read the same field offset;
  // the object input is compared separately by the caller.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  // Mix the field offset into the hash so gets of different fields land in
  // different buckets.
  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};
5341
// Store to an instance field. Input 0 is the object reference, input 1 is
// the value to store.
class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    ArtField* field,
                    DataType::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Conservatively assume the stored value may be null until an
    // analysis pass calls ClearValueCanBeNull().
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};
5391
// Load of an array element. Input 0 is the array, input 1 is the index.
class HArrayGet FINAL : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            DataType::Type type,
            uint32_t dex_pc)
     : HArrayGet(array,
                 index,
                 type,
                 SideEffects::ArrayReadOfType(type),
                 dex_pc,
                 /* is_string_char_at */ false) {}

  HArrayGet(HInstruction* array,
            HInstruction* index,
            DataType::Type type,
            SideEffects side_effects,
            uint32_t dex_pc,
            bool is_string_char_at)
      : HExpression(type, side_effects, dex_pc) {
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  // All the state lives in the inputs, so two gets with the same inputs
  // are interchangeable.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, unless the array is the result of NewArray, the array access is always
    // preceded by some form of null NullCheck necessary for the bounds check, usually
    // implicit null check on the ArrayLength input to BoundsCheck or Deoptimize for
    // dynamic BCE. There are cases when these could be removed to produce better code.
    // If we ever add optimizations to do so we should allow an implicit check here
    // (as long as the address falls in the first page).
    //
    // As an example of such fancy optimization, we could eliminate BoundsCheck for
    //     a = cond ? new int[1] : null;
    //     a[0];  // The Phi does not need bounds check for either input.
    return false;
  }

  // Returns true if `other` is the type-punned twin of this get: same block,
  // array, index and dex pc, but one typed int/long and the other
  // float/double (enforced by the debug checks below).
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      if (DataType::IsIntOrLongType(GetType())) {
        DCHECK(DataType::IsFloatingPointType(other->GetType())) << other->GetType();
      } else {
        DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
        DCHECK(DataType::IsIntOrLongType(other->GetType())) << other->GetType();
      }
    }
    return result;
  }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  // We treat a String as an array, creating the HArrayGet from String.charAt()
  // intrinsic in the instruction simplifier. We can always determine whether
  // a particular HArrayGet is actually a String.charAt() by looking at the type
  // of the input but that requires holding the mutator lock, so we prefer to use
  // a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};
5472
// Stores `value` into `array` at `index`: `array[index] = value`.
// A reference store may require a runtime type check and can then throw
// ArrayStoreException.
class HArraySet FINAL : public HTemplateInstruction<3> {
 public:
  // Convenience constructor that derives a best-guess set of side effects
  // from the (possibly imprecise) component type.
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            DataType::Type expected_component_type,
            uint32_t dex_pc)
      : HArraySet(array,
                  index,
                  value,
                  expected_component_type,
                  // Make a best guess for side effects now, may be refined during SSA building.
                  ComputeSideEffects(GetComponentType(value->GetType(), expected_component_type)),
                  dex_pc) {}

  // Main constructor with explicitly provided side effects.
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            DataType::Type expected_component_type,
            SideEffects side_effects,
            uint32_t dex_pc)
      : HTemplateInstruction(side_effects, dex_pc) {
    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
    // Only reference stores may need a type check; optimizations may clear the
    // flag later via ClearNeedsTypeCheck().
    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == DataType::Type::kReference);
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We call a runtime method to throw ArrayStoreException.
    return NeedsTypeCheck();
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  void ClearNeedsTypeCheck() {
    SetPackedFlag<kFlagNeedsTypeCheck>(false);
  }

  void ClearValueCanBeNull() {
    SetPackedFlag<kFlagValueCanBeNull>(false);
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
  }

  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
  bool StaticTypeOfArrayIsObjectArray() const {
    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  // The component type actually used for this store; see the static overload
  // below for why the value type may override the expected type.
  DataType::Type GetComponentType() const {
    return GetComponentType(GetValue()->GetType(), GetRawExpectedComponentType());
  }

  static DataType::Type GetComponentType(DataType::Type value_type,
                                         DataType::Type expected_component_type) {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type` comes from SSA building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    return ((value_type == DataType::Type::kFloat32) || (value_type == DataType::Type::kFloat64))
        ? value_type
        : expected_component_type;
  }

  DataType::Type GetRawExpectedComponentType() const {
    return GetPackedField<ExpectedComponentTypeField>();
  }

  // The array write itself plus whatever a runtime call may add
  // (CanTriggerGC for reference stores).
  static SideEffects ComputeSideEffects(DataType::Type type) {
    return SideEffects::ArrayWriteOfType(type).Union(SideEffectsForArchRuntimeCalls(type));
  }

  static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type value_type) {
    return (value_type == DataType::Type::kReference) ? SideEffects::CanTriggerGC()
                                                      : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  // Packed field/flag layout on top of the generic bits.
  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldExpectedComponentTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kFlagNeedsTypeCheck =
      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
  static constexpr size_t kNumberOfArraySetPackedBits =
      kFlagStaticTypeOfArrayIsObjectArray + 1;
  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ExpectedComponentTypeField =
      BitField<DataType::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};
5587
5588class HArrayLength FINAL : public HExpression<1> {
5589 public:
5590  HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
5591      : HExpression(DataType::Type::kInt32, SideEffects::None(), dex_pc) {
5592    SetPackedFlag<kFlagIsStringLength>(is_string_length);
5593    // Note that arrays do not change length, so the instruction does not
5594    // depend on any write.
5595    SetRawInputAt(0, array);
5596  }
5597
5598  bool CanBeMoved() const OVERRIDE { return true; }
5599  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5600    return true;
5601  }
5602  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
5603    return obj == InputAt(0);
5604  }
5605
5606  bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }
5607
5608  DECLARE_INSTRUCTION(ArrayLength);
5609
5610 private:
5611  // We treat a String as an array, creating the HArrayLength from String.length()
5612  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
5613  // determine whether a particular HArrayLength is actually a String.length() by
5614  // looking at the type of the input but that requires holding the mutator lock, so
5615  // we prefer to use a flag, so that code generators don't need to do the locking.
5616  static constexpr size_t kFlagIsStringLength = kNumberOfExpressionPackedBits;
5617  static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
5618  static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
5619                "Too many packed fields.");
5620
5621  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
5622};
5623
5624class HBoundsCheck FINAL : public HExpression<2> {
5625 public:
5626  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
5627  // constructor.
5628  HBoundsCheck(HInstruction* index,
5629               HInstruction* length,
5630               uint32_t dex_pc,
5631               bool string_char_at = false)
5632      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
5633    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(index->GetType()));
5634    SetPackedFlag<kFlagIsStringCharAt>(string_char_at);
5635    SetRawInputAt(0, index);
5636    SetRawInputAt(1, length);
5637  }
5638
5639  bool CanBeMoved() const OVERRIDE { return true; }
5640  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5641    return true;
5642  }
5643
5644  bool NeedsEnvironment() const OVERRIDE { return true; }
5645
5646  bool CanThrow() const OVERRIDE { return true; }
5647
5648  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
5649
5650  HInstruction* GetIndex() const { return InputAt(0); }
5651
5652  DECLARE_INSTRUCTION(BoundsCheck);
5653
5654 private:
5655  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
5656
5657  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
5658};
5659
5660class HSuspendCheck FINAL : public HTemplateInstruction<0> {
5661 public:
5662  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
5663      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}
5664
5665  bool NeedsEnvironment() const OVERRIDE {
5666    return true;
5667  }
5668
5669  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
5670  SlowPathCode* GetSlowPath() const { return slow_path_; }
5671
5672  DECLARE_INSTRUCTION(SuspendCheck);
5673
5674 private:
5675  // Only used for code generation, in order to share the same slow path between back edges
5676  // of a same loop.
5677  SlowPathCode* slow_path_;
5678
5679  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
5680};
5681
5682// Pseudo-instruction which provides the native debugger with mapping information.
5683// It ensures that we can generate line number and local variables at this point.
5684class HNativeDebugInfo : public HTemplateInstruction<0> {
5685 public:
5686  explicit HNativeDebugInfo(uint32_t dex_pc)
5687      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}
5688
5689  bool NeedsEnvironment() const OVERRIDE {
5690    return true;
5691  }
5692
5693  DECLARE_INSTRUCTION(NativeDebugInfo);
5694
5695 private:
5696  DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
5697};
5698
5699/**
5700 * Instruction to load a Class object.
5701 */
5702class HLoadClass FINAL : public HInstruction {
5703 public:
5704  // Determines how to load the Class.
5705  enum class LoadKind {
5706    // We cannot load this class. See HSharpening::SharpenLoadClass.
5707    kInvalid = -1,
5708
5709    // Use the Class* from the method's own ArtMethod*.
5710    kReferrersClass,
5711
5712    // Use PC-relative boot image Class* address that will be known at link time.
5713    // Used for boot image classes referenced by boot image code.
5714    kBootImageLinkTimePcRelative,
5715
5716    // Use a known boot image Class* address, embedded in the code by the codegen.
5717    // Used for boot image classes referenced by apps in AOT- and JIT-compiled code.
5718    kBootImageAddress,
5719
5720    // Use a PC-relative load from a boot image ClassTable mmapped into the .bss
5721    // of the oat file.
5722    kBootImageClassTable,
5723
5724    // Load from an entry in the .bss section using a PC-relative load.
5725    // Used for classes outside boot image when .bss is accessible with a PC-relative load.
5726    kBssEntry,
5727
5728    // Load from the root table associated with the JIT compiled method.
5729    kJitTableAddress,
5730
5731    // Load using a simple runtime call. This is the fall-back load kind when
5732    // the codegen is unable to use another appropriate kind.
5733    kRuntimeCall,
5734
5735    kLast = kRuntimeCall
5736  };
5737
5738  HLoadClass(HCurrentMethod* current_method,
5739             dex::TypeIndex type_index,
5740             const DexFile& dex_file,
5741             Handle<mirror::Class> klass,
5742             bool is_referrers_class,
5743             uint32_t dex_pc,
5744             bool needs_access_check)
5745      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
5746        special_input_(HUserRecord<HInstruction*>(current_method)),
5747        type_index_(type_index),
5748        dex_file_(dex_file),
5749        klass_(klass),
5750        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
5751    // Referrers class should not need access check. We never inline unverified
5752    // methods so we can't possibly end up in this situation.
5753    DCHECK(!is_referrers_class || !needs_access_check);
5754
5755    SetPackedField<LoadKindField>(
5756        is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kRuntimeCall);
5757    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
5758    SetPackedFlag<kFlagIsInBootImage>(false);
5759    SetPackedFlag<kFlagGenerateClInitCheck>(false);
5760  }
5761
5762  void SetLoadKind(LoadKind load_kind);
5763
5764  LoadKind GetLoadKind() const {
5765    return GetPackedField<LoadKindField>();
5766  }
5767
5768  bool CanBeMoved() const OVERRIDE { return true; }
5769
5770  bool InstructionDataEquals(const HInstruction* other) const;
5771
5772  size_t ComputeHashCode() const OVERRIDE { return type_index_.index_; }
5773
5774  bool CanBeNull() const OVERRIDE { return false; }
5775
5776  bool NeedsEnvironment() const OVERRIDE {
5777    return CanCallRuntime();
5778  }
5779
5780  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
5781    // The entrypoint the code generator is going to call does not do
5782    // clinit of the class.
5783    DCHECK(!NeedsAccessCheck());
5784    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
5785  }
5786
5787  bool CanCallRuntime() const {
5788    return NeedsAccessCheck() ||
5789           MustGenerateClinitCheck() ||
5790           GetLoadKind() == LoadKind::kRuntimeCall ||
5791           GetLoadKind() == LoadKind::kBssEntry;
5792  }
5793
5794  bool CanThrow() const OVERRIDE {
5795    return NeedsAccessCheck() ||
5796           MustGenerateClinitCheck() ||
5797           // If the class is in the boot image, the lookup in the runtime call cannot throw.
5798           // This keeps CanThrow() consistent between non-PIC (using kBootImageAddress) and
5799           // PIC and subsequently avoids a DCE behavior dependency on the PIC option.
5800           ((GetLoadKind() == LoadKind::kRuntimeCall ||
5801             GetLoadKind() == LoadKind::kBssEntry) &&
5802            !IsInBootImage());
5803  }
5804
5805  ReferenceTypeInfo GetLoadedClassRTI() {
5806    return loaded_class_rti_;
5807  }
5808
5809  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
5810    // Make sure we only set exact types (the loaded class should never be merged).
5811    DCHECK(rti.IsExact());
5812    loaded_class_rti_ = rti;
5813  }
5814
5815  dex::TypeIndex GetTypeIndex() const { return type_index_; }
5816  const DexFile& GetDexFile() const { return dex_file_; }
5817
5818  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
5819    return GetLoadKind() == LoadKind::kRuntimeCall;
5820  }
5821
5822  static SideEffects SideEffectsForArchRuntimeCalls() {
5823    return SideEffects::CanTriggerGC();
5824  }
5825
5826  bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
5827  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
5828  bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
5829  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
5830
5831  void MarkInBootImage() {
5832    SetPackedFlag<kFlagIsInBootImage>(true);
5833  }
5834
5835  void AddSpecialInput(HInstruction* special_input);
5836
5837  using HInstruction::GetInputRecords;  // Keep the const version visible.
5838  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
5839    return ArrayRef<HUserRecord<HInstruction*>>(
5840        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
5841  }
5842
5843  DataType::Type GetType() const OVERRIDE {
5844    return DataType::Type::kReference;
5845  }
5846
5847  Handle<mirror::Class> GetClass() const {
5848    return klass_;
5849  }
5850
5851  DECLARE_INSTRUCTION(LoadClass);
5852
5853 private:
5854  static constexpr size_t kFlagNeedsAccessCheck    = kNumberOfGenericPackedBits;
5855  static constexpr size_t kFlagIsInBootImage       = kFlagNeedsAccessCheck + 1;
5856  // Whether this instruction must generate the initialization check.
5857  // Used for code generation.
5858  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
5859  static constexpr size_t kFieldLoadKind           = kFlagGenerateClInitCheck + 1;
5860  static constexpr size_t kFieldLoadKindSize =
5861      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
5862  static constexpr size_t kNumberOfLoadClassPackedBits = kFieldLoadKind + kFieldLoadKindSize;
5863  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
5864  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
5865
5866  static bool HasTypeReference(LoadKind load_kind) {
5867    return load_kind == LoadKind::kReferrersClass ||
5868        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5869        load_kind == LoadKind::kBootImageClassTable ||
5870        load_kind == LoadKind::kBssEntry ||
5871        load_kind == LoadKind::kRuntimeCall;
5872  }
5873
5874  void SetLoadKindInternal(LoadKind load_kind);
5875
5876  // The special input is the HCurrentMethod for kRuntimeCall or kReferrersClass.
5877  // For other load kinds it's empty or possibly some architecture-specific instruction
5878  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
5879  HUserRecord<HInstruction*> special_input_;
5880
5881  // A type index and dex file where the class can be accessed. The dex file can be:
5882  // - The compiling method's dex file if the class is defined there too.
5883  // - The compiling method's dex file if the class is referenced there.
5884  // - The dex file where the class is defined. When the load kind can only be
5885  //   kBssEntry or kRuntimeCall, we cannot emit code for this `HLoadClass`.
5886  const dex::TypeIndex type_index_;
5887  const DexFile& dex_file_;
5888
5889  Handle<mirror::Class> klass_;
5890
5891  ReferenceTypeInfo loaded_class_rti_;
5892
5893  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
5894};
5895std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
5896
5897// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
5898inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
5899  // The special input is used for PC-relative loads on some architectures,
5900  // including literal pool loads, which are PC-relative too.
5901  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5902         GetLoadKind() == LoadKind::kBootImageAddress ||
5903         GetLoadKind() == LoadKind::kBootImageClassTable ||
5904         GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
5905  DCHECK(special_input_.GetInstruction() == nullptr);
5906  special_input_ = HUserRecord<HInstruction*>(special_input);
5907  special_input->AddUseAt(this, 0);
5908}
5909
/**
 * Instruction to load a String object.
 */
class HLoadString FINAL : public HInstruction {
 public:
  // Determines how to load the String.
  enum class LoadKind {
    // Use PC-relative boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code.
    kBootImageLinkTimePcRelative,

    // Use a known boot image String* address, embedded in the code by the codegen.
    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
    kBootImageAddress,

    // Use a PC-relative load from a boot image InternTable mmapped into the .bss
    // of the oat file.
    kBootImageInternTable,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for strings outside boot image when .bss is accessible with a PC-relative load.
    kBssEntry,

    // Load from the root table associated with the JIT compiled method.
    kJitTableAddress,

    // Load using a simple runtime call. This is the fall-back load kind when
    // the codegen is unable to use another appropriate kind.
    kRuntimeCall,

    kLast = kRuntimeCall,
  };

  // The initial load kind is kRuntimeCall; it may later be changed to a better
  // kind through SetLoadKind().
  HLoadString(HCurrentMethod* current_method,
              dex::StringIndex string_index,
              const DexFile& dex_file,
              uint32_t dex_pc)
      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        string_index_(string_index),
        dex_file_(dex_file) {
    SetPackedField<LoadKindField>(LoadKind::kRuntimeCall);
  }

  void SetLoadKind(LoadKind load_kind);

  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  dex::StringIndex GetStringIndex() const {
    return string_index_;
  }

  // The resolved String object, if one has been recorded via SetString().
  Handle<mirror::String> GetString() const {
    return string_;
  }

  void SetString(Handle<mirror::String> str) {
    string_ = str;
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE;

  size_t ComputeHashCode() const OVERRIDE { return string_index_.index_; }

  // Will call the runtime if we need to load the string through
  // the dex cache and the string is not guaranteed to be there yet.
  bool NeedsEnvironment() const OVERRIDE {
    LoadKind load_kind = GetLoadKind();
    if (load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kBootImageAddress ||
        load_kind == LoadKind::kBootImageInternTable ||
        load_kind == LoadKind::kJitTableAddress) {
      return false;
    }
    return true;
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    return GetLoadKind() == LoadKind::kRuntimeCall;
  }

  // A loaded string is never the null reference.
  bool CanBeNull() const OVERRIDE { return false; }
  // Only load kinds that call into the runtime can throw.
  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  void AddSpecialInput(HInstruction* special_input);

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    // Zero or one input, depending on whether the special input is set.
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  DataType::Type GetType() const OVERRIDE {
    return DataType::Type::kReference;
  }

  DECLARE_INSTRUCTION(LoadString);

 private:
  // Packed field layout: the load kind sits on top of the generic bits.
  static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;

  void SetLoadKindInternal(LoadKind load_kind);

  // The special input is the HCurrentMethod for kRuntimeCall.
  // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
  HUserRecord<HInstruction*> special_input_;

  dex::StringIndex string_index_;
  const DexFile& dex_file_;

  Handle<mirror::String> string_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};
6039std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
6040
6041// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
6042inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
6043  // The special input is used for PC-relative loads on some architectures,
6044  // including literal pool loads, which are PC-relative too.
6045  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6046         GetLoadKind() == LoadKind::kBootImageAddress ||
6047         GetLoadKind() == LoadKind::kBootImageInternTable ||
6048         GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
6049  // HLoadString::GetInputRecords() returns an empty array at this point,
6050  // so use the GetInputRecords() from the base class to set the input record.
6051  DCHECK(special_input_.GetInstruction() == nullptr);
6052  special_input_ = HUserRecord<HInstruction*>(special_input);
6053  special_input->AddUseAt(this, 0);
6054}
6055
6056/**
6057 * Performs an initialization check on its Class object input.
6058 */
6059class HClinitCheck FINAL : public HExpression<1> {
6060 public:
6061  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
6062      : HExpression(
6063            DataType::Type::kReference,
6064            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
6065            dex_pc) {
6066    SetRawInputAt(0, constant);
6067  }
6068
6069  bool CanBeMoved() const OVERRIDE { return true; }
6070  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
6071    return true;
6072  }
6073
6074  bool NeedsEnvironment() const OVERRIDE {
6075    // May call runtime to initialize the class.
6076    return true;
6077  }
6078
6079  bool CanThrow() const OVERRIDE { return true; }
6080
6081  HLoadClass* GetLoadClass() const {
6082    DCHECK(InputAt(0)->IsLoadClass());
6083    return InputAt(0)->AsLoadClass();
6084  }
6085
6086  DECLARE_INSTRUCTION(ClinitCheck);
6087
6088 private:
6089  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
6090};
6091
6092class HStaticFieldGet FINAL : public HExpression<1> {
6093 public:
6094  HStaticFieldGet(HInstruction* cls,
6095                  ArtField* field,
6096                  DataType::Type field_type,
6097                  MemberOffset field_offset,
6098                  bool is_volatile,
6099                  uint32_t field_idx,
6100                  uint16_t declaring_class_def_index,
6101                  const DexFile& dex_file,
6102                  uint32_t dex_pc)
6103      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
6104        field_info_(field,
6105                    field_offset,
6106                    field_type,
6107                    is_volatile,
6108                    field_idx,
6109                    declaring_class_def_index,
6110                    dex_file) {
6111    SetRawInputAt(0, cls);
6112  }
6113
6114
6115  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }
6116
6117  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
6118    const HStaticFieldGet* other_get = other->AsStaticFieldGet();
6119    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
6120  }
6121
6122  size_t ComputeHashCode() const OVERRIDE {
6123    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
6124  }
6125
6126  const FieldInfo& GetFieldInfo() const { return field_info_; }
6127  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6128  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
6129  bool IsVolatile() const { return field_info_.IsVolatile(); }
6130
6131  DECLARE_INSTRUCTION(StaticFieldGet);
6132
6133 private:
6134  const FieldInfo field_info_;
6135
6136  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
6137};
6138
// Writes a static field: input 0 is the class holding the field, input 1 the
// value to store; the field itself is described by `field_info_`.
class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  ArtField* field,
                  DataType::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Assume the stored value may be null until an optimization proves otherwise
    // (see ClearValueCanBeNull()).
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(StaticFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};
6185
6186class HUnresolvedInstanceFieldGet FINAL : public HExpression<1> {
6187 public:
6188  HUnresolvedInstanceFieldGet(HInstruction* obj,
6189                              DataType::Type field_type,
6190                              uint32_t field_index,
6191                              uint32_t dex_pc)
6192      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
6193        field_index_(field_index) {
6194    SetRawInputAt(0, obj);
6195  }
6196
6197  bool NeedsEnvironment() const OVERRIDE { return true; }
6198  bool CanThrow() const OVERRIDE { return true; }
6199
6200  DataType::Type GetFieldType() const { return GetType(); }
6201  uint32_t GetFieldIndex() const { return field_index_; }
6202
6203  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);
6204
6205 private:
6206  const uint32_t field_index_;
6207
6208  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
6209};
6210
6211class HUnresolvedInstanceFieldSet FINAL : public HTemplateInstruction<2> {
6212 public:
6213  HUnresolvedInstanceFieldSet(HInstruction* obj,
6214                              HInstruction* value,
6215                              DataType::Type field_type,
6216                              uint32_t field_index,
6217                              uint32_t dex_pc)
6218      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6219        field_index_(field_index) {
6220    SetPackedField<FieldTypeField>(field_type);
6221    DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
6222    SetRawInputAt(0, obj);
6223    SetRawInputAt(1, value);
6224  }
6225
6226  bool NeedsEnvironment() const OVERRIDE { return true; }
6227  bool CanThrow() const OVERRIDE { return true; }
6228
6229  DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6230  uint32_t GetFieldIndex() const { return field_index_; }
6231
6232  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
6233
6234 private:
6235  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6236  static constexpr size_t kFieldFieldTypeSize =
6237      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
6238  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
6239      kFieldFieldType + kFieldFieldTypeSize;
6240  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6241                "Too many packed fields.");
6242  using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;
6243
6244  const uint32_t field_index_;
6245
6246  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
6247};
6248
// Read of a static field that could not be resolved at compile time.
// Resolution is deferred to run time, hence the environment and throw
// capability below.
class HUnresolvedStaticFieldGet FINAL : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(DataType::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
  }

  // Field resolution happens at run time and may fail, so the instruction
  // needs an environment and can throw.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  // The field type is the expression's own type, set in the constructor.
  DataType::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
};
6271
// Store to a static field that could not be resolved at compile time.
// Resolution is deferred to run time, hence the environment and throw
// capability below.
class HUnresolvedStaticFieldSet FINAL : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            DataType::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetPackedField<FieldTypeField>(field_type);
    DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
    SetRawInputAt(0, value);
  }

  // Field resolution happens at run time and may fail, so the instruction
  // needs an environment and can throw.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 private:
  // Packed-field layout: the field type is stored in the instruction's
  // packed bits rather than in a separate member.
  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
      kFieldFieldType + kFieldFieldTypeSize;
  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;

  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
};
6307
// Implement the move-exception DEX instruction: loads the pending exception
// reference (a thrown object, hence never null) with no side effects.
class HLoadException FINAL : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(DataType::Type::kReference, SideEffects::None(), dex_pc) {}

  // A caught exception object is always non-null.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadException);
};
6321
// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
// Declared with SideEffects::AllWrites() so optimizations treat it as a
// store and keep it alive.
class HClearException FINAL : public HTemplateInstruction<0> {
 public:
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}

  DECLARE_INSTRUCTION(ClearException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClearException);
};
6334
6335class HThrow FINAL : public HTemplateInstruction<1> {
6336 public:
6337  HThrow(HInstruction* exception, uint32_t dex_pc)
6338      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
6339    SetRawInputAt(0, exception);
6340  }
6341
6342  bool IsControlFlow() const OVERRIDE { return true; }
6343
6344  bool NeedsEnvironment() const OVERRIDE { return true; }
6345
6346  bool CanThrow() const OVERRIDE { return true; }
6347
6348
6349  DECLARE_INSTRUCTION(Throw);
6350
6351 private:
6352  DISALLOW_COPY_AND_ASSIGN(HThrow);
6353};
6354
6355/**
6356 * Implementation strategies for the code generator of a HInstanceOf
6357 * or `HCheckCast`.
6358 */
// Strategy for generating code for a type check (see comment above).
// Ordered from least to most knowledge about the checked class.
enum class TypeCheckKind {
  kUnresolvedCheck,       // Check against an unresolved type.
  kExactCheck,            // Can do a single class compare.
  kClassHierarchyCheck,   // Can just walk the super class chain.
  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
  kInterfaceCheck,        // No optimization yet when checking against an interface.
  kArrayObjectCheck,      // Can just check if the array is not primitive.
  kArrayCheck,            // No optimization yet when checking against a generic array.
  kLast = kArrayCheck
};

// Pretty-printer, defined in the corresponding .cc file.
std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
6371
// Implements the instance-of operation: produces a boolean telling whether
// `object` is an instance of the class loaded by `constant`, using the
// strategy selected by `check_kind`.
class HInstanceOf FINAL : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(DataType::Type::kBool,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  // Pure with respect to its inputs: can be hoisted/CSE'd.
  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // An environment is needed only when the check may call into the runtime.
  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime(GetTypeCheckKind());
  }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  // True for every kind except kExactCheck, which can be compiled to a
  // simple class compare.
  static bool CanCallRuntime(TypeCheckKind check_kind) {
    // Mips currently does runtime calls for any other checks.
    return check_kind != TypeCheckKind::kExactCheck;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  // Packed-field layout: TypeCheckKind followed by the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};
6425
// Narrows the reference type information of its input, e.g. inside an
// `if (x instanceof C)` branch. Produced and refined by reference type
// propagation; it has no runtime effect.
class HBoundType FINAL : public HExpression<1> {
 public:
  explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(DataType::Type::kReference, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
    SetPackedFlag<kFlagUpperCanBeNull>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
    DCHECK_EQ(input->GetType(), DataType::Type::kReference);
    SetRawInputAt(0, input);
  }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  // Nullability can only be narrowed: setting can_be_null to true is only
  // legal if the upper bound allows null (checked by the DCHECK).
  void SetCanBeNull(bool can_be_null) {
    DCHECK(GetUpperCanBeNull() || !can_be_null);
    SetPackedFlag<kFlagCanBeNull>(can_be_null);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then CanBeNull() cannot be true).
  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  ReferenceTypeInfo upper_bound_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};
6469
// Implements the check-cast operation: throws if `object` is not an instance
// of the class loaded by `constant`, using the strategy in `check_kind`.
class HCheckCast FINAL : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  // Used in code generation; the null check can be elided when the input is
  // known non-null.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  // Packed-field layout: TypeCheckKind followed by the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};
6514
6515/**
6516 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
6517 * @details We define the combined barrier types that are actually required
6518 * by the Java Memory Model, rather than using exactly the terminology from
6519 * the JSR-133 cookbook.  These should, in many cases, be replaced by acquire/release
6520 * primitives.  Note that the JSR-133 cookbook generally does not deal with
6521 * store atomicity issues, and the recipes there are not always entirely sufficient.
6522 * The current recipe is as follows:
6523 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
6524 * -# Use AnyAny barrier after volatile store.  (StoreLoad is as expensive.)
6525 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
6526 * -# Use StoreStore barrier after all stores but before return from any constructor whose
6527 *    class has final fields.
6528 * -# Use NTStoreStore to order non-temporal stores with respect to all later
6529 *    store-to-memory instructions.  Only generated together with non-temporal stores.
6530 */
// Combined barrier kinds; see the detailed recipe in the comment above.
enum MemBarrierKind {
  kAnyStore,       // ~ release barrier: LoadStore | StoreStore.
  kLoadAny,        // ~ acquire barrier: LoadLoad | LoadStore.
  kStoreStore,     // Orders stores with later stores (constructor fences).
  kAnyAny,         // Full barrier, used after volatile stores.
  kNTStoreStore,   // Orders non-temporal stores with later stores.
  kLastBarrierKind = kNTStoreStore
};
// Pretty-printer, defined in the corresponding .cc file.
std::ostream& operator<<(std::ostream& os, const MemBarrierKind& kind);
6540
// Explicit memory barrier of the given kind; see MemBarrierKind above.
class HMemoryBarrier FINAL : public HTemplateInstruction<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(
            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
    SetPackedField<BarrierKindField>(barrier_kind);
  }

  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }

  DECLARE_INSTRUCTION(MemoryBarrier);

 private:
  // The barrier kind is stored in the instruction's packed bits.
  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBarrierKindSize =
      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
      kFieldBarrierKind + kFieldBarrierKindSize;
  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
};
6565
6566// A constructor fence orders all prior stores to fields that could be accessed via a final field of
6567// the specified object(s), with respect to any subsequent store that might "publish"
6568// (i.e. make visible) the specified object to another thread.
6569//
6570// JLS 17.5.1 "Semantics of final fields" states that a freeze action happens
6571// for all final fields (that were set) at the end of the invoked constructor.
6572//
6573// The constructor fence models the freeze actions for the final fields of an object
6574// being constructed (semantically at the end of the constructor). Constructor fences
6575// have a per-object affinity; two separate objects being constructed get two separate
6576// constructor fences.
6577//
6578// (Note: that if calling a super-constructor or forwarding to another constructor,
6579// the freezes would happen at the end of *that* constructor being invoked).
6580//
6581// The memory model guarantees that when the object being constructed is "published" after
6582// constructor completion (i.e. escapes the current thread via a store), then any final field
6583// writes must be observable on other threads (once they observe that publication).
6584//
6585// Further, anything written before the freeze, and read by dereferencing through the final field,
6586// must also be visible (so final object field could itself have an object with non-final fields;
6587// yet the freeze must also extend to them).
6588//
6589// Constructor example:
6590//
6591//     class HasFinal {
6592//        final int field;                              Optimizing IR for <init>()V:
6593//        HasFinal() {
6594//          field = 123;                                HInstanceFieldSet(this, HasFinal.field, 123)
6595//          // freeze(this.field);                      HConstructorFence(this)
6596//        }                                             HReturn
6597//     }
6598//
6599// HConstructorFence can serve double duty as a fence for new-instance/new-array allocations of
6600// already-initialized classes; in that case the allocation must act as a "default-initializer"
6601// of the object which effectively writes the class pointer "final field".
6602//
// For example, we can model default-initialization as roughly the equivalent of the following:
6604//
6605//     class Object {
6606//       private final Class header;
6607//     }
6608//
6609//  Java code:                                           Optimizing IR:
6610//
6611//     T new_instance<T>() {
6612//       Object obj = allocate_memory(T.class.size);     obj = HInvoke(art_quick_alloc_object, T)
6613//       obj.header = T.class;                           // header write is done by above call.
6614//       // freeze(obj.header)                           HConstructorFence(obj)
6615//       return (T)obj;
6616//     }
6617//
6618// See also:
6619// * CompilerDriver::RequiresConstructorBarrier
6620// * QuasiAtomic::ThreadFenceForConstructor
6621//
class HConstructorFence FINAL : public HVariableInputSizeInstruction {
                                  // A fence has variable inputs because the inputs can be removed
                                  // after prepare_for_register_allocation phase.
                                  // (TODO: In the future a fence could freeze multiple objects
                                  //        after merging two fences together.)
 public:
  // `fence_object` is the reference that needs to be protected for correct publication.
  //
  // It makes sense in the following situations:
  // * <init> constructors, it's the "this" parameter (i.e. HParameterValue, s.t. IsThis() == true).
  // * new-instance-like instructions, it's the return value (i.e. HNewInstance).
  //
  // After construction the `fence_object` becomes the 0th input.
  // This is not an input in a real sense, but just a convenient place to stash the information
  // about the associated object.
  HConstructorFence(HInstruction* fence_object,
                    uint32_t dex_pc,
                    ArenaAllocator* arena)
    // We strongly suspect there is not a more accurate way to describe the fine-grained reordering
    // constraints described in the class header. We claim that these SideEffects constraints
    // enforce a superset of the real constraints.
    //
    // The ordering described above is conservatively modeled with SideEffects as follows:
    //
    // * To prevent reordering of the publication stores:
    // ----> "Reads of objects" is the initial SideEffect.
    // * For every primitive final field store in the constructor:
    // ----> Union that field's type as a read (e.g. "Read of T") into the SideEffect.
    // * If there are any stores to reference final fields in the constructor:
    // ----> Use a more conservative "AllReads" SideEffect because any stores to any references
    //       that are reachable from `fence_object` also need to be prevented for reordering
    //       (and we do not want to do alias analysis to figure out what those stores are).
    //
    // In the implementation, this initially starts out as an "all reads" side effect; this is an
    // even more conservative approach than the one described above, and prevents all of the
    // above reordering without analyzing any of the instructions in the constructor.
    //
    // If in a later phase we discover that there are no writes to reference final fields,
    // we can refine the side effect to a smaller set of type reads (see above constraints).
      : HVariableInputSizeInstruction(SideEffects::AllReads(),
                                      dex_pc,
                                      arena,
                                      /* number_of_inputs */ 1,
                                      kArenaAllocConstructorFenceInputs) {
    DCHECK(fence_object != nullptr);
    SetRawInputAt(0, fence_object);
  }

  // The object associated with this constructor fence.
  //
  // (Note: This will be null after the prepare_for_register_allocation phase,
  // as all constructor fence inputs are removed there).
  HInstruction* GetFenceObject() const {
    return InputAt(0);
  }

  // Find all the HConstructorFence uses (`fence_use`) for `this` and:
  // - Delete `fence_use` from `this`'s use list.
  // - Delete `this` from `fence_use`'s inputs list.
  // - If the `fence_use` is dead, remove it from the graph.
  //
  // A fence is considered dead once it no longer has any uses
  // and all of the inputs are dead.
  //
  // This must *not* be called during/after prepare_for_register_allocation,
  // because that removes all the inputs to the fences but the fence is actually
  // still considered live.
  //
  // Returns how many HConstructorFence instructions were removed from graph.
  static size_t RemoveConstructorFences(HInstruction* instruction);

  // Combine all inputs of `this` and `other` instruction and remove
  // `other` from the graph.
  //
  // Inputs are unique after the merge.
  //
  // Requirement: `this` must not be the same as `other`.
  void Merge(HConstructorFence* other);

  // Check if this constructor fence is protecting
  // an HNewInstance or HNewArray that is also the immediate
  // predecessor of `this`.
  //
  // If `ignore_inputs` is true, then the immediate predecessor doesn't need
  // to be one of the inputs of `this`.
  //
  // Returns the associated HNewArray or HNewInstance,
  // or null otherwise.
  HInstruction* GetAssociatedAllocation(bool ignore_inputs = false);

  DECLARE_INSTRUCTION(ConstructorFence);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstructorFence);
};
6717
6718class HMonitorOperation FINAL : public HTemplateInstruction<1> {
6719 public:
6720  enum class OperationKind {
6721    kEnter,
6722    kExit,
6723    kLast = kExit
6724  };
6725
6726  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
6727    : HTemplateInstruction(
6728          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6729          dex_pc) {
6730    SetPackedField<OperationKindField>(kind);
6731    SetRawInputAt(0, object);
6732  }
6733
6734  // Instruction may go into runtime, so we need an environment.
6735  bool NeedsEnvironment() const OVERRIDE { return true; }
6736
6737  bool CanThrow() const OVERRIDE {
6738    // Verifier guarantees that monitor-exit cannot throw.
6739    // This is important because it allows the HGraphBuilder to remove
6740    // a dead throw-catch loop generated for `synchronized` blocks/methods.
6741    return IsEnter();
6742  }
6743
6744  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
6745  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
6746
6747  DECLARE_INSTRUCTION(MonitorOperation);
6748
6749 private:
6750  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
6751  static constexpr size_t kFieldOperationKindSize =
6752      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
6753  static constexpr size_t kNumberOfMonitorOperationPackedBits =
6754      kFieldOperationKind + kFieldOperationKindSize;
6755  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6756                "Too many packed fields.");
6757  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
6758
6759 private:
6760  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
6761};
6762
// Branch-free conditional: yields `true_value` or `false_value` depending on
// `condition`, i.e. the equivalent of `condition ? true_value : false_value`.
class HSelect FINAL : public HExpression<3> {
 public:
  HSelect(HInstruction* condition,
          HInstruction* true_value,
          HInstruction* false_value,
          uint32_t dex_pc)
      : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));

    // First input must be `true_value` or `false_value` to allow codegens to
    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
    // that architectures which implement HSelect as a conditional move also
    // will not need to invert the condition.
    SetRawInputAt(0, false_value);
    SetRawInputAt(1, true_value);
    SetRawInputAt(2, condition);
  }

  HInstruction* GetFalseValue() const { return InputAt(0); }
  HInstruction* GetTrueValue() const { return InputAt(1); }
  HInstruction* GetCondition() const { return InputAt(2); }

  // Pure with respect to its inputs: can be hoisted/CSE'd.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // The result may be null if either selectable input may be null.
  bool CanBeNull() const OVERRIDE {
    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
  }

  DECLARE_INSTRUCTION(Select);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSelect);
};
6799
// A single source -> destination move, as used by the parallel move resolver.
// A move can be "pending" (destination temporarily cleared while in progress)
// or "eliminated" (both operands cleared once resolved).
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               DataType::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  // Restore the destination saved by MarkPending() once the move completes.
  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  bool IsPending() const {
    DCHECK(source_.IsValid() || destination_.IsInvalid());
    return destination_.IsInvalid() && source_.IsValid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  DataType::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return DataType::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  DataType::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};
6873
6874std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
6875
6876static constexpr size_t kDefaultNumberOfMoves = 4;
6877
// A set of moves (register/stack shuffling) to be performed "in parallel",
// i.e. as if all sources were read before any destination is written.
// Resolved into a sequential schedule by the parallel move resolver.
class HParallelMove FINAL : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  // Append a move. In debug builds, verifies that no two moves in this
  // parallel move belong to the same instruction (except the spill-slot
  // special case) and that no two destinations overlap.
  void AddMove(Location source,
               Location destination,
               DataType::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};
6933
6934}  // namespace art
6935
6936#include "nodes_vector.h"
6937
6938#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
6939#include "nodes_shared.h"
6940#endif
6941#ifdef ART_ENABLE_CODEGEN_mips
6942#include "nodes_mips.h"
6943#endif
6944#ifdef ART_ENABLE_CODEGEN_x86
6945#include "nodes_x86.h"
6946#endif
6947
6948namespace art {
6949
6950class OptimizingCompilerStats;
6951
// Base class for passes that walk the graph and dispatch per instruction
// kind. Subclasses override VisitInstruction() and/or individual Visit##name
// methods generated below.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
      : stats_(stats),
        graph_(graph) {}
  virtual ~HGraphVisitor() {}

  // Fallback called by the generated Visit##name methods; default is a no-op.
  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 protected:
  // Optional statistics sink; may be null.
  OptimizingCompilerStats* stats_;

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
6986
// Visitor whose generated Visit##name methods delegate to the visit method of
// the instruction's super class, so overriding a base-class visit handles all
// derived instruction kinds at once.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
      : HGraphVisitor(graph, stats) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
7004
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is on their
// block id.
7008class HBlocksInLoopIterator : public ValueObject {
7009 public:
7010  explicit HBlocksInLoopIterator(const HLoopInformation& info)
7011      : blocks_in_loop_(info.GetBlocks()),
7012        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
7013        index_(0) {
7014    if (!blocks_in_loop_.IsBitSet(index_)) {
7015      Advance();
7016    }
7017  }
7018
7019  bool Done() const { return index_ == blocks_.size(); }
7020  HBasicBlock* Current() const { return blocks_[index_]; }
7021  void Advance() {
7022    ++index_;
7023    for (size_t e = blocks_.size(); index_ < e; ++index_) {
7024      if (blocks_in_loop_.IsBitSet(index_)) {
7025        break;
7026      }
7027    }
7028  }
7029
7030 private:
7031  const BitVector& blocks_in_loop_;
7032  const ArenaVector<HBasicBlock*>& blocks_;
7033  size_t index_;
7034
7035  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
7036};
7037
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is reverse
// post order.
7041class HBlocksInLoopReversePostOrderIterator : public ValueObject {
7042 public:
7043  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
7044      : blocks_in_loop_(info.GetBlocks()),
7045        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
7046        index_(0) {
7047    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
7048      Advance();
7049    }
7050  }
7051
7052  bool Done() const { return index_ == blocks_.size(); }
7053  HBasicBlock* Current() const { return blocks_[index_]; }
7054  void Advance() {
7055    ++index_;
7056    for (size_t e = blocks_.size(); index_ < e; ++index_) {
7057      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
7058        break;
7059      }
7060    }
7061  }
7062
7063 private:
7064  const BitVector& blocks_in_loop_;
7065  const ArenaVector<HBasicBlock*>& blocks_;
7066  size_t index_;
7067
7068  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
7069};
7070
7071// Returns int64_t value of a properly typed constant.
7072inline int64_t Int64FromConstant(HConstant* constant) {
7073  if (constant->IsIntConstant()) {
7074    return constant->AsIntConstant()->GetValue();
7075  } else if (constant->IsLongConstant()) {
7076    return constant->AsLongConstant()->GetValue();
7077  } else {
7078    DCHECK(constant->IsNullConstant()) << constant->DebugName();
7079    return 0;
7080  }
7081}
7082
7083// Returns true iff instruction is an integral constant (and sets value on success).
7084inline bool IsInt64AndGet(HInstruction* instruction, /*out*/ int64_t* value) {
7085  if (instruction->IsIntConstant()) {
7086    *value = instruction->AsIntConstant()->GetValue();
7087    return true;
7088  } else if (instruction->IsLongConstant()) {
7089    *value = instruction->AsLongConstant()->GetValue();
7090    return true;
7091  } else if (instruction->IsNullConstant()) {
7092    *value = 0;
7093    return true;
7094  }
7095  return false;
7096}
7097
7098// Returns true iff instruction is the given integral constant.
7099inline bool IsInt64Value(HInstruction* instruction, int64_t value) {
7100  int64_t val = 0;
7101  return IsInt64AndGet(instruction, &val) && val == value;
7102}
7103
7104// Returns true iff instruction is a zero bit pattern.
7105inline bool IsZeroBitPattern(HInstruction* instruction) {
7106  return instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern();
7107}
7108
7109#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
7110  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
7111  inline const H##type* HInstruction::As##type() const {                       \
7112    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
7113  }                                                                            \
7114  inline H##type* HInstruction::As##type() {                                   \
7115    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
7116  }
7117
7118  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
7119#undef INSTRUCTION_TYPE_CHECK
7120
// Create space in `blocks` for adding `number_of_new_blocks` entries
// starting just after location `after`. Blocks after `after` are moved
// accordingly; the newly opened slots keep stale values and must be
// overwritten by the caller.
inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
                        size_t number_of_new_blocks,
                        size_t after) {
  // `after` must index an existing block.
  DCHECK_LT(after, blocks->size());
  size_t old_size = blocks->size();
  size_t new_size = old_size + number_of_new_blocks;
  blocks->resize(new_size);
  // Shift the tail [after + 1, old_size) to the end of the grown vector,
  // copying backwards so overlapping source/destination ranges are safe.
  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
}
7132
7133/*
7134 * Hunt "under the hood" of array lengths (leading to array references),
7135 * null checks (also leading to array references), and new arrays
7136 * (leading to the actual length). This makes it more likely related
7137 * instructions become actually comparable.
7138 */
7139inline HInstruction* HuntForDeclaration(HInstruction* instruction) {
7140  while (instruction->IsArrayLength() ||
7141         instruction->IsNullCheck() ||
7142         instruction->IsNewArray()) {
7143    instruction = instruction->IsNewArray()
7144        ? instruction->AsNewArray()->GetLength()
7145        : instruction->InputAt(0);
7146  }
7147  return instruction;
7148}
7149
7150void RemoveEnvironmentUses(HInstruction* instruction);
7151bool HasEnvironmentUsedByOthers(HInstruction* instruction);
7152void ResetEnvironmentInputRecords(HInstruction* instruction);
7153
7154}  // namespace art
7155
7156#endif  // ART_COMPILER_OPTIMIZING_NODES_H_
7157