nodes.h revision a290160f74ee53c0ffb51c7b3ac916d239c9556a
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18#define ART_COMPILER_OPTIMIZING_NODES_H_
19
20#include <algorithm>
21#include <array>
22#include <type_traits>
23
24#include "base/arena_bit_vector.h"
25#include "base/arena_containers.h"
26#include "base/arena_object.h"
27#include "base/array_ref.h"
28#include "base/iteration_range.h"
29#include "base/stl_util.h"
30#include "base/transform_array_ref.h"
31#include "data_type.h"
32#include "deoptimization_kind.h"
33#include "dex_file.h"
34#include "dex_file_types.h"
35#include "entrypoints/quick/quick_entrypoints_enum.h"
36#include "handle.h"
37#include "handle_scope.h"
38#include "intrinsics_enum.h"
39#include "invoke_type.h"
40#include "locations.h"
41#include "method_reference.h"
42#include "mirror/class.h"
43#include "offsets.h"
44#include "utils/intrusive_forward_list.h"
45
46namespace art {
47
48class GraphChecker;
49class HBasicBlock;
50class HConstructorFence;
51class HCurrentMethod;
52class HDoubleConstant;
53class HEnvironment;
54class HFloatConstant;
55class HGraphBuilder;
56class HGraphVisitor;
57class HInstruction;
58class HIntConstant;
59class HInvoke;
60class HLongConstant;
61class HNullConstant;
62class HParameterValue;
63class HPhi;
64class HSuspendCheck;
65class HTryBoundary;
66class LiveInterval;
67class LocationSummary;
68class SlowPathCode;
69class SsaBuilder;
70
71namespace mirror {
72class DexCache;
73}  // namespace mirror
74
// Default initial capacities for per-graph/per-block containers. These are
// reservation hints only; containers grow on demand.
// Note: declared constexpr for consistency with the other compile-time
// constants in this file (kMaxIntShiftDistance etc.).
static constexpr int kDefaultNumberOfBlocks = 8;
static constexpr int kDefaultNumberOfSuccessors = 2;
static constexpr int kDefaultNumberOfPredecessors = 2;
static constexpr int kDefaultNumberOfExceptionalPredecessors = 0;
static constexpr int kDefaultNumberOfDominatedBlocks = 1;
static constexpr int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;
86
// Sentinel field index used when the dex field index is not known.
static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
// Sentinel class-def index used when the declaring class-def is not known.
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

// Sentinel invoke type (outside the valid InvokeType range).
static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

// Sentinel dex pc for instructions with no corresponding dex instruction
// (wraps to 0xFFFFFFFF via the unsigned conversion of -1).
static constexpr uint32_t kNoDexPc = -1;
93
94inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
95  // For the purposes of the compiler, the dex files must actually be the same object
96  // if we want to safely treat them as the same. This is especially important for JIT
97  // as custom class loaders can open the same underlying file (or memory) multiple
98  // times and provide different class resolution but no two class loaders should ever
99  // use the same DexFile object - doing so is an unsupported hack that can lead to
100  // all sorts of weird failures.
101  return &lhs == &rhs;
102}
103
// Comparison kinds for conditions. The mnemonics for the unsigned variants
// follow x86 condition-code naming (Below/Above).
enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <   (below)
  kCondBE,  // <=  (below or equal)
  kCondA,   // >   (above)
  kCondAE,  // >=  (above or equal)
  // First and last aliases, e.g. for iterating over all conditions.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};
122
// Outcome of graph analysis steps such as HGraph::BuildDominatorTree() and
// HGraph::AnalyzeLoops().
enum GraphAnalysisResult {
  kAnalysisSkipped,              // Analysis was not attempted.
  kAnalysisInvalidBytecode,      // Input bytecode was rejected.
  kAnalysisFailThrowCatchLoop,   // A loop header is a catch block (see AnalyzeLoops()).
  kAnalysisFailAmbiguousArrayOp, // An array operation's type is ambiguous — presumably
                                 // reported during SSA building; TODO confirm.
  kAnalysisSuccess,              // Analysis completed successfully.
};
130
// Reinterprets `x` as the unsigned integer type of the same width.
template <typename T>
static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
  using UnsignedT = typename std::make_unsigned<T>::type;
  return static_cast<UnsignedT>(x);
}
135
// Ordered list of the instructions of one basic block. Only head/tail
// pointers are stored here; the instructions themselves are arena-allocated
// and are never freed by this class.
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  // Append `instruction` / unlink `instruction`.
  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise.  Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  // Drops the links only; does not destroy the (arena-allocated) instructions.
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  // Splice the instructions of `instruction_list` into this list after/before
  // `cursor`, or at the end.
  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HInstructionIteratorHandleChanges;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};
182
// Reference type information for an instruction producing an object value:
// a handle to the class plus an "exact" bit. An instance with a null handle
// is "invalid", meaning no type information is available.
class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  // Factory; validation (if any) happens in the out-of-line definition.
  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);

  // Convenience overload: a type that cannot be assigned from other types
  // is necessarily exact.
  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
  }

  // Constructs directly without going through Create(); the caller is
  // responsible for the handle/exactness combination being sensible.
  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  // The "no type information" value (null handle, not exact).
  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const {
    return IsValidHandle(type_handle_);
  }

  // Whether the handle denotes the precise runtime type (as opposed to a
  // supertype of the actual type).
  bool IsExact() const { return is_exact_; }

  // The queries below require a valid handle (DCHECKed) and read the class
  // object, hence the mutator-lock annotation.

  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  // Whether an array of this (exact) array type can hold a value typed `rti`.
  // Conservatively false when this type is not exact or not an array class.
  bool CanArrayHold(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Whether an array of this (exact) array type can hold every element of an
  // array typed `rti` (component-type assignability). Conservatively false
  // when either side is not an array or this type is not exact.
  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  // Whether this class is a supertype of (or the same as) `rti`'s class.
  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Like IsSupertypeOf(), but excludes the same-class case.
  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provide the same amount of details.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
      : type_handle_(type_handle), is_exact_(is_exact) { }

  // The class of the object.
  TypeHandle type_handle_;
  // Whether or not the type is exact or a superclass of the actual type.
  // Whether or not we have any information about this type.
  bool is_exact_;
};
302
303std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
304
305// Control-flow graph of a method. Contains a list of basic blocks.
306class HGraph : public ArenaObject<kArenaAllocGraph> {
307 public:
308  HGraph(ArenaAllocator* arena,
309         const DexFile& dex_file,
310         uint32_t method_idx,
311         InstructionSet instruction_set,
312         InvokeType invoke_type = kInvalidInvokeType,
313         bool debuggable = false,
314         bool osr = false,
315         int start_instruction_id = 0)
316      : arena_(arena),
317        blocks_(arena->Adapter(kArenaAllocBlockList)),
318        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
319        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
320        entry_block_(nullptr),
321        exit_block_(nullptr),
322        maximum_number_of_out_vregs_(0),
323        number_of_vregs_(0),
324        number_of_in_vregs_(0),
325        temporaries_vreg_slots_(0),
326        has_bounds_checks_(false),
327        has_try_catch_(false),
328        has_simd_(false),
329        has_loops_(false),
330        has_irreducible_loops_(false),
331        debuggable_(debuggable),
332        current_instruction_id_(start_instruction_id),
333        dex_file_(dex_file),
334        method_idx_(method_idx),
335        invoke_type_(invoke_type),
336        in_ssa_form_(false),
337        number_of_cha_guards_(0),
338        instruction_set_(instruction_set),
339        cached_null_constant_(nullptr),
340        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
341        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
342        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
343        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
344        cached_current_method_(nullptr),
345        art_method_(nullptr),
346        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
347        osr_(osr),
348        cha_single_implementation_list_(arena->Adapter(kArenaAllocCHA)) {
349    blocks_.reserve(kDefaultNumberOfBlocks);
350  }
351
352  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
353  void InitializeInexactObjectRTI(VariableSizedHandleScope* handles);
354
355  ArenaAllocator* GetArena() const { return arena_; }
356  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }
357
358  bool IsInSsaForm() const { return in_ssa_form_; }
359  void SetInSsaForm() { in_ssa_form_ = true; }
360
361  HBasicBlock* GetEntryBlock() const { return entry_block_; }
362  HBasicBlock* GetExitBlock() const { return exit_block_; }
363  bool HasExitBlock() const { return exit_block_ != nullptr; }
364
365  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
366  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }
367
368  void AddBlock(HBasicBlock* block);
369
370  void ComputeDominanceInformation();
371  void ClearDominanceInformation();
372  void ClearLoopInformation();
373  void FindBackEdges(ArenaBitVector* visited);
374  GraphAnalysisResult BuildDominatorTree();
375  void SimplifyCFG();
376  void SimplifyCatchBlocks();
377
378  // Analyze all natural loops in this graph. Returns a code specifying that it
379  // was successful or the reason for failure. The method will fail if a loop
380  // is a throw-catch loop, i.e. the header is a catch block.
381  GraphAnalysisResult AnalyzeLoops() const;
382
383  // Iterate over blocks to compute try block membership. Needs reverse post
384  // order and loop information.
385  void ComputeTryBlockInformation();
386
387  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
388  // Returns the instruction to replace the invoke expression or null if the
389  // invoke is for a void method. Note that the caller is responsible for replacing
390  // and removing the invoke instruction.
391  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);
392
393  // Update the loop and try membership of `block`, which was spawned from `reference`.
394  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
395  // should be the new back edge.
396  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
397                                             HBasicBlock* reference,
398                                             bool replace_if_back_edge);
399
400  // Need to add a couple of blocks to test if the loop body is entered and
401  // put deoptimization instructions, etc.
402  void TransformLoopHeaderForBCE(HBasicBlock* header);
403
404  // Adds a new loop directly after the loop with the given header and exit.
405  // Returns the new preheader.
406  HBasicBlock* TransformLoopForVectorization(HBasicBlock* header,
407                                             HBasicBlock* body,
408                                             HBasicBlock* exit);
409
410  // Removes `block` from the graph. Assumes `block` has been disconnected from
411  // other blocks and has no instructions or phis.
412  void DeleteDeadEmptyBlock(HBasicBlock* block);
413
414  // Splits the edge between `block` and `successor` while preserving the
415  // indices in the predecessor/successor lists. If there are multiple edges
416  // between the blocks, the lowest indices are used.
417  // Returns the new block which is empty and has the same dex pc as `successor`.
418  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);
419
420  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
421  void OrderLoopHeaderPredecessors(HBasicBlock* header);
422  void SimplifyLoop(HBasicBlock* header);
423
424  int32_t GetNextInstructionId() {
425    CHECK_NE(current_instruction_id_, INT32_MAX);
426    return current_instruction_id_++;
427  }
428
429  int32_t GetCurrentInstructionId() const {
430    return current_instruction_id_;
431  }
432
433  void SetCurrentInstructionId(int32_t id) {
434    CHECK_GE(id, current_instruction_id_);
435    current_instruction_id_ = id;
436  }
437
438  uint16_t GetMaximumNumberOfOutVRegs() const {
439    return maximum_number_of_out_vregs_;
440  }
441
442  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
443    maximum_number_of_out_vregs_ = new_value;
444  }
445
446  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
447    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
448  }
449
450  void UpdateTemporariesVRegSlots(size_t slots) {
451    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
452  }
453
454  size_t GetTemporariesVRegSlots() const {
455    DCHECK(!in_ssa_form_);
456    return temporaries_vreg_slots_;
457  }
458
459  void SetNumberOfVRegs(uint16_t number_of_vregs) {
460    number_of_vregs_ = number_of_vregs;
461  }
462
463  uint16_t GetNumberOfVRegs() const {
464    return number_of_vregs_;
465  }
466
467  void SetNumberOfInVRegs(uint16_t value) {
468    number_of_in_vregs_ = value;
469  }
470
471  uint16_t GetNumberOfInVRegs() const {
472    return number_of_in_vregs_;
473  }
474
475  uint16_t GetNumberOfLocalVRegs() const {
476    DCHECK(!in_ssa_form_);
477    return number_of_vregs_ - number_of_in_vregs_;
478  }
479
480  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
481    return reverse_post_order_;
482  }
483
484  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() {
485    DCHECK(GetReversePostOrder()[0] == entry_block_);
486    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
487  }
488
489  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
490    return ReverseRange(GetReversePostOrder());
491  }
492
493  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
494    return linear_order_;
495  }
496
497  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
498    return ReverseRange(GetLinearOrder());
499  }
500
501  bool HasBoundsChecks() const {
502    return has_bounds_checks_;
503  }
504
505  void SetHasBoundsChecks(bool value) {
506    has_bounds_checks_ = value;
507  }
508
509  bool IsDebuggable() const { return debuggable_; }
510
511  // Returns a constant of the given type and value. If it does not exist
512  // already, it is created and inserted into the graph. This method is only for
513  // integral types.
514  HConstant* GetConstant(DataType::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);
515
516  // TODO: This is problematic for the consistency of reference type propagation
517  // because it can be created anytime after the pass and thus it will be left
518  // with an invalid type.
519  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);
520
521  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
522    return CreateConstant(value, &cached_int_constants_, dex_pc);
523  }
524  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
525    return CreateConstant(value, &cached_long_constants_, dex_pc);
526  }
527  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
528    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
529  }
530  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
531    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
532  }
533
534  HCurrentMethod* GetCurrentMethod();
535
536  const DexFile& GetDexFile() const {
537    return dex_file_;
538  }
539
540  uint32_t GetMethodIdx() const {
541    return method_idx_;
542  }
543
544  // Get the method name (without the signature), e.g. "<init>"
545  const char* GetMethodName() const;
546
547  // Get the pretty method name (class + name + optionally signature).
548  std::string PrettyMethod(bool with_signature = true) const;
549
550  InvokeType GetInvokeType() const {
551    return invoke_type_;
552  }
553
554  InstructionSet GetInstructionSet() const {
555    return instruction_set_;
556  }
557
558  bool IsCompilingOsr() const { return osr_; }
559
560  ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
561    return cha_single_implementation_list_;
562  }
563
564  void AddCHASingleImplementationDependency(ArtMethod* method) {
565    cha_single_implementation_list_.insert(method);
566  }
567
568  bool HasShouldDeoptimizeFlag() const {
569    return number_of_cha_guards_ != 0;
570  }
571
572  bool HasTryCatch() const { return has_try_catch_; }
573  void SetHasTryCatch(bool value) { has_try_catch_ = value; }
574
575  bool HasSIMD() const { return has_simd_; }
576  void SetHasSIMD(bool value) { has_simd_ = value; }
577
578  bool HasLoops() const { return has_loops_; }
579  void SetHasLoops(bool value) { has_loops_ = value; }
580
581  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
582  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }
583
584  ArtMethod* GetArtMethod() const { return art_method_; }
585  void SetArtMethod(ArtMethod* method) { art_method_ = method; }
586
587  // Returns an instruction with the opposite Boolean value from 'cond'.
588  // The instruction has been inserted into the graph, either as a constant, or
589  // before cursor.
590  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);
591
592  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }
593
594  uint32_t GetNumberOfCHAGuards() { return number_of_cha_guards_; }
595  void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
596  void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }
597
598 private:
599  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
600  void RemoveDeadBlocks(const ArenaBitVector& visited);
601
602  template <class InstructionType, typename ValueType>
603  InstructionType* CreateConstant(ValueType value,
604                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
605                                  uint32_t dex_pc = kNoDexPc) {
606    // Try to find an existing constant of the given value.
607    InstructionType* constant = nullptr;
608    auto cached_constant = cache->find(value);
609    if (cached_constant != cache->end()) {
610      constant = cached_constant->second;
611    }
612
613    // If not found or previously deleted, create and cache a new instruction.
614    // Don't bother reviving a previously deleted instruction, for simplicity.
615    if (constant == nullptr || constant->GetBlock() == nullptr) {
616      constant = new (arena_) InstructionType(value, dex_pc);
617      cache->Overwrite(value, constant);
618      InsertConstant(constant);
619    }
620    return constant;
621  }
622
623  void InsertConstant(HConstant* instruction);
624
625  // Cache a float constant into the graph. This method should only be
626  // called by the SsaBuilder when creating "equivalent" instructions.
627  void CacheFloatConstant(HFloatConstant* constant);
628
629  // See CacheFloatConstant comment.
630  void CacheDoubleConstant(HDoubleConstant* constant);
631
632  ArenaAllocator* const arena_;
633
634  // List of blocks in insertion order.
635  ArenaVector<HBasicBlock*> blocks_;
636
637  // List of blocks to perform a reverse post order tree traversal.
638  ArenaVector<HBasicBlock*> reverse_post_order_;
639
640  // List of blocks to perform a linear order tree traversal. Unlike the reverse
641  // post order, this order is not incrementally kept up-to-date.
642  ArenaVector<HBasicBlock*> linear_order_;
643
644  HBasicBlock* entry_block_;
645  HBasicBlock* exit_block_;
646
647  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
648  uint16_t maximum_number_of_out_vregs_;
649
650  // The number of virtual registers in this method. Contains the parameters.
651  uint16_t number_of_vregs_;
652
653  // The number of virtual registers used by parameters of this method.
654  uint16_t number_of_in_vregs_;
655
656  // Number of vreg size slots that the temporaries use (used in baseline compiler).
657  size_t temporaries_vreg_slots_;
658
659  // Flag whether there are bounds checks in the graph. We can skip
660  // BCE if it's false. It's only best effort to keep it up to date in
661  // the presence of code elimination so there might be false positives.
662  bool has_bounds_checks_;
663
664  // Flag whether there are try/catch blocks in the graph. We will skip
665  // try/catch-related passes if it's false. It's only best effort to keep
666  // it up to date in the presence of code elimination so there might be
667  // false positives.
668  bool has_try_catch_;
669
670  // Flag whether SIMD instructions appear in the graph. If true, the
671  // code generators may have to be more careful spilling the wider
672  // contents of SIMD registers.
673  bool has_simd_;
674
675  // Flag whether there are any loops in the graph. We can skip loop
676  // optimization if it's false. It's only best effort to keep it up
677  // to date in the presence of code elimination so there might be false
678  // positives.
679  bool has_loops_;
680
681  // Flag whether there are any irreducible loops in the graph. It's only
682  // best effort to keep it up to date in the presence of code elimination
683  // so there might be false positives.
684  bool has_irreducible_loops_;
685
686  // Indicates whether the graph should be compiled in a way that
687  // ensures full debuggability. If false, we can apply more
688  // aggressive optimizations that may limit the level of debugging.
689  const bool debuggable_;
690
691  // The current id to assign to a newly added instruction. See HInstruction.id_.
692  int32_t current_instruction_id_;
693
694  // The dex file from which the method is from.
695  const DexFile& dex_file_;
696
697  // The method index in the dex file.
698  const uint32_t method_idx_;
699
700  // If inlined, this encodes how the callee is being invoked.
701  const InvokeType invoke_type_;
702
703  // Whether the graph has been transformed to SSA form. Only used
704  // in debug mode to ensure we are not using properties only valid
705  // for non-SSA form (like the number of temporaries).
706  bool in_ssa_form_;
707
708  // Number of CHA guards in the graph. Used to short-circuit the
709  // CHA guard optimization pass when there is no CHA guard left.
710  uint32_t number_of_cha_guards_;
711
712  const InstructionSet instruction_set_;
713
714  // Cached constants.
715  HNullConstant* cached_null_constant_;
716  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
717  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
718  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
719  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;
720
721  HCurrentMethod* cached_current_method_;
722
723  // The ArtMethod this graph is for. Note that for AOT, it may be null,
724  // for example for methods whose declaring class could not be resolved
725  // (such as when the superclass could not be found).
726  ArtMethod* art_method_;
727
728  // Keep the RTI of inexact Object to avoid having to pass stack handle
729  // collection pointer to passes which may create NullConstant.
730  ReferenceTypeInfo inexact_object_rti_;
731
732  // Whether we are compiling this graph for on stack replacement: this will
733  // make all loops seen as irreducible and emit special stack maps to mark
734  // compiled code entries which the interpreter can directly jump to.
735  const bool osr_;
736
737  // List of methods that are assumed to have single implementation.
738  ArenaSet<ArtMethod*> cha_single_implementation_list_;
739
740  friend class SsaBuilder;           // For caching constants.
741  friend class SsaLivenessAnalysis;  // For the linear order.
742  friend class HInliner;             // For the reverse post order.
743  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
744  DISALLOW_COPY_AND_ASSIGN(HGraph);
745};
746
// Per-loop metadata attached to a loop header: the header block, the back
// edges, a bit vector of member blocks, the loop's suspend check, and
// irreducibility flags.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        contains_irreducible_loop_(false),
        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetArena(), graph->GetBlocks().size(), true, kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  bool IsIrreducible() const { return irreducible_; }
  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  // The HSuspendCheck instruction associated with this loop, if any.
  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  // Replaces `existing` with `new_back_edge` in the back-edge list, keeping
  // its position.
  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  // A loop is considered populated once at least one block bit has been set.
  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

  bool HasExitEdge() const;

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  bool contains_irreducible_loop_;
  ArenaVector<HBasicBlock*> back_edges_;
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};
854
855// Stores try/catch information for basic blocks.
856// Note that HGraph is constructed so that catch blocks cannot simultaneously
857// be try blocks.
858class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
859 public:
860  // Try block information constructor.
861  explicit TryCatchInformation(const HTryBoundary& try_entry)
862      : try_entry_(&try_entry),
863        catch_dex_file_(nullptr),
864        catch_type_index_(DexFile::kDexNoIndex16) {
865    DCHECK(try_entry_ != nullptr);
866  }
867
868  // Catch block information constructor.
869  TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
870      : try_entry_(nullptr),
871        catch_dex_file_(&dex_file),
872        catch_type_index_(catch_type_index) {}
873
874  bool IsTryBlock() const { return try_entry_ != nullptr; }
875
876  const HTryBoundary& GetTryEntry() const {
877    DCHECK(IsTryBlock());
878    return *try_entry_;
879  }
880
881  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }
882
883  bool IsCatchAllTypeIndex() const {
884    DCHECK(IsCatchBlock());
885    return !catch_type_index_.IsValid();
886  }
887
888  dex::TypeIndex GetCatchTypeIndex() const {
889    DCHECK(IsCatchBlock());
890    return catch_type_index_;
891  }
892
893  const DexFile& GetCatchDexFile() const {
894    DCHECK(IsCatchBlock());
895    return *catch_dex_file_;
896  }
897
898 private:
899  // One of possibly several TryBoundary instructions entering the block's try.
900  // Only set for try blocks.
901  const HTryBoundary* try_entry_;
902
903  // Exception type information. Only set for catch blocks.
904  const DexFile* catch_dex_file_;
905  const dex::TypeIndex catch_type_index_;
906};
907
// Sentinel lifetime position meaning "liveness not computed yet".
// The cast is explicit for consistency with kInvalidBlockId below
// (same value as the previous implicit `= -1` conversion).
static constexpr size_t kNoLifetime = static_cast<size_t>(-1);
// Sentinel id for a block that has not been added to a graph.
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
910
// A block in a method. Contains the list of instructions represented
// as a double linked list. Each block knows its predecessors and
// successors.

class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  // Views of the successor list partitioned by edge kind.
  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  // Returns whether `block` appears among successors at or after `start_from`.
  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleReturn() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  // Registers `back_edge` as a back edge of the loop headed by `this`,
  // lazily creating the loop information on first use.
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  // NOTE(review): takes `int` but stores into the uint32_t block_id_;
  // callers are presumably expected to pass non-negative ids — confirm.
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  // Note: narrows HLoopInformation's size_t count to int.
  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  // Adds the edge `this` -> `block`, updating both adjacency lists.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  // Redirects the edge `this` -> `existing` to `this` -> `new_block`,
  // preserving the successor index.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  // Redirects the edge `existing` -> `this` to `new_block` -> `this`,
  // preserving the predecessor index.
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  // Note: only updates this block's list; the other endpoint's successor
  // list is left untouched (see ReplaceSuccessor/ReplacePredecessor for
  // symmetric updates).
  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  // Adds the edge `block` -> `this`, updating both adjacency lists.
  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  // Only valid for blocks with exactly two predecessors.
  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  // Only valid for blocks with exactly two successors.
  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merges the instructions of `other` at the end of `this`.
  void MergeInstructionsWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  // Lifetime positions used by liveness analysis; kNoLifetime until set.
  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};
1267
// Iterates over the LoopInformation of all loops which contain 'block'
// from the innermost to the outermost.
class HLoopInformationOutwardIterator : public ValueObject {
 public:
  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
      : current_(block.GetLoopInformation()) {}

  // Done once there is no enclosing loop left.
  bool Done() const { return current_ == nullptr; }

  // Moves to the next-outer loop: the loop (if any) containing the current
  // loop's pre-header.
  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetPreHeader()->GetLoopInformation();
  }

  HLoopInformation* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HLoopInformation* current_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
};
1292
// Concrete instruction classes available on every target, listed as
// M(InstructionName, BaseClassName) pairs. Consumed by the aggregate
// FOR_EACH_* macros below to generate declarations and dispatch code.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClassTableGet, Instruction)                                         \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(ConstructorFence, Instruction)                                      \
  M(CurrentMethod, Instruction)                                         \
  M(ShouldDeoptimizeFlag, Instruction)                                  \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(InvokePolymorphic, Invoke)                                          \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadString, Instruction)                                            \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(NativeDebugInfo, Instruction)                                       \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Ror, BinaryOperation)                                               \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(Select, Instruction)                                                \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \
  M(VecReplicateScalar, VecUnaryOperation)                              \
  M(VecExtractScalar, VecUnaryOperation)                                \
  M(VecReduce, VecUnaryOperation)                                       \
  M(VecCnv, VecUnaryOperation)                                          \
  M(VecNeg, VecUnaryOperation)                                          \
  M(VecAbs, VecUnaryOperation)                                          \
  M(VecNot, VecUnaryOperation)                                          \
  M(VecAdd, VecBinaryOperation)                                         \
  M(VecHalvingAdd, VecBinaryOperation)                                  \
  M(VecSub, VecBinaryOperation)                                         \
  M(VecMul, VecBinaryOperation)                                         \
  M(VecDiv, VecBinaryOperation)                                         \
  M(VecMin, VecBinaryOperation)                                         \
  M(VecMax, VecBinaryOperation)                                         \
  M(VecAnd, VecBinaryOperation)                                         \
  M(VecAndNot, VecBinaryOperation)                                      \
  M(VecOr, VecBinaryOperation)                                          \
  M(VecXor, VecBinaryOperation)                                         \
  M(VecShl, VecBinaryOperation)                                         \
  M(VecShr, VecBinaryOperation)                                         \
  M(VecUShr, VecBinaryOperation)                                        \
  M(VecSetScalars, VecOperation)                                        \
  M(VecMultiplyAccumulate, VecOperation)                                \
  M(VecSADAccumulate, VecOperation)                                     \
  M(VecLoad, VecMemoryOperation)                                        \
  M(VecStore, VecMemoryOperation)                                       \

/*
 * Instructions, shared across several (not all) architectures.
 */
#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
  M(BitwiseNegatedRight, Instruction)                                   \
  M(DataProcWithShifterOp, Instruction)                                 \
  M(MultiplyAccumulate, Instruction)                                    \
  M(IntermediateAddress, Instruction)                                   \
  M(IntermediateAddressIndex, Instruction)
#endif

// Per-architecture instruction lists; empty when the corresponding code
// generator is not compiled in.
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)

#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)

#ifndef ART_ENABLE_CODEGEN_mips
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                           \
  M(MipsComputeBaseMethodAddress, Instruction)                          \
  M(MipsPackedSwitch, Instruction)                                      \
  M(IntermediateArrayAddressIndex, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86FPNeg, Instruction)                                              \
  M(X86PackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// All concrete instructions: common + shared + every architecture list.
#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// Abstract instruction base classes (never instantiated directly).
#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
  M(Condition, BinaryOperation)                                         \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)                                                \
  M(VecOperation, Instruction)                                          \
  M(VecUnaryOperation, VecOperation)                                    \
  M(VecBinaryOperation, VecOperation)                                   \
  M(VecMemoryOperation, VecOperation)

#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

// Forward-declare every H<Name> instruction class.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION
1472
// Boilerplate placed in every concrete instruction class: kind query,
// debug name, type-equality check and visitor dispatch.
#define DECLARE_INSTRUCTION(type)                                         \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }    \
  const char* DebugName() const OVERRIDE { return #type; }                \
  bool InstructionTypeEquals(const HInstruction* other) const OVERRIDE {  \
    return other->Is##type();                                             \
  }                                                                       \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// Injects the Is<type>()/As<type>() queries for an abstract instruction class.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }
1485
// A node in an instruction's use list, recording the user (of type T,
// e.g. HInstruction*) and the index of the input record the use
// corresponds to.
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode>,
                     public IntrusiveForwardListNode<HUseListNode<T>> {
 public:
  // Get the instruction which has this use as one of the inputs.
  T GetUser() const { return user_; }
  // Get the position of the input record that this use corresponds to.
  size_t GetIndex() const { return index_; }
  // Set the position of the input record that this use corresponds to.
  void SetIndex(size_t index) { index_ = index; }

 private:
  // Constructible only through HInstruction (friend below).
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};

template <typename T>
using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1511
// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), before_use_node_() {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}

  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
      : HUserRecord(old_record.instruction_, before_use_node) {}
  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
      : instruction_(instruction), before_use_node_(before_use_node) {
    DCHECK(instruction_ != nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  // The stored iterator points *before* this record's node in the use list
  // (forward lists can only erase/insert "after" a position), so the node
  // itself is one increment away; see GetUseNode().
  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
  typename HUseList<T>::iterator before_use_node_;
};
1539
// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
// This is used for HInstruction::GetInputs() to return a container wrapper providing
// HInstruction* values even though the underlying container has HUserRecord<>s.
struct HInputExtractor {
  // Mutable-record overload.
  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
  // Const-record overload; yields a const pointer accordingly.
  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
};

// Views over an instruction's input records that yield HInstruction* directly.
using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
1554
1555/**
1556 * Side-effects representation.
1557 *
1558 * For write/read dependences on fields/arrays, the dependence analysis uses
1559 * type disambiguation (e.g. a float field write cannot modify the value of an
1560 * integer field read) and the access type (e.g.  a reference array write cannot
1561 * modify the value of a reference field read [although it may modify the
1562 * reference fetch prior to reading the field, which is represented by its own
1563 * write/read dependence]). The analysis makes conservative points-to
1564 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1565 * the same, and any reference read depends on any reference read without
1566 * further regard of its type).
1567 *
1568 * The internal representation uses 38-bit and is described in the table below.
1569 * The first line indicates the side effect, and for field/array accesses the
1570 * second line indicates the type of the access (in the order of the
1571 * DataType::Type enum).
1572 * The two numbered lines below indicate the bit position in the bitfield (read
1573 * vertically).
1574 *
1575 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1576 *   +-------------+---------+---------+--------------+---------+---------+
1577 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1578 *   |      3      |333333322|222222221|       1      |111111110|000000000|
1579 *   |      7      |654321098|765432109|       8      |765432109|876543210|
1580 *
1581 * Note that, to ease the implementation, 'changes' bits are least significant
1582 * bits, while 'dependency' bits are most significant bits.
1583 */
1584class SideEffects : public ValueObject {
1585 public:
1586  SideEffects() : flags_(0) {}
1587
1588  static SideEffects None() {
1589    return SideEffects(0);
1590  }
1591
1592  static SideEffects All() {
1593    return SideEffects(kAllChangeBits | kAllDependOnBits);
1594  }
1595
1596  static SideEffects AllChanges() {
1597    return SideEffects(kAllChangeBits);
1598  }
1599
1600  static SideEffects AllDependencies() {
1601    return SideEffects(kAllDependOnBits);
1602  }
1603
1604  static SideEffects AllExceptGCDependency() {
1605    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
1606  }
1607
1608  static SideEffects AllWritesAndReads() {
1609    return SideEffects(kAllWrites | kAllReads);
1610  }
1611
1612  static SideEffects AllWrites() {
1613    return SideEffects(kAllWrites);
1614  }
1615
1616  static SideEffects AllReads() {
1617    return SideEffects(kAllReads);
1618  }
1619
1620  static SideEffects FieldWriteOfType(DataType::Type type, bool is_volatile) {
1621    return is_volatile
1622        ? AllWritesAndReads()
1623        : SideEffects(TypeFlag(type, kFieldWriteOffset));
1624  }
1625
1626  static SideEffects ArrayWriteOfType(DataType::Type type) {
1627    return SideEffects(TypeFlag(type, kArrayWriteOffset));
1628  }
1629
1630  static SideEffects FieldReadOfType(DataType::Type type, bool is_volatile) {
1631    return is_volatile
1632        ? AllWritesAndReads()
1633        : SideEffects(TypeFlag(type, kFieldReadOffset));
1634  }
1635
1636  static SideEffects ArrayReadOfType(DataType::Type type) {
1637    return SideEffects(TypeFlag(type, kArrayReadOffset));
1638  }
1639
1640  static SideEffects CanTriggerGC() {
1641    return SideEffects(1ULL << kCanTriggerGCBit);
1642  }
1643
1644  static SideEffects DependsOnGC() {
1645    return SideEffects(1ULL << kDependsOnGCBit);
1646  }
1647
1648  // Combines the side-effects of this and the other.
1649  SideEffects Union(SideEffects other) const {
1650    return SideEffects(flags_ | other.flags_);
1651  }
1652
1653  SideEffects Exclusion(SideEffects other) const {
1654    return SideEffects(flags_ & ~other.flags_);
1655  }
1656
1657  void Add(SideEffects other) {
1658    flags_ |= other.flags_;
1659  }
1660
1661  bool Includes(SideEffects other) const {
1662    return (other.flags_ & flags_) == other.flags_;
1663  }
1664
1665  bool HasSideEffects() const {
1666    return (flags_ & kAllChangeBits);
1667  }
1668
1669  bool HasDependencies() const {
1670    return (flags_ & kAllDependOnBits);
1671  }
1672
1673  // Returns true if there are no side effects or dependencies.
1674  bool DoesNothing() const {
1675    return flags_ == 0;
1676  }
1677
1678  // Returns true if something is written.
1679  bool DoesAnyWrite() const {
1680    return (flags_ & kAllWrites);
1681  }
1682
1683  // Returns true if something is read.
1684  bool DoesAnyRead() const {
1685    return (flags_ & kAllReads);
1686  }
1687
1688  // Returns true if potentially everything is written and read
1689  // (every type and every kind of access).
1690  bool DoesAllReadWrite() const {
1691    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
1692  }
1693
1694  bool DoesAll() const {
1695    return flags_ == (kAllChangeBits | kAllDependOnBits);
1696  }
1697
1698  // Returns true if `this` may read something written by `other`.
1699  bool MayDependOn(SideEffects other) const {
1700    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
1701    return (other.flags_ & depends_on_flags);
1702  }
1703
1704  // Returns string representation of flags (for debugging only).
1705  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
1706  std::string ToString() const {
1707    std::string flags = "|";
1708    for (int s = kLastBit; s >= 0; s--) {
1709      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
1710      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
1711        // This is a bit for the GC side effect.
1712        if (current_bit_is_set) {
1713          flags += "GC";
1714        }
1715        flags += "|";
1716      } else {
1717        // This is a bit for the array/field analysis.
1718        // The underscore character stands for the 'can trigger GC' bit.
1719        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
1720        if (current_bit_is_set) {
1721          flags += kDebug[s];
1722        }
1723        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
1724            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
1725          flags += "|";
1726        }
1727      }
1728    }
1729    return flags;
1730  }
1731
1732  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }
1733
1734 private:
1735  static constexpr int kFieldArrayAnalysisBits = 9;
1736
1737  static constexpr int kFieldWriteOffset = 0;
1738  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
1739  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
1740  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;
1741
1742  static constexpr int kChangeBits = kCanTriggerGCBit + 1;
1743
1744  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
1745  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
1746  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
1747  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;
1748
1749  static constexpr int kLastBit = kDependsOnGCBit;
1750  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;
1751
1752  // Aliases.
1753
1754  static_assert(kChangeBits == kDependOnBits,
1755                "the 'change' bits should match the 'depend on' bits.");
1756
1757  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
1758  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
1759  static constexpr uint64_t kAllWrites =
1760      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
1761  static constexpr uint64_t kAllReads =
1762      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;
1763
1764  // Translates type to bit flag. The type must correspond to a Java type.
1765  static uint64_t TypeFlag(DataType::Type type, int offset) {
1766    int shift;
1767    switch (type) {
1768      case DataType::Type::kReference: shift = 0; break;
1769      case DataType::Type::kBool:      shift = 1; break;
1770      case DataType::Type::kInt8:      shift = 2; break;
1771      case DataType::Type::kUint16:    shift = 3; break;
1772      case DataType::Type::kInt16:     shift = 4; break;
1773      case DataType::Type::kInt32:     shift = 5; break;
1774      case DataType::Type::kInt64:     shift = 6; break;
1775      case DataType::Type::kFloat32:   shift = 7; break;
1776      case DataType::Type::kFloat64:   shift = 8; break;
1777      default:
1778        LOG(FATAL) << "Unexpected data type " << type;
1779        UNREACHABLE();
1780    }
1781    DCHECK_LE(kFieldWriteOffset, shift);
1782    DCHECK_LT(shift, kArrayWriteOffset);
1783    return UINT64_C(1) << (shift + offset);
1784  }
1785
1786  // Private constructor on direct flags value.
1787  explicit SideEffects(uint64_t flags) : flags_(flags) {}
1788
1789  uint64_t flags_;
1790};
1791
// A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  ALWAYS_INLINE HEnvironment(ArenaAllocator* arena,
                             size_t number_of_vregs,
                             ArtMethod* method,
                             uint32_t dex_pc,
                             HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       method_(method),
       dex_pc_(dex_pc),
       holder_(holder) {
  }

  // Copies the shape of `to_copy` (vreg count, method, dex pc) but not the
  // vreg contents; callers follow up with CopyFrom() to copy values.
  ALWAYS_INLINE HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetMethod(),
                     to_copy.GetDexPc(),
                     holder) {}

  // Lazily allocates one Location slot per vreg. Must be called at most once
  // (checked), before any SetLocationAt()/GetLocationAt().
  void AllocateLocations() {
    DCHECK(locations_.empty());
    locations_.resize(vregs_.size());
  }

  // Appends a deep copy of `parent` (and its own parent chain) at the
  // outermost end of this environment's parent chain.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      // Not at the outermost environment yet; recurse outward.
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Raw setter: does not register `this` as a user of `instruction`.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  // Unlinks the use record at `index` from the used instruction's env-use list.
  void RemoveAsUserOfInput(size_t index) const;

  // Number of vreg entries in this (innermost) environment.
  size_t Size() const { return vregs_.size(); }

  // Outer (caller) environment when this one comes from an inlined invoke.
  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  ArtMethod* GetMethod() const {
    return method_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }


  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  HEnvironment* parent_;
  ArtMethod* method_;
  const uint32_t dex_pc_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  // HInstruction directly updates vregs_ when fixing up env-use records.
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
1893
1894class HInstruction : public ArenaObject<kArenaAllocInstruction> {
1895 public:
  // Instructions start unlinked (no block, no prev/next), unnumbered
  // (id/ssa index -1) and with invalid reference type info; the graph
  // builder and later analyses fill these in.
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(0u),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
  }

  virtual ~HInstruction() {}

// One enumerator (kAdd, kPhi, ...) per instruction class, concrete and abstract.
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND
1920
  // Linked-list navigation within the owning basic block.
  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  // Same as above but skipping over parallel-move instructions (defined in nodes.cc).
  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  // Note: requires the instruction to be in a block.
  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
  bool IsIrreducibleLoopHeaderPhi() const {
    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
  }

  // Subclasses expose their input storage through this single virtual.
  virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;

  ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
    // One virtual method is enough, just const_cast<> and then re-add the const.
    return ArrayRef<const HUserRecord<HInstruction*>>(
        const_cast<HInstruction*>(this)->GetInputRecords());
  }

  // Views over the inputs that yield HInstruction* instead of HUserRecord<>.
  HInputsRef GetInputs() {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  HConstInputsRef GetInputs() const {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  size_t InputCount() const { return GetInputRecords().size(); }
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }
1955
1956  bool HasInput(HInstruction* input) const {
1957    for (const HInstruction* i : GetInputs()) {
1958      if (i == input) {
1959        return true;
1960      }
1961    }
1962    return false;
1963  }
1964
  // Raw setter: overwrites the input record without updating the use list of
  // the old or new input.
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  // Visitor dispatch and debug name; implemented via DECLARE_INSTRUCTION.
  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  virtual DataType::Type GetType() const { return DataType::Type::kVoid; }

  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  // Can the instruction throw?
  // TODO: We should rename to CanVisiblyThrow, as some instructions (like HNewInstance),
  // could throw OOME, but it is still OK to remove them if they are unused.
  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), DataType::Type::kReference) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  virtual bool IsActualObject() const {
    return GetType() == DataType::Type::kReference;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  // Only valid on reference-typed instructions (checked).
  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), DataType::Type::kReference);
    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
  }
2012
  // Registers `user` as using this instruction at input position `index`.
  // The node is pushed at the front of the use list; since that changes the
  // 'before' iterator of the previous front node, the fix-up pass repairs the
  // user records of every node in [begin, fixup_end).
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    HUseListNode<HInstruction*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }

  // Same as AddUseAt() but for environment uses.
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }

  // Unlinks this instruction's use node from the instruction it uses at
  // input position `input`. The successor node's user record must then be
  // re-pointed at the same 'before' node, which the fix-up call does.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }

  // Applies RemoveAsUserOfInput() to every input.
  void RemoveAsUserOfAllInputs() {
    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
      input_use.GetInstruction()->uses_.erase_after(before_use_node);
      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
    }
  }
2047
  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
  }

  // Whether it is safe (ignoring uses) to delete this instruction from the
  // graph: no writes, no throwing, and not one of the kinds that must stay.
  bool IsRemovable() const {
    return
        !DoesAnyWrite() &&
        !CanThrow() &&
        !IsSuspendCheck() &&
        !IsControlFlow() &&
        !IsNativeDebugInfo() &&
        !IsParameterValue() &&
        // If we added an explicit barrier then we should keep it.
        !IsMemoryBarrier() &&
        !IsConstructorFence();
  }

  bool IsDeadAndRemovable() const {
    return IsRemovable() && !HasUses();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Prepends `environment` to the existing environment chain (raw: no
  // use-list updates).
  void InsertRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ != nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    DCHECK(environment->GetParent() == nullptr);
    environment->parent_ = environment_;
    environment_ = environment;
  }
2104
  void RemoveEnvironment();

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // As CopyEnvironmentFrom(), but loop phis of `block` are replaced by their
  // first input (see HEnvironment::CopyFromWithLoopPhiAdjustment).
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`.
  void MoveBefore(HInstruction* cursor, bool do_checks = true);

  // Move `this` before its first user and out of any loops. If there is no
  // out-of-loop user that dominates all other users, move the instruction
  // to the end of the out-of-loop common dominator of the user's blocks.
  //
  // This can be used only on non-throwing instructions with no side effects that
  // have at least one use but no environment uses.
  void MoveBeforeFirstUserAndOutOfLoops();
2158
// Declares Is/As checked-cast helpers for every concrete instruction; the
// definitions (which rely on GetKind()) live outside the class.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const;                                                       \
  const H##type* As##type() const;                                             \
  H##type* As##type();

  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

// For abstract instructions the helpers are virtual with a null default;
// DECLARE_ABSTRACT_INSTRUCTION overrides them in the matching class.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const { return (As##type() != nullptr); }                    \
  virtual const H##type* As##type() const { return nullptr; }                  \
  virtual H##type* As##type() { return nullptr; }
  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // Returns whether the instruction can be moved within the graph.
  // TODO: this method is used by LICM and GVN with possibly different
  //       meanings? split and rename?
  virtual bool CanBeMoved() const { return false; }

  // Returns whether the two instructions are of the same kind.
  virtual bool InstructionTypeEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
  bool Equals(const HInstruction* other) const;

  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
  // is adopted and implemented by our C++ compiler(s). For now, we need to hide
  // the virtual function because the __attribute__((__pure__)) doesn't really
  // apply the strong requirement for virtual functions, preventing optimizations.
  InstructionKind GetKind() const PURE;
  virtual InstructionKind GetKindInternal() const = 0;
2202
2203  virtual size_t ComputeHashCode() const {
2204    size_t result = GetKind();
2205    for (const HInstruction* input : GetInputs()) {
2206      result = (result * 31) + input->GetId();
2207    }
2208    return result;
2209  }
2210
  SideEffects GetSideEffects() const { return side_effects_; }
  void SetSideEffects(SideEffects other) { side_effects_ = other; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  // Liveness information, filled in by register allocation / liveness analysis.
  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
  //      to access the dex cache.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsCurrentMethod();
  }

  // Returns whether the code generation of the instruction will require to have access
  // to the dex cache of the current method's declaring class via the current method.
  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }
2247
 protected:
  // If set, the machine code for this instruction is assumed to be generated by
  // its users. Used by liveness analysis to compute use positions accordingly.
  static constexpr size_t kFlagEmittedAtUseSite = 0u;
  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
  // Subclasses start their own packed flags/fields at this bit.
  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;

  // Returns a copy of the record; writes go through SetRawInputRecordAt().
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
    return GetInputRecords()[i];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
    input_records[index] = input;
  }

  uint32_t GetPackedFields() const {
    return packed_fields_;
  }

  // Single-bit accessors into packed_fields_, indexed by the kFlag* constants.
  template <size_t flag>
  bool GetPackedFlag() const {
    return (packed_fields_ & (1u << flag)) != 0u;
  }

  template <size_t flag>
  void SetPackedFlag(bool value = true) {
    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
  }

  // Multi-bit accessors; BitFieldType describes the field's position and width.
  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }

  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }
2289
 private:
  // The use lists are intrusive singly-linked lists, and each user's input
  // record caches an iterator to the node *before* its own use node so that
  // the use can be unlinked in constant time. The four helpers below restore
  // that invariant after use nodes have been inserted or removed.

  // After inserting use nodes in the range [uses_.begin(), fixup_end),
  // rewrite the affected users' input records so each one again points to
  // the node preceding its use node.
  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }

  // After removing the use node that followed `before_use_node`, fix the
  // input record of the user whose use node now follows `before_use_node`.
  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }

  // Same as FixUpUserRecordsAfterUseInsertion, but for environment uses.
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }

  // Same as FixUpUserRecordsAfterUseRemoval, but for environment uses.
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }
2330
  // Intrusive links to the previous/next instruction in the containing
  // block's instruction list (HInstructionList is a friend so it can
  // maintain them).
  HInstruction* previous_;
  HInstruction* next_;
  // The basic block this instruction is contained in.
  HBasicBlock* block_;
  // The dex program counter this instruction was created for.
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // Packed fields: generic flags plus subclass-specific bit fields,
  // accessed through the Get/SetPackedFlag/Field helpers above.
  uint32_t packed_fields_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
2381};
2382std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2383
2384// Iterates over the instructions, while preserving the next instruction
2385// in case the current instruction gets removed from the list by the user
2386// of this iterator.
2387class HInstructionIterator : public ValueObject {
2388 public:
2389  explicit HInstructionIterator(const HInstructionList& instructions)
2390      : instruction_(instructions.first_instruction_) {
2391    next_ = Done() ? nullptr : instruction_->GetNext();
2392  }
2393
2394  bool Done() const { return instruction_ == nullptr; }
2395  HInstruction* Current() const { return instruction_; }
2396  void Advance() {
2397    instruction_ = next_;
2398    next_ = Done() ? nullptr : instruction_->GetNext();
2399  }
2400
2401 private:
2402  HInstruction* instruction_;
2403  HInstruction* next_;
2404
2405  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2406};
2407
// Iterates over the instructions without saving the next instruction,
// therefore handling changes in the graph potentially made by the user
// of this iterator.
class HInstructionIteratorHandleChanges : public ValueObject {
 public:
  explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
      : instruction_(instructions.first_instruction_) {
  }

  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
  void Advance() {
    // Follow the current link at the time of the call, so insertions or
    // removals made since Current() was returned are observed.
    instruction_ = instruction_->GetNext();
  }

 private:
  HInstruction* instruction_;

  DISALLOW_COPY_AND_ASSIGN(HInstructionIteratorHandleChanges);
};
2428
2429
2430class HBackwardInstructionIterator : public ValueObject {
2431 public:
2432  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2433      : instruction_(instructions.last_instruction_) {
2434    next_ = Done() ? nullptr : instruction_->GetPrevious();
2435  }
2436
2437  bool Done() const { return instruction_ == nullptr; }
2438  HInstruction* Current() const { return instruction_; }
2439  void Advance() {
2440    instruction_ = next_;
2441    next_ = Done() ? nullptr : instruction_->GetPrevious();
2442  }
2443
2444 private:
2445  HInstruction* instruction_;
2446  HInstruction* next_;
2447
2448  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2449};
2450
// Base class for instructions whose number of inputs may change over their
// lifetime (e.g. HPhi, HDeoptimize below); input records are kept in an
// arena-allocated vector.
class HVariableInputSizeInstruction : public HInstruction {
 public:
  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  // Appends, inserts, or removes a single input (defined out of line).
  void AddInput(HInstruction* input);
  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

  // Removes all the inputs.
  // Also removes this instruction from each input's use list
  // (for non-environment uses only).
  void RemoveAllInputs();

 protected:
  HVariableInputSizeInstruction(SideEffects side_effects,
                                uint32_t dex_pc,
                                ArenaAllocator* arena,
                                size_t number_of_inputs,
                                ArenaAllocKind kind)
      : HInstruction(side_effects, dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kind)) {}

  // Resizable input records, in contrast to HTemplateInstruction's inline array.
  ArenaVector<HUserRecord<HInstruction*>> inputs_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HVariableInputSizeInstruction);
};
2481
2482template<size_t N>
2483class HTemplateInstruction: public HInstruction {
2484 public:
2485  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
2486      : HInstruction(side_effects, dex_pc), inputs_() {}
2487  virtual ~HTemplateInstruction() {}
2488
2489  using HInstruction::GetInputRecords;  // Keep the const version visible.
2490  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
2491    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
2492  }
2493
2494 private:
2495  std::array<HUserRecord<HInstruction*>, N> inputs_;
2496
2497  friend class SsaBuilder;
2498};
2499
2500// HTemplateInstruction specialization for N=0.
2501template<>
2502class HTemplateInstruction<0>: public HInstruction {
2503 public:
2504  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
2505      : HInstruction(side_effects, dex_pc) {}
2506
2507  virtual ~HTemplateInstruction() {}
2508
2509  using HInstruction::GetInputRecords;  // Keep the const version visible.
2510  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
2511    return ArrayRef<HUserRecord<HInstruction*>>();
2512  }
2513
2514 private:
2515  friend class SsaBuilder;
2516};
2517
2518template<intptr_t N>
2519class HExpression : public HTemplateInstruction<N> {
2520 public:
2521  HExpression<N>(DataType::Type type, SideEffects side_effects, uint32_t dex_pc)
2522      : HTemplateInstruction<N>(side_effects, dex_pc) {
2523    this->template SetPackedField<TypeField>(type);
2524  }
2525  virtual ~HExpression() {}
2526
2527  DataType::Type GetType() const OVERRIDE {
2528    return TypeField::Decode(this->GetPackedFields());
2529  }
2530
2531 protected:
2532  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
2533  static constexpr size_t kFieldTypeSize =
2534      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
2535  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
2536  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
2537                "Too many packed fields.");
2538  using TypeField = BitField<DataType::Type, kFieldType, kFieldTypeSize>;
2539};
2540
// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block.
class HReturnVoid FINAL : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  // Always a block terminator.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};
2555
// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block.
class HReturn FINAL : public HTemplateInstruction<1> {
 public:
  // Input 0 is the value being returned.
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // Always a block terminator.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};
2572
// An SSA phi. Its value is one of its inputs, chosen by the predecessor
// through which control flow reached the phi's block. It also records the
// dex register number it was created for.
class HPhi FINAL : public HVariableInputSizeInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       DataType::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HVariableInputSizeInstruction(
            SideEffects::None(),
            dex_pc,
            arena,
            number_of_inputs,
            kArenaAllocPhiInputs),
        reg_number_(reg_number) {
    SetPackedField<TypeField>(ToPhiType(type));
    DCHECK_NE(GetType(), DataType::Type::kVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static DataType::Type ToPhiType(DataType::Type type) {
    return DataType::Kind(type);
  }

  // Whether this phi is located in a catch block.
  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  DataType::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
  void SetType(DataType::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    //  (1) int  -> float/ref (primitive type propagation),
    //  (2) long -> double (primitive type propagation).
    DCHECK(GetType() == new_type ||
           (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kFloat32) ||
           (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kReference) ||
           (GetType() == DataType::Type::kInt64 && new_type == DataType::Type::kFloat64));
    SetPackedField<TypeField>(new_type);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  // The dex register this phi was created for.
  uint32_t GetRegNumber() const { return reg_number_; }

  // Liveness as tracked by SsaBuilder/SsaPhiElimination, stored in a packed flag.
  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
  bool IsDead() const { return !IsLive(); }
  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }

  // Whether `other` is a phi in the same block created for the same dex register.
  bool IsVRegEquivalentOf(const HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Whether an adjacent phi has the same dex register (phis for the same
  // dex register are assumed adjacent, see GetNextEquivalentPhiWithSameType).
  bool HasEquivalentPhi() const {
    if (GetPrevious() != nullptr && GetPrevious()->AsPhi()->GetRegNumber() == GetRegNumber()) {
      return true;
    }
    if (GetNext() != nullptr && GetNext()->AsPhi()->GetRegNumber() == GetRegNumber()) {
      return true;
    }
    return false;
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 private:
  // Packed field layout: type, then the liveness and nullability flags.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeField = BitField<DataType::Type, kFieldType, kFieldTypeSize>;

  // The dex register this phi was created for.
  const uint32_t reg_number_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};
2672
// The exit instruction is the only instruction of the exit block.
// Instructions aborting the method (HThrow and HReturn) must branch to the
// exit block.
class HExit FINAL : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  // Always a block terminator.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};
2687
// Jumps from one block to another.
class HGoto FINAL : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  // Always a block terminator.
  bool IsControlFlow() const OVERRIDE { return true; }

  // The jump target: the block's single successor.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};
2704
// Base class of all constants. Constants are side-effect free and movable,
// and expose predicates used by simplification passes below.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(DataType::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern?
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  // Returns the raw bit representation of the constant, zero-extended to 64 bits.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};
2728
2729class HNullConstant FINAL : public HConstant {
2730 public:
2731  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2732    return true;
2733  }
2734
2735  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2736
2737  size_t ComputeHashCode() const OVERRIDE { return 0; }
2738
2739  // The null constant representation is a 0-bit pattern.
2740  virtual bool IsZeroBitPattern() const { return true; }
2741
2742  DECLARE_INSTRUCTION(NullConstant);
2743
2744 private:
2745  explicit HNullConstant(uint32_t dex_pc = kNoDexPc)
2746      : HConstant(DataType::Type::kReference, dex_pc) {}
2747
2748  friend class HGraph;
2749  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
2750};
2751
2752// Constants of the type int. Those can be from Dex instructions, or
2753// synthesized (for example with the if-eqz instruction).
2754class HIntConstant FINAL : public HConstant {
2755 public:
2756  int32_t GetValue() const { return value_; }
2757
2758  uint64_t GetValueAsUint64() const OVERRIDE {
2759    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
2760  }
2761
2762  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2763    DCHECK(other->IsIntConstant()) << other->DebugName();
2764    return other->AsIntConstant()->value_ == value_;
2765  }
2766
2767  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }
2768
2769  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
2770  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
2771  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
2772  bool IsOne() const OVERRIDE { return GetValue() == 1; }
2773
2774  // Integer constants are used to encode Boolean values as well,
2775  // where 1 means true and 0 means false.
2776  bool IsTrue() const { return GetValue() == 1; }
2777  bool IsFalse() const { return GetValue() == 0; }
2778
2779  DECLARE_INSTRUCTION(IntConstant);
2780
2781 private:
2782  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
2783      : HConstant(DataType::Type::kInt32, dex_pc), value_(value) {}
2784  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
2785      : HConstant(DataType::Type::kInt32, dex_pc), value_(value ? 1 : 0) {}
2786
2787  const int32_t value_;
2788
2789  friend class HGraph;
2790  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
2791  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
2792  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
2793};
2794
2795class HLongConstant FINAL : public HConstant {
2796 public:
2797  int64_t GetValue() const { return value_; }
2798
2799  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }
2800
2801  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2802    DCHECK(other->IsLongConstant()) << other->DebugName();
2803    return other->AsLongConstant()->value_ == value_;
2804  }
2805
2806  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2807
2808  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
2809  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
2810  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
2811  bool IsOne() const OVERRIDE { return GetValue() == 1; }
2812
2813  DECLARE_INSTRUCTION(LongConstant);
2814
2815 private:
2816  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
2817      : HConstant(DataType::Type::kInt64, dex_pc), value_(value) {}
2818
2819  const int64_t value_;
2820
2821  friend class HGraph;
2822  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
2823};
2824
// Constants of the type float.
class HFloatConstant FINAL : public HConstant {
 public:
  float GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
  }

  // Bit-exact equality: distinguishes +0.0f from -0.0f and compares NaNs
  // by their exact bit pattern.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsFloatConstant()) << other->DebugName();
    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  // Bit-exact comparison against -1.0f.
  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
  }
  // Arithmetic zero matches both +0.0f and -0.0f.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // Exactly the +0.0f bit pattern (excludes -0.0f, unlike IsArithmeticZero()).
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(FloatConstant);

 private:
  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
      : HConstant(DataType::Type::kFloat32, dex_pc), value_(value) {}
  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(DataType::Type::kFloat32, dex_pc), value_(bit_cast<float, int32_t>(value)) {}

  const float value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
};
2877
// Constants of the type double.
class HDoubleConstant FINAL : public HConstant {
 public:
  double GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }

  // Bit-exact equality: distinguishes +0.0 from -0.0 and compares NaNs
  // by their exact bit pattern.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsDoubleConstant()) << other->DebugName();
    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  // Bit-exact comparison against -1.0.
  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
  }
  // Arithmetic zero matches both +0.0 and -0.0.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // Exactly the +0.0 bit pattern (excludes -0.0, unlike IsArithmeticZero()).
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(DoubleConstant);

 private:
  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
      : HConstant(DataType::Type::kFloat64, dex_pc), value_(value) {}
  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(DataType::Type::kFloat64, dex_pc), value_(bit_cast<double, int64_t>(value)) {}

  const double value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
};
2928
// Conditional branch. A block ending with an HIf instruction must have
// two successors.
class HIf FINAL : public HTemplateInstruction<1> {
 public:
  // Input 0 is the condition value.
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  // Always a block terminator.
  bool IsControlFlow() const OVERRIDE { return true; }

  // Successor at index 0 is taken when the condition holds.
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // Successor at index 1 is taken when the condition does not hold.
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};
2953
2954
// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary FINAL : public HTemplateInstruction<0> {
 public:
  // Whether this boundary enters or exits the try block.
  enum class BoundaryKind {
    kEntry,
    kExit,
    kLast = kExit
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  }

  // Always a block terminator.
  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // Returns a view of the exception handlers (all successors except index zero).
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  // The boundary kind, packed after the generic HInstruction bits.
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};
3016
3017// Deoptimize to interpreter, upon checking a condition.
3018class HDeoptimize FINAL : public HVariableInputSizeInstruction {
3019 public:
3020  // Use this constructor when the `HDeoptimize` acts as a barrier, where no code can move
3021  // across.
3022  HDeoptimize(ArenaAllocator* arena, HInstruction* cond, DeoptimizationKind kind, uint32_t dex_pc)
3023      : HVariableInputSizeInstruction(
3024            SideEffects::All(),
3025            dex_pc,
3026            arena,
3027            /* number_of_inputs */ 1,
3028            kArenaAllocMisc) {
3029    SetPackedFlag<kFieldCanBeMoved>(false);
3030    SetPackedField<DeoptimizeKindField>(kind);
3031    SetRawInputAt(0, cond);
3032  }
3033
3034  // Use this constructor when the `HDeoptimize` guards an instruction, and any user
3035  // that relies on the deoptimization to pass should have its input be the `HDeoptimize`
3036  // instead of `guard`.
3037  // We set CanTriggerGC to prevent any intermediate address to be live
3038  // at the point of the `HDeoptimize`.
3039  HDeoptimize(ArenaAllocator* arena,
3040              HInstruction* cond,
3041              HInstruction* guard,
3042              DeoptimizationKind kind,
3043              uint32_t dex_pc)
3044      : HVariableInputSizeInstruction(
3045            SideEffects::CanTriggerGC(),
3046            dex_pc,
3047            arena,
3048            /* number_of_inputs */ 2,
3049            kArenaAllocMisc) {
3050    SetPackedFlag<kFieldCanBeMoved>(true);
3051    SetPackedField<DeoptimizeKindField>(kind);
3052    SetRawInputAt(0, cond);
3053    SetRawInputAt(1, guard);
3054  }
3055
3056  bool CanBeMoved() const OVERRIDE { return GetPackedFlag<kFieldCanBeMoved>(); }
3057
3058  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3059    return (other->CanBeMoved() == CanBeMoved()) && (other->AsDeoptimize()->GetKind() == GetKind());
3060  }
3061
3062  bool NeedsEnvironment() const OVERRIDE { return true; }
3063
3064  bool CanThrow() const OVERRIDE { return true; }
3065
3066  DeoptimizationKind GetDeoptimizationKind() const { return GetPackedField<DeoptimizeKindField>(); }
3067
3068  DataType::Type GetType() const OVERRIDE {
3069    return GuardsAnInput() ? GuardedInput()->GetType() : DataType::Type::kVoid;
3070  }
3071
3072  bool GuardsAnInput() const {
3073    return InputCount() == 2;
3074  }
3075
3076  HInstruction* GuardedInput() const {
3077    DCHECK(GuardsAnInput());
3078    return InputAt(1);
3079  }
3080
3081  void RemoveGuard() {
3082    RemoveInputAt(1);
3083  }
3084
3085  DECLARE_INSTRUCTION(Deoptimize);
3086
3087 private:
3088  static constexpr size_t kFieldCanBeMoved = kNumberOfGenericPackedBits;
3089  static constexpr size_t kFieldDeoptimizeKind = kNumberOfGenericPackedBits + 1;
3090  static constexpr size_t kFieldDeoptimizeKindSize =
3091      MinimumBitsToStore(static_cast<size_t>(DeoptimizationKind::kLast));
3092  static constexpr size_t kNumberOfDeoptimizePackedBits =
3093      kFieldDeoptimizeKind + kFieldDeoptimizeKindSize;
3094  static_assert(kNumberOfDeoptimizePackedBits <= kMaxNumberOfPackedBits,
3095                "Too many packed fields.");
3096  using DeoptimizeKindField =
3097      BitField<DeoptimizationKind, kFieldDeoptimizeKind, kFieldDeoptimizeKindSize>;
3098
3099  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
3100};
3101
// Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
// The compiled code checks this flag value in a guard before devirtualized call and
// if it's true, starts to do deoptimization.
// It has a 4-byte slot on stack.
// TODO: allocate a register for this flag.
class HShouldDeoptimizeFlag FINAL : public HVariableInputSizeInstruction {
 public:
  // CHA guards are only optimized in a separate pass and it has no side effects
  // with regard to other passes.
  HShouldDeoptimizeFlag(ArenaAllocator* arena, uint32_t dex_pc)
      : HVariableInputSizeInstruction(SideEffects::None(), dex_pc, arena, 0, kArenaAllocCHA) {
  }

  // The flag is read as a 32-bit integer.
  DataType::Type GetType() const OVERRIDE { return DataType::Type::kInt32; }

  // We do all CHA guard elimination/motion in a single pass, after which there is no
  // further guard elimination/motion since a guard might have been used for justification
  // of the elimination of another guard. Therefore, we pretend this guard cannot be moved
  // to avoid other optimizations trying to move it.
  bool CanBeMoved() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShouldDeoptimizeFlag);
};
3128
// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod FINAL : public HExpression<0> {
 public:
  // `type` is supplied by the caller; NOTE(review): presumably a
  // pointer-sized type chosen per target word size — confirm at call sites.
  explicit HCurrentMethod(DataType::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};
3142
3143// Fetches an ArtMethod from the virtual table or the interface method table
3144// of a class.
3145class HClassTableGet FINAL : public HExpression<1> {
3146 public:
3147  enum class TableKind {
3148    kVTable,
3149    kIMTable,
3150    kLast = kIMTable
3151  };
3152  HClassTableGet(HInstruction* cls,
3153                 DataType::Type type,
3154                 TableKind kind,
3155                 size_t index,
3156                 uint32_t dex_pc)
3157      : HExpression(type, SideEffects::None(), dex_pc),
3158        index_(index) {
3159    SetPackedField<TableKindField>(kind);
3160    SetRawInputAt(0, cls);
3161  }
3162
3163  bool CanBeMoved() const OVERRIDE { return true; }
3164  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3165    return other->AsClassTableGet()->GetIndex() == index_ &&
3166        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
3167  }
3168
3169  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
3170  size_t GetIndex() const { return index_; }
3171
3172  DECLARE_INSTRUCTION(ClassTableGet);
3173
3174 private:
3175  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
3176  static constexpr size_t kFieldTableKindSize =
3177      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
3178  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
3179  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
3180                "Too many packed fields.");
3181  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
3182
3183  // The index of the ArtMethod in the table.
3184  const size_t index_;
3185
3186  DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
3187};
3188
// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
class HPackedSwitch FINAL : public HTemplateInstruction<1> {
 public:
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
    : HTemplateInstruction(SideEffects::None(), dex_pc),
      start_value_(start_value),
      num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  // Terminates its basic block; one of the successors is taken at runtime.
  bool IsControlFlow() const OVERRIDE { return true; }

  // Case value matched by the first successor; successive entries match
  // consecutive values.
  int32_t GetStartValue() const { return start_value_; }

  // Number of case entries, excluding the trailing default successor.
  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};
3222
// Abstract base class for operations with a single input producing a result
// of `result_type`. Constructed with no side effects.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  DataType::Type GetResultType() const { return GetType(); }

  // Side-effect free (see constructor), so instances can be moved and two
  // instances of the same kind are considered data-equal.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};
3254
// Abstract base class for operations with two inputs and a result.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(DataType::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  DataType::Type GetResultType() const { return GetType(); }

  // Whether the inputs may be swapped without changing the result;
  // commutative subclasses (e.g. HEqual below) override this to return true.
  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    // Already canonical: identical inputs, or only the right input is constant.
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`.
  // The (null, null) case is a fatal error by default; subclasses that support
  // it (e.g. HEqual/HNotEqual below) override it.
  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                              HNullConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  // The (long, int) case is likewise a fatal error unless a subclass overrides it.
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};
3344
// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias
};

// Stream output for ComparisonBias (definition elsewhere).
std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
3355
3356class HCondition : public HBinaryOperation {
3357 public:
3358  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3359      : HBinaryOperation(DataType::Type::kBool, first, second, SideEffects::None(), dex_pc) {
3360    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
3361  }
3362
3363  // For code generation purposes, returns whether this instruction is just before
3364  // `instruction`, and disregard moves in between.
3365  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;
3366
3367  DECLARE_ABSTRACT_INSTRUCTION(Condition);
3368
3369  virtual IfCondition GetCondition() const = 0;
3370
3371  virtual IfCondition GetOppositeCondition() const = 0;
3372
3373  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
3374  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }
3375
3376  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
3377  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }
3378
3379  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3380    return GetPackedFields() == other->AsCondition()->GetPackedFields();
3381  }
3382
3383  bool IsFPConditionTrueIfNaN() const {
3384    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3385    IfCondition if_cond = GetCondition();
3386    if (if_cond == kCondNE) {
3387      return true;
3388    } else if (if_cond == kCondEQ) {
3389      return false;
3390    }
3391    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
3392  }
3393
3394  bool IsFPConditionFalseIfNaN() const {
3395    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3396    IfCondition if_cond = GetCondition();
3397    if (if_cond == kCondEQ) {
3398      return true;
3399    } else if (if_cond == kCondNE) {
3400      return false;
3401    }
3402    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
3403  }
3404
3405 protected:
3406  // Needed if we merge a HCompare into a HCondition.
3407  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
3408  static constexpr size_t kFieldComparisonBiasSize =
3409      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
3410  static constexpr size_t kNumberOfConditionPackedBits =
3411      kFieldComparisonBias + kFieldComparisonBiasSize;
3412  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3413  using ComparisonBiasField =
3414      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
3415
3416  template <typename T>
3417  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
3418
3419  template <typename T>
3420  int32_t CompareFP(T x, T y) const {
3421    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3422    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
3423    // Handle the bias.
3424    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
3425  }
3426
3427  // Return an integer constant containing the result of a condition evaluated at compile time.
3428  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
3429    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
3430  }
3431
3432 private:
3433  DISALLOW_COPY_AND_ASSIGN(HCondition);
3434};
3435
3436// Instruction to check if two inputs are equal to each other.
3437class HEqual FINAL : public HCondition {
3438 public:
3439  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3440      : HCondition(first, second, dex_pc) {}
3441
3442  bool IsCommutative() const OVERRIDE { return true; }
3443
3444  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3445                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3446    return MakeConstantCondition(true, GetDexPc());
3447  }
3448  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3449    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3450  }
3451  // In the following Evaluate methods, a HCompare instruction has
3452  // been merged into this HEqual instruction; evaluate it as
3453  // `Compare(x, y) == 0`.
3454  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3455    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3456                                 GetDexPc());
3457  }
3458  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3459    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3460  }
3461  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3462    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3463  }
3464
3465  DECLARE_INSTRUCTION(Equal);
3466
3467  IfCondition GetCondition() const OVERRIDE {
3468    return kCondEQ;
3469  }
3470
3471  IfCondition GetOppositeCondition() const OVERRIDE {
3472    return kCondNE;
3473  }
3474
3475 private:
3476  template <typename T> static bool Compute(T x, T y) { return x == y; }
3477
3478  DISALLOW_COPY_AND_ASSIGN(HEqual);
3479};
3480
// Instruction to check if two inputs are not equal to each other.
class HNotEqual FINAL : public HCondition {
 public:
  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  // null != null is statically false.
  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    return MakeConstantCondition(false, GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HNotEqual instruction; evaluate it as
  // `Compare(x, y) != 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(NotEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondNE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondEQ;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x != y; }

  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
};
3523
// Instruction to check if the first input is less than the second.
class HLessThan FINAL : public HCondition {
 public:
  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HLessThan instruction; evaluate it as
  // `Compare(x, y) < 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLT;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGE;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x < y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThan);
};
3560
// Instruction to check if the first input is less than or equal to the second.
class HLessThanOrEqual FINAL : public HCondition {
 public:
  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HLessThanOrEqual instruction; evaluate it as
  // `Compare(x, y) <= 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGT;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x <= y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
};
3597
// Instruction to check if the first input is greater than the second.
class HGreaterThan FINAL : public HCondition {
 public:
  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HGreaterThan instruction; evaluate it as
  // `Compare(x, y) > 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGT;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLE;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x > y; }

  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
};
3634
// Instruction to check if the first input is greater than or equal to the second.
class HGreaterThanOrEqual FINAL : public HCondition {
 public:
  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
  // `Compare(x, y) >= 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLT;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x >= y; }

  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
};
3671
3672class HBelow FINAL : public HCondition {
3673 public:
3674  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3675      : HCondition(first, second, dex_pc) {}
3676
3677  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3678    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3679  }
3680  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3681    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3682  }
3683  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3684                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3685    LOG(FATAL) << DebugName() << " is not defined for float values";
3686    UNREACHABLE();
3687  }
3688  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3689                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3690    LOG(FATAL) << DebugName() << " is not defined for double values";
3691    UNREACHABLE();
3692  }
3693
3694  DECLARE_INSTRUCTION(Below);
3695
3696  IfCondition GetCondition() const OVERRIDE {
3697    return kCondB;
3698  }
3699
3700  IfCondition GetOppositeCondition() const OVERRIDE {
3701    return kCondAE;
3702  }
3703
3704 private:
3705  template <typename T> static bool Compute(T x, T y) {
3706    return MakeUnsigned(x) < MakeUnsigned(y);
3707  }
3708
3709  DISALLOW_COPY_AND_ASSIGN(HBelow);
3710};
3711
// Instruction to check if the first input is unsigned-less-than-or-equal to the second.
class HBelowOrEqual FINAL : public HCondition {
 public:
  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Unsigned comparison has no meaning for floating-point inputs.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BelowOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondBE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondA;
  }

 private:
  // Reinterpret both operands as unsigned of the same width before comparing.
  template <typename T> static bool Compute(T x, T y) {
    return MakeUnsigned(x) <= MakeUnsigned(y);
  }

  DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
};
3751
// Instruction to check if the first input is unsigned-greater-than the second.
class HAbove FINAL : public HCondition {
 public:
  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Unsigned comparison has no meaning for floating-point inputs.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Above);

  IfCondition GetCondition() const OVERRIDE {
    return kCondA;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondBE;
  }

 private:
  // Reinterpret both operands as unsigned of the same width before comparing.
  template <typename T> static bool Compute(T x, T y) {
    return MakeUnsigned(x) > MakeUnsigned(y);
  }

  DISALLOW_COPY_AND_ASSIGN(HAbove);
};
3791
// Instruction to check if the first input is unsigned-greater-than-or-equal to the second.
class HAboveOrEqual FINAL : public HCondition {
 public:
  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Unsigned comparison has no meaning for floating-point inputs.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(AboveOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondAE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondB;
  }

 private:
  // Reinterpret both operands as unsigned of the same width before comparing.
  template <typename T> static bool Compute(T x, T y) {
    return MakeUnsigned(x) >= MakeUnsigned(y);
  }

  DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
};
3831
// Instruction to check how two inputs compare to each other.
// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
class HCompare FINAL : public HBinaryOperation {
 public:
  // Note that `comparison_type` is the type of comparison performed
  // between the comparison's inputs, not the type of the instantiated
  // HCompare instruction (which is always DataType::Type::kInt32).
  HCompare(DataType::Type comparison_type,
           HInstruction* first,
           HInstruction* second,
           ComparisonBias bias,
           uint32_t dex_pc)
      : HBinaryOperation(DataType::Type::kInt32,
                         first,
                         second,
                         SideEffectsForArchRuntimeCalls(comparison_type),
                         dex_pc) {
    SetPackedField<ComparisonBiasField>(bias);
    // Both inputs must match the declared comparison kind.
    DCHECK_EQ(comparison_type, DataType::Kind(first->GetType()));
    DCHECK_EQ(comparison_type, DataType::Kind(second->GetType()));
  }

  // Three-way compare of two ordered values: 1, -1 or 0.
  template <typename T>
  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way compare for floating point; NaN operands are resolved by the bias.
  template <typename T>
  int32_t ComputeFP(T x, T y) const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    // Note that there is no "cmp-int" Dex instruction so we shouldn't
    // reach this code path when processing a freshly built HIR
    // graph. However HCompare integer instructions can be synthesized
    // by the instruction simplifier to implement IntegerCompare and
    // IntegerSignum intrinsics, so we have to handle this case.
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  // Two compares are equal iff all packed fields (including the bias) match.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCompare()->GetPackedFields();
  }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }

  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
  // Only meaningful for floating-point comparisons.
  bool IsGtBias() const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    return GetBias() == ComparisonBias::kGtBias;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type type ATTRIBUTE_UNUSED) {
    // Comparisons do not require a runtime call in any back end.
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(Compare);

 protected:
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfComparePackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Return an integer constant containing the result of a comparison evaluated at compile time.
  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
    DCHECK(value == -1 || value == 0 || value == 1) << value;
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCompare);
};
3922
// Allocation of a new object instance (dex `new-instance`). Takes the class
// to instantiate (an HLoadClass, possibly wrapped in an HClinitCheck) as its
// single input and carries the runtime entrypoint used for the allocation.
class HNewInstance FINAL : public HExpression<1> {
 public:
  HNewInstance(HInstruction* cls,
               uint32_t dex_pc,
               dex::TypeIndex type_index,
               const DexFile& dex_file,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(DataType::Type::kReference, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetPackedFlag<kFlagFinalizable>(finalizable);
    SetRawInputAt(0, cls);
  }

  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // Can throw errors when out-of-memory or if it's not instantiable/accessible.
  bool CanThrow() const OVERRIDE { return true; }

  // True when the entrypoint performs the instantiability/access checks itself.
  bool NeedsChecks() const {
    return entrypoint_ == kQuickAllocObjectWithChecks;
  }

  // Whether the allocated class declares a finalizer (affects allocation path).
  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }

  // A successful allocation never produces null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  // The entrypoint may be refined by later passes (e.g. once the class is
  // known to be initialized) — hence not const.
  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  // Returns the HLoadClass input, looking through an HClinitCheck wrapper
  // if one was inserted between the load and this allocation.
  HLoadClass* GetLoadClass() const {
    HInstruction* input = InputAt(0);
    if (input->IsClinitCheck()) {
      input = input->InputAt(0);
    }
    DCHECK(input->IsLoadClass());
    return input->AsLoadClass();
  }

  bool IsStringAlloc() const;

  DECLARE_INSTRUCTION(NewInstance);

 private:
  static constexpr size_t kFlagFinalizable = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const dex::TypeIndex type_index_;
  const DexFile& dex_file_;
  QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
};
3987
// Whether a recognized intrinsic still requires an HEnvironment or a dex
// cache at runtime (consumed by HInvoke::SetIntrinsic).
enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};
3992
// Heap side effects of a recognized intrinsic, from weakest to strongest
// (consumed by HInvoke::SetIntrinsic).
enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};
3999
// Whether a recognized intrinsic may throw (consumed by HInvoke::SetIntrinsic).
enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
4004
// Base class for all method invocations. Holds the call arguments as variable
// inputs, the dex method index, the resolved ArtMethod* (may be null), and an
// optional intrinsic recognized for this call site.
class HInvoke : public HVariableInputSizeInstruction {
 public:
  bool NeedsEnvironment() const OVERRIDE;

  // Sets the `index`-th call argument (arguments occupy the first
  // GetNumberOfArguments() input slots).
  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  DataType::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  InvokeType GetInvokeType() const {
    return GetPackedField<InvokeTypeField>();
  }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  // Marks this call site as a recognized intrinsic and records its
  // environment/side-effect/exception properties. Defined out of line.
  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }

  // Only intrinsics without heap writes are candidates for code motion/CSE.
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic() && !DoesAnyWrite(); }

  // Two invokes compare equal (for CSE) only when both are the same
  // recognized intrinsic; kNone never matches anything.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  ArtMethod* GetResolvedMethod() const { return resolved_method_; }
  void SetResolvedMethod(ArtMethod* method) { resolved_method_ = method; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed-field layout: [generic bits][invoke type][return type][can-throw].
  static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldReturnType =
      kFieldInvokeType + kFieldInvokeTypeSize;
  static constexpr size_t kFieldReturnTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
  using ReturnTypeField = BitField<DataType::Type, kFieldReturnType, kFieldReturnTypeSize>;

  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          DataType::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          ArtMethod* resolved_method,
          InvokeType invoke_type)
    : HVariableInputSizeInstruction(
          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
          dex_pc,
          arena,
          number_of_arguments + number_of_other_inputs,
          kArenaAllocInvokeInputs),
      number_of_arguments_(number_of_arguments),
      resolved_method_(resolved_method),
      dex_method_index_(dex_method_index),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
    SetPackedField<ReturnTypeField>(return_type);
    SetPackedField<InvokeTypeField>(invoke_type);
    // Invokes are assumed throwing until proven otherwise (see SetCanThrow).
    SetPackedFlag<kFlagCanThrow>(true);
  }

  uint32_t number_of_arguments_;
  ArtMethod* resolved_method_;
  const uint32_t dex_method_index_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};
4114
// Invoke of a method that could not be resolved at compile time: the
// resolved ArtMethod* passed to the base class is null, so resolution
// happens at runtime.
class HInvokeUnresolved FINAL : public HInvoke {
 public:
  HInvokeUnresolved(ArenaAllocator* arena,
                    uint32_t number_of_arguments,
                    DataType::Type return_type,
                    uint32_t dex_pc,
                    uint32_t dex_method_index,
                    InvokeType invoke_type)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                nullptr,  // No resolved method.
                invoke_type) {
  }

  DECLARE_INSTRUCTION(InvokeUnresolved);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
};
4138
// Call site for a dex invoke-polymorphic instruction (presumably a
// MethodHandle invocation — see the graph builder). No resolved method is
// recorded and the call is modeled with the kVirtual invoke type.
class HInvokePolymorphic FINAL : public HInvoke {
 public:
  HInvokePolymorphic(ArenaAllocator* arena,
                     uint32_t number_of_arguments,
                     DataType::Type return_type,
                     uint32_t dex_pc,
                     uint32_t dex_method_index)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                nullptr,  // No resolved method.
                kVirtual) {}

  DECLARE_INSTRUCTION(InvokePolymorphic);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokePolymorphic);
};
4160
// Invoke of a statically-bound target (static or direct call). Besides the
// regular arguments it may carry extra trailing inputs: an optional "special
// input" (e.g. the current method) at GetSpecialInputIndex(), and — for
// static calls with an explicit clinit check — an HClinitCheck/HLoadClass as
// the very last input.
class HInvokeStaticOrDirect FINAL : public HInvoke {
 public:
  // Requirements of this method call regarding the class
  // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement {
    kNone,      // Class already initialized.
    kExplicit,  // Static call having explicit clinit check as last input.
    kImplicit,  // Static call implicitly requiring a clinit check.
    kLast = kImplicit
  };

  // Determines how to load the target ArtMethod*.
  enum class MethodLoadKind {
    // Use a String init ArtMethod* loaded from Thread entrypoints.
    kStringInit,

    // Use the method's own ArtMethod* loaded by the register allocator.
    kRecursive,

    // Use PC-relative boot image ArtMethod* address that will be known at link time.
    // Used for boot image methods referenced by boot image code.
    kBootImageLinkTimePcRelative,

    // Use ArtMethod* at a known address, embed the direct address in the code.
    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
    kDirectAddress,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for classes outside boot image when .bss is accessible with a PC-relative load.
    kBssEntry,

    // Make a runtime call to resolve and call the method. This is the last-resort-kind
    // used when other kinds are unimplemented on a particular architecture.
    kRuntimeCall,
  };

  // Determines the location of the code pointer.
  enum class CodePtrLocation {
    // Recursive call, use local PC-relative call instruction.
    kCallSelf,

    // Use code pointer from the ArtMethod*.
    // Used when we don't know the target code. This is also the last-resort-kind used when
    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
    kCallArtMethod,
  };

  struct DispatchInfo {
    MethodLoadKind method_load_kind;
    CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
    //     Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kDirectAddress
    uint64_t method_load_data;
  };

  HInvokeStaticOrDirect(ArenaAllocator* arena,
                        uint32_t number_of_arguments,
                        DataType::Type return_type,
                        uint32_t dex_pc,
                        uint32_t method_index,
                        ArtMethod* resolved_method,
                        DispatchInfo dispatch_info,
                        InvokeType invoke_type,
                        MethodReference target_method,
                        ClinitCheckRequirement clinit_check_requirement)
      : HInvoke(arena,
                number_of_arguments,
                // There is potentially one extra argument for the HCurrentMethod node, and
                // potentially one other if the clinit check is explicit, and potentially
                // one other if the method is a string factory.
                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
                return_type,
                dex_pc,
                method_index,
                resolved_method,
                invoke_type),
        target_method_(target_method),
        dispatch_info_(dispatch_info) {
    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
  }

  // Updates the dispatch info, removing the now-unneeded current-method input
  // when the new load kind no longer requires it.
  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
    bool had_current_method_input = HasCurrentMethodInput();
    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);

    // Using the current method is the default and once we find a better
    // method load kind, we should not go back to using the current method.
    DCHECK(had_current_method_input || !needs_current_method_input);

    if (had_current_method_input && !needs_current_method_input) {
      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
      RemoveInputAt(GetSpecialInputIndex());
    }
    dispatch_info_ = dispatch_info;
  }

  DispatchInfo GetDispatchInfo() const {
    return dispatch_info_;
  }

  // Inserts the single allowed platform-specific special input (see
  // GetSpecialInputIndex() below).
  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
    DCHECK(InputCount() == GetSpecialInputIndex() ||
           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
    InsertInputAt(GetSpecialInputIndex(), input);
  }

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
      DCHECK(!input_records.empty());
      DCHECK_GT(input_records.size(), GetNumberOfArguments());
      HInstruction* last_input = input_records.back().GetInstruction();
      // Note: `last_input` may be null during arguments setup.
      if (last_input != nullptr) {
        // `last_input` is the last input of a static invoke marked as having
        // an explicit clinit check. It must either be:
        // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
        // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
        DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
      }
    }
    return input_records;
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  bool CanBeNull() const OVERRIDE {
    return GetPackedField<ReturnTypeField>() == DataType::Type::kReference && !IsStringInit();
  }

  // Get the index of the special input, if any.
  //
  // If the invoke HasCurrentMethodInput(), the "special input" is the current
  // method pointer; otherwise there may be one platform-specific special input,
  // such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }

  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeMethodLoadKind() const {
    return GetMethodLoadKind() == MethodLoadKind::kBootImageLinkTimePcRelative ||
           GetMethodLoadKind() == MethodLoadKind::kBssEntry;
  }
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }

  QuickEntrypointEnum GetStringInitEntryPoint() const {
    DCHECK(IsStringInit());
    return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  const DexFile& GetDexFileForPcRelativeDexCache() const;

  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetInvokeType() == kStatic;
  }

  MethodReference GetTargetMethod() const {
    return target_method_;
  }

  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = inputs_.size() - 1u;
    HInstruction* last_input = inputs_.back().GetInstruction();
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kRuntimeCall;
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 private:
  static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
  static constexpr size_t kFieldClinitCheckRequirementSize =
      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
                                               kFieldClinitCheckRequirement,
                                               kFieldClinitCheckRequirementSize>;

  // Cached values of the resolved method, to avoid needing the mutator lock.
  MethodReference target_method_;
  DispatchInfo dispatch_info_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
// Debug stream output for the dispatch-related enums above.
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4410
// Virtual method invocation, dispatched at runtime through the receiver's
// vtable at slot `vtable_index`.
class HInvokeVirtual FINAL : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* arena,
                 uint32_t number_of_arguments,
                 DataType::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 ArtMethod* resolved_method,
                 uint32_t vtable_index)
      : HInvoke(arena,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kVirtual),
        vtable_index_(vtable_index) {}

  bool CanBeNull() const OVERRIDE {
    // These intrinsics are treated as never returning null; other calls fall
    // back to the generic HInvoke answer.
    switch (GetIntrinsic()) {
      case Intrinsics::kThreadCurrentThread:
      case Intrinsics::kStringBufferAppend:
      case Intrinsics::kStringBufferToString:
      case Intrinsics::kStringBuilderAppend:
      case Intrinsics::kStringBuilderToString:
        return false;
      default:
        return HInvoke::CanBeNull();
    }
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // Only a load from the receiver (first input) can serve as an implicit
    // null check, and only when this call was not intrinsified.
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 private:
  // Cached vtable slot of the target method, to avoid recomputing it at code
  // generation time.
  const uint32_t vtable_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
};
4458
4459class HInvokeInterface FINAL : public HInvoke {
4460 public:
4461  HInvokeInterface(ArenaAllocator* arena,
4462                   uint32_t number_of_arguments,
4463                   DataType::Type return_type,
4464                   uint32_t dex_pc,
4465                   uint32_t dex_method_index,
4466                   ArtMethod* resolved_method,
4467                   uint32_t imt_index)
4468      : HInvoke(arena,
4469                number_of_arguments,
4470                0u,
4471                return_type,
4472                dex_pc,
4473                dex_method_index,
4474                resolved_method,
4475                kInterface),
4476        imt_index_(imt_index) {}
4477
4478  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4479    // TODO: Add implicit null checks in intrinsics.
4480    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4481  }
4482
4483  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
4484    // The assembly stub currently needs it.
4485    return true;
4486  }
4487
4488  uint32_t GetImtIndex() const { return imt_index_; }
4489  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
4490
4491  DECLARE_INSTRUCTION(InvokeInterface);
4492
4493 private:
4494  // Cached value of the resolved method, to avoid needing the mutator lock.
4495  const uint32_t imt_index_;
4496
4497  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
4498};
4499
4500class HNeg FINAL : public HUnaryOperation {
4501 public:
4502  HNeg(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4503      : HUnaryOperation(result_type, input, dex_pc) {
4504    DCHECK_EQ(result_type, DataType::Kind(input->GetType()));
4505  }
4506
4507  template <typename T> static T Compute(T x) { return -x; }
4508
4509  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4510    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4511  }
4512  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4513    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4514  }
4515  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4516    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4517  }
4518  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4519    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4520  }
4521
4522  DECLARE_INSTRUCTION(Neg);
4523
4524 private:
4525  DISALLOW_COPY_AND_ASSIGN(HNeg);
4526};
4527
4528class HNewArray FINAL : public HExpression<2> {
4529 public:
4530  HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc)
4531      : HExpression(DataType::Type::kReference, SideEffects::CanTriggerGC(), dex_pc) {
4532    SetRawInputAt(0, cls);
4533    SetRawInputAt(1, length);
4534  }
4535
4536  // Calls runtime so needs an environment.
4537  bool NeedsEnvironment() const OVERRIDE { return true; }
4538
4539  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
4540  bool CanThrow() const OVERRIDE { return true; }
4541
4542  bool CanBeNull() const OVERRIDE { return false; }
4543
4544  HLoadClass* GetLoadClass() const {
4545    DCHECK(InputAt(0)->IsLoadClass());
4546    return InputAt(0)->AsLoadClass();
4547  }
4548
4549  HInstruction* GetLength() const {
4550    return InputAt(1);
4551  }
4552
4553  DECLARE_INSTRUCTION(NewArray);
4554
4555 private:
4556  DISALLOW_COPY_AND_ASSIGN(HNewArray);
4557};
4558
4559class HAdd FINAL : public HBinaryOperation {
4560 public:
4561  HAdd(DataType::Type result_type,
4562       HInstruction* left,
4563       HInstruction* right,
4564       uint32_t dex_pc = kNoDexPc)
4565      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4566
4567  bool IsCommutative() const OVERRIDE { return true; }
4568
4569  template <typename T> static T Compute(T x, T y) { return x + y; }
4570
4571  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4572    return GetBlock()->GetGraph()->GetIntConstant(
4573        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4574  }
4575  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4576    return GetBlock()->GetGraph()->GetLongConstant(
4577        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4578  }
4579  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4580    return GetBlock()->GetGraph()->GetFloatConstant(
4581        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4582  }
4583  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4584    return GetBlock()->GetGraph()->GetDoubleConstant(
4585        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4586  }
4587
4588  DECLARE_INSTRUCTION(Add);
4589
4590 private:
4591  DISALLOW_COPY_AND_ASSIGN(HAdd);
4592};
4593
4594class HSub FINAL : public HBinaryOperation {
4595 public:
4596  HSub(DataType::Type result_type,
4597       HInstruction* left,
4598       HInstruction* right,
4599       uint32_t dex_pc = kNoDexPc)
4600      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4601
4602  template <typename T> static T Compute(T x, T y) { return x - y; }
4603
4604  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4605    return GetBlock()->GetGraph()->GetIntConstant(
4606        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4607  }
4608  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4609    return GetBlock()->GetGraph()->GetLongConstant(
4610        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4611  }
4612  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4613    return GetBlock()->GetGraph()->GetFloatConstant(
4614        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4615  }
4616  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4617    return GetBlock()->GetGraph()->GetDoubleConstant(
4618        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4619  }
4620
4621  DECLARE_INSTRUCTION(Sub);
4622
4623 private:
4624  DISALLOW_COPY_AND_ASSIGN(HSub);
4625};
4626
4627class HMul FINAL : public HBinaryOperation {
4628 public:
4629  HMul(DataType::Type result_type,
4630       HInstruction* left,
4631       HInstruction* right,
4632       uint32_t dex_pc = kNoDexPc)
4633      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4634
4635  bool IsCommutative() const OVERRIDE { return true; }
4636
4637  template <typename T> static T Compute(T x, T y) { return x * y; }
4638
4639  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4640    return GetBlock()->GetGraph()->GetIntConstant(
4641        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4642  }
4643  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4644    return GetBlock()->GetGraph()->GetLongConstant(
4645        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4646  }
4647  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4648    return GetBlock()->GetGraph()->GetFloatConstant(
4649        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4650  }
4651  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4652    return GetBlock()->GetGraph()->GetDoubleConstant(
4653        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4654  }
4655
4656  DECLARE_INSTRUCTION(Mul);
4657
4658 private:
4659  DISALLOW_COPY_AND_ASSIGN(HMul);
4660};
4661
4662class HDiv FINAL : public HBinaryOperation {
4663 public:
4664  HDiv(DataType::Type result_type,
4665       HInstruction* left,
4666       HInstruction* right,
4667       uint32_t dex_pc)
4668      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4669
4670  template <typename T>
4671  T ComputeIntegral(T x, T y) const {
4672    DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
4673    // Our graph structure ensures we never have 0 for `y` during
4674    // constant folding.
4675    DCHECK_NE(y, 0);
4676    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4677    return (y == -1) ? -x : x / y;
4678  }
4679
4680  template <typename T>
4681  T ComputeFP(T x, T y) const {
4682    DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
4683    return x / y;
4684  }
4685
4686  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4687    return GetBlock()->GetGraph()->GetIntConstant(
4688        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4689  }
4690  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4691    return GetBlock()->GetGraph()->GetLongConstant(
4692        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4693  }
4694  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4695    return GetBlock()->GetGraph()->GetFloatConstant(
4696        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4697  }
4698  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4699    return GetBlock()->GetGraph()->GetDoubleConstant(
4700        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4701  }
4702
4703  DECLARE_INSTRUCTION(Div);
4704
4705 private:
4706  DISALLOW_COPY_AND_ASSIGN(HDiv);
4707};
4708
4709class HRem FINAL : public HBinaryOperation {
4710 public:
4711  HRem(DataType::Type result_type,
4712       HInstruction* left,
4713       HInstruction* right,
4714       uint32_t dex_pc)
4715      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4716
4717  template <typename T>
4718  T ComputeIntegral(T x, T y) const {
4719    DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
4720    // Our graph structure ensures we never have 0 for `y` during
4721    // constant folding.
4722    DCHECK_NE(y, 0);
4723    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4724    return (y == -1) ? 0 : x % y;
4725  }
4726
4727  template <typename T>
4728  T ComputeFP(T x, T y) const {
4729    DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
4730    return std::fmod(x, y);
4731  }
4732
4733  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4734    return GetBlock()->GetGraph()->GetIntConstant(
4735        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4736  }
4737  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4738    return GetBlock()->GetGraph()->GetLongConstant(
4739        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4740  }
4741  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4742    return GetBlock()->GetGraph()->GetFloatConstant(
4743        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4744  }
4745  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4746    return GetBlock()->GetGraph()->GetDoubleConstant(
4747        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4748  }
4749
4750  DECLARE_INSTRUCTION(Rem);
4751
4752 private:
4753  DISALLOW_COPY_AND_ASSIGN(HRem);
4754};
4755
// Checks that its input is not zero; throws `ArithmeticException` otherwise.
class HDivZeroCheck FINAL : public HExpression<1> {
 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
  // constructor.
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // The check is type-transparent: it reports the type of its single input.
  DataType::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Any two div-zero checks with the same input are interchangeable.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Needs an environment and can throw because it may raise the exception.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};
4781
4782class HShl FINAL : public HBinaryOperation {
4783 public:
4784  HShl(DataType::Type result_type,
4785       HInstruction* value,
4786       HInstruction* distance,
4787       uint32_t dex_pc = kNoDexPc)
4788      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4789    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
4790    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
4791  }
4792
4793  template <typename T>
4794  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4795    return value << (distance & max_shift_distance);
4796  }
4797
4798  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4799    return GetBlock()->GetGraph()->GetIntConstant(
4800        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4801  }
4802  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4803    return GetBlock()->GetGraph()->GetLongConstant(
4804        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4805  }
4806  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4807                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4808    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4809    UNREACHABLE();
4810  }
4811  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4812                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4813    LOG(FATAL) << DebugName() << " is not defined for float values";
4814    UNREACHABLE();
4815  }
4816  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4817                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4818    LOG(FATAL) << DebugName() << " is not defined for double values";
4819    UNREACHABLE();
4820  }
4821
4822  DECLARE_INSTRUCTION(Shl);
4823
4824 private:
4825  DISALLOW_COPY_AND_ASSIGN(HShl);
4826};
4827
4828class HShr FINAL : public HBinaryOperation {
4829 public:
4830  HShr(DataType::Type result_type,
4831       HInstruction* value,
4832       HInstruction* distance,
4833       uint32_t dex_pc = kNoDexPc)
4834      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4835    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
4836    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
4837  }
4838
4839  template <typename T>
4840  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4841    return value >> (distance & max_shift_distance);
4842  }
4843
4844  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4845    return GetBlock()->GetGraph()->GetIntConstant(
4846        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4847  }
4848  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4849    return GetBlock()->GetGraph()->GetLongConstant(
4850        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4851  }
4852  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4853                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4854    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4855    UNREACHABLE();
4856  }
4857  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4858                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4859    LOG(FATAL) << DebugName() << " is not defined for float values";
4860    UNREACHABLE();
4861  }
4862  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4863                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4864    LOG(FATAL) << DebugName() << " is not defined for double values";
4865    UNREACHABLE();
4866  }
4867
4868  DECLARE_INSTRUCTION(Shr);
4869
4870 private:
4871  DISALLOW_COPY_AND_ASSIGN(HShr);
4872};
4873
4874class HUShr FINAL : public HBinaryOperation {
4875 public:
4876  HUShr(DataType::Type result_type,
4877        HInstruction* value,
4878        HInstruction* distance,
4879        uint32_t dex_pc = kNoDexPc)
4880      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4881    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
4882    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
4883  }
4884
4885  template <typename T>
4886  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4887    typedef typename std::make_unsigned<T>::type V;
4888    V ux = static_cast<V>(value);
4889    return static_cast<T>(ux >> (distance & max_shift_distance));
4890  }
4891
4892  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4893    return GetBlock()->GetGraph()->GetIntConstant(
4894        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4895  }
4896  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4897    return GetBlock()->GetGraph()->GetLongConstant(
4898        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4899  }
4900  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4901                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4902    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4903    UNREACHABLE();
4904  }
4905  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4906                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4907    LOG(FATAL) << DebugName() << " is not defined for float values";
4908    UNREACHABLE();
4909  }
4910  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4911                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4912    LOG(FATAL) << DebugName() << " is not defined for double values";
4913    UNREACHABLE();
4914  }
4915
4916  DECLARE_INSTRUCTION(UShr);
4917
4918 private:
4919  DISALLOW_COPY_AND_ASSIGN(HUShr);
4920};
4921
4922class HAnd FINAL : public HBinaryOperation {
4923 public:
4924  HAnd(DataType::Type result_type,
4925       HInstruction* left,
4926       HInstruction* right,
4927       uint32_t dex_pc = kNoDexPc)
4928      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4929
4930  bool IsCommutative() const OVERRIDE { return true; }
4931
4932  template <typename T> static T Compute(T x, T y) { return x & y; }
4933
4934  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4935    return GetBlock()->GetGraph()->GetIntConstant(
4936        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4937  }
4938  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4939    return GetBlock()->GetGraph()->GetLongConstant(
4940        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4941  }
4942  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4943                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4944    LOG(FATAL) << DebugName() << " is not defined for float values";
4945    UNREACHABLE();
4946  }
4947  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4948                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4949    LOG(FATAL) << DebugName() << " is not defined for double values";
4950    UNREACHABLE();
4951  }
4952
4953  DECLARE_INSTRUCTION(And);
4954
4955 private:
4956  DISALLOW_COPY_AND_ASSIGN(HAnd);
4957};
4958
4959class HOr FINAL : public HBinaryOperation {
4960 public:
4961  HOr(DataType::Type result_type,
4962      HInstruction* left,
4963      HInstruction* right,
4964      uint32_t dex_pc = kNoDexPc)
4965      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4966
4967  bool IsCommutative() const OVERRIDE { return true; }
4968
4969  template <typename T> static T Compute(T x, T y) { return x | y; }
4970
4971  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4972    return GetBlock()->GetGraph()->GetIntConstant(
4973        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4974  }
4975  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4976    return GetBlock()->GetGraph()->GetLongConstant(
4977        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4978  }
4979  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4980                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4981    LOG(FATAL) << DebugName() << " is not defined for float values";
4982    UNREACHABLE();
4983  }
4984  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4985                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4986    LOG(FATAL) << DebugName() << " is not defined for double values";
4987    UNREACHABLE();
4988  }
4989
4990  DECLARE_INSTRUCTION(Or);
4991
4992 private:
4993  DISALLOW_COPY_AND_ASSIGN(HOr);
4994};
4995
4996class HXor FINAL : public HBinaryOperation {
4997 public:
4998  HXor(DataType::Type result_type,
4999       HInstruction* left,
5000       HInstruction* right,
5001       uint32_t dex_pc = kNoDexPc)
5002      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
5003
5004  bool IsCommutative() const OVERRIDE { return true; }
5005
5006  template <typename T> static T Compute(T x, T y) { return x ^ y; }
5007
5008  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
5009    return GetBlock()->GetGraph()->GetIntConstant(
5010        Compute(x->GetValue(), y->GetValue()), GetDexPc());
5011  }
5012  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
5013    return GetBlock()->GetGraph()->GetLongConstant(
5014        Compute(x->GetValue(), y->GetValue()), GetDexPc());
5015  }
5016  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5017                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
5018    LOG(FATAL) << DebugName() << " is not defined for float values";
5019    UNREACHABLE();
5020  }
5021  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5022                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
5023    LOG(FATAL) << DebugName() << " is not defined for double values";
5024    UNREACHABLE();
5025  }
5026
5027  DECLARE_INSTRUCTION(Xor);
5028
5029 private:
5030  DISALLOW_COPY_AND_ASSIGN(HXor);
5031};
5032
5033class HRor FINAL : public HBinaryOperation {
5034 public:
5035  HRor(DataType::Type result_type, HInstruction* value, HInstruction* distance)
5036    : HBinaryOperation(result_type, value, distance) {
5037    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5038    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5039  }
5040
5041  template <typename T>
5042  static T Compute(T value, int32_t distance, int32_t max_shift_value) {
5043    typedef typename std::make_unsigned<T>::type V;
5044    V ux = static_cast<V>(value);
5045    if ((distance & max_shift_value) == 0) {
5046      return static_cast<T>(ux);
5047    } else {
5048      const V reg_bits = sizeof(T) * 8;
5049      return static_cast<T>(ux >> (distance & max_shift_value)) |
5050                           (value << (reg_bits - (distance & max_shift_value)));
5051    }
5052  }
5053
5054  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
5055    return GetBlock()->GetGraph()->GetIntConstant(
5056        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5057  }
5058  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
5059    return GetBlock()->GetGraph()->GetLongConstant(
5060        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5061  }
5062  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5063                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5064    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5065    UNREACHABLE();
5066  }
5067  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5068                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5069    LOG(FATAL) << DebugName() << " is not defined for float values";
5070    UNREACHABLE();
5071  }
5072  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5073                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5074    LOG(FATAL) << DebugName() << " is not defined for double values";
5075    UNREACHABLE();
5076  }
5077
5078  DECLARE_INSTRUCTION(Ror);
5079
5080 private:
5081  DISALLOW_COPY_AND_ASSIGN(HRor);
5082};
5083
// The value of a parameter in this method. Its location depends on
// the calling convention.
class HParameterValue FINAL : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  dex::TypeIndex type_index,
                  uint8_t index,
                  DataType::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    // The `this` argument of an instance method can never be null.
    SetPackedFlag<kFlagIsThis>(is_this);
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

  // Nullability may be refined later (e.g. by analysis of callers).
  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const dex::TypeIndex type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};
5127
5128class HNot FINAL : public HUnaryOperation {
5129 public:
5130  HNot(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5131      : HUnaryOperation(result_type, input, dex_pc) {}
5132
5133  bool CanBeMoved() const OVERRIDE { return true; }
5134  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5135    return true;
5136  }
5137
5138  template <typename T> static T Compute(T x) { return ~x; }
5139
5140  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
5141    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5142  }
5143  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
5144    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
5145  }
5146  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5147    LOG(FATAL) << DebugName() << " is not defined for float values";
5148    UNREACHABLE();
5149  }
5150  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5151    LOG(FATAL) << DebugName() << " is not defined for double values";
5152    UNREACHABLE();
5153  }
5154
5155  DECLARE_INSTRUCTION(Not);
5156
5157 private:
5158  DISALLOW_COPY_AND_ASSIGN(HNot);
5159};
5160
5161class HBooleanNot FINAL : public HUnaryOperation {
5162 public:
5163  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
5164      : HUnaryOperation(DataType::Type::kBool, input, dex_pc) {}
5165
5166  bool CanBeMoved() const OVERRIDE { return true; }
5167  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5168    return true;
5169  }
5170
5171  template <typename T> static bool Compute(T x) {
5172    DCHECK(IsUint<1>(x)) << x;
5173    return !x;
5174  }
5175
5176  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
5177    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5178  }
5179  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5180    LOG(FATAL) << DebugName() << " is not defined for long values";
5181    UNREACHABLE();
5182  }
5183  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5184    LOG(FATAL) << DebugName() << " is not defined for float values";
5185    UNREACHABLE();
5186  }
5187  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5188    LOG(FATAL) << DebugName() << " is not defined for double values";
5189    UNREACHABLE();
5190  }
5191
5192  DECLARE_INSTRUCTION(BooleanNot);
5193
5194 private:
5195  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
5196};
5197
// Converts its single input to `result_type` (e.g. int-to-long, double-to-int).
class HTypeConversion FINAL : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
    // Invariant: We should never generate a conversion to a Boolean value.
    DCHECK_NE(DataType::Type::kBool, result_type);
  }

  // The value being converted, its type, and the target type.
  HInstruction* GetInput() const { return InputAt(0); }
  DataType::Type GetInputType() const { return GetInput()->GetType(); }
  DataType::Type GetResultType() const { return GetType(); }

  // Conversions have no side effects and depend only on their input, so any
  // two conversions with the same input and result type are interchangeable.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result.  If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};
5226
// Sentinel register number meaning "no register assigned"; -1 converts to
// 0xFFFFFFFF (the maximum uint32_t value).
static constexpr uint32_t kNoRegNumber = -1;
5228
// Checks that its input is not null; throws `NullPointerException` otherwise.
class HNullCheck FINAL : public HExpression<1> {
 public:
  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
  // constructor.
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // Any two null checks with the same input are interchangeable.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Needs an environment (and can throw) because it may raise the exception.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  // If the check succeeds, its result is by definition non-null.
  bool CanBeNull() const OVERRIDE { return false; }


  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};
5255
// Embeds an ArtField and all the information required by the compiler. We cache
// that information to avoid requiring the mutator lock every time we need it.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(ArtField* field,
            MemberOffset field_offset,
            DataType::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file)
      : field_(field),
        field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file) {}

  ArtField* GetField() const { return field_; }
  MemberOffset GetFieldOffset() const { return field_offset_; }
  DataType::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }

 private:
  // The resolved field and its cached metadata; all immutable after construction.
  ArtField* const field_;
  const MemberOffset field_offset_;
  const DataType::Type field_type_;
  const bool is_volatile_;
  // Field index in `dex_file_` and the def index of the declaring class.
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
};
5292
// Reads an instance field from the object given as input 0.
class HInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    ArtField* field,
                    DataType::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, value);
  }

  // Volatile reads have ordering semantics and must stay in place.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Equality is based on the field offset only (inputs are compared separately
  // by the caller); two gets at the same offset read the same memory.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  // An implicit (fault-based) null check is possible only when this get
  // dereferences the checked object at a small enough offset.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  // Mix the field offset into the hash, consistent with InstructionDataEquals.
  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};
5342
// Writes `value` (input 1) into an instance field of the object (input 0).
class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    ArtField* field,
                    DataType::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Conservatively assume the stored value may be null; analyses can clear
    // this later via ClearValueCanBeNull().
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  // An implicit (fault-based) null check is possible only when this set
  // dereferences the checked object at a small enough offset.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};
5392
// Reads an element of type `type` from array (input 0) at index (input 1).
class HArrayGet FINAL : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            DataType::Type type,
            uint32_t dex_pc)
     : HArrayGet(array,
                 index,
                 type,
                 SideEffects::ArrayReadOfType(type),
                 dex_pc,
                 /* is_string_char_at */ false) {}

  HArrayGet(HInstruction* array,
            HInstruction* index,
            DataType::Type type,
            SideEffects side_effects,
            uint32_t dex_pc,
            bool is_string_char_at)
      : HExpression(type, side_effects, dex_pc) {
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, unless the array is the result of NewArray, the array access is always
    // preceded by some form of null NullCheck necessary for the bounds check, usually
    // implicit null check on the ArrayLength input to BoundsCheck or Deoptimize for
    // dynamic BCE. There are cases when these could be removed to produce better code.
    // If we ever add optimizations to do so we should allow an implicit check here
    // (as long as the address falls in the first page).
    //
    // As an example of such fancy optimization, we could eliminate BoundsCheck for
    //     a = cond ? new int[1] : null;
    //     a[0];  // The Phi does not need bounds check for either input.
    return false;
  }

  // Returns whether `other` is this get's cross-type-category twin: same dex
  // pc decides the answer, and in debug builds we verify the pair shares the
  // same block, array and index while one side is integral and the other
  // floating-point.
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      if (DataType::IsIntOrLongType(GetType())) {
        DCHECK(DataType::IsFloatingPointType(other->GetType())) << other->GetType();
      } else {
        DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
        DCHECK(DataType::IsIntOrLongType(other->GetType())) << other->GetType();
      }
    }
    return result;
  }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  // We treat a String as an array, creating the HArrayGet from String.charAt()
  // intrinsic in the instruction simplifier. We can always determine whether
  // a particular HArrayGet is actually a String.charAt() by looking at the type
  // of the input but that requires holding the mutator lock, so we prefer to use
  // a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};
5473
// Assigns `value` to `array[index]`. Stores of references may require a runtime
// type check and can then throw ArrayStoreException.
class HArraySet FINAL : public HTemplateInstruction<3> {
 public:
  // Convenience constructor: derives the side effects from a best guess of the
  // component type; the guess may be refined during SSA building.
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            DataType::Type expected_component_type,
            uint32_t dex_pc)
      : HArraySet(array,
                  index,
                  value,
                  expected_component_type,
                  // Make a best guess for side effects now, may be refined during SSA building.
                  ComputeSideEffects(GetComponentType(value->GetType(), expected_component_type)),
                  dex_pc) {}

  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            DataType::Type expected_component_type,
            SideEffects side_effects,
            uint32_t dex_pc)
      : HTemplateInstruction(side_effects, dex_pc) {
    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
    // Only reference stores may need a type check (ArrayStoreException).
    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == DataType::Type::kReference);
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We call a runtime method to throw ArrayStoreException.
    return NeedsTypeCheck();
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  // Clears the type-check requirement, e.g. when an optimization proves the
  // stored value's type is compatible with the array.
  void ClearNeedsTypeCheck() {
    SetPackedFlag<kFlagNeedsTypeCheck>(false);
  }

  // Clears the can-be-null bit when the stored value is proven non-null.
  void ClearValueCanBeNull() {
    SetPackedFlag<kFlagValueCanBeNull>(false);
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
  }

  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
  bool StaticTypeOfArrayIsObjectArray() const {
    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  DataType::Type GetComponentType() const {
    return GetComponentType(GetValue()->GetType(), GetRawExpectedComponentType());
  }

  static DataType::Type GetComponentType(DataType::Type value_type,
                                         DataType::Type expected_component_type) {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type` comes from SSA building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    return ((value_type == DataType::Type::kFloat32) || (value_type == DataType::Type::kFloat64))
        ? value_type
        : expected_component_type;
  }

  DataType::Type GetRawExpectedComponentType() const {
    return GetPackedField<ExpectedComponentTypeField>();
  }

  // Array-write effect for `type`, plus GC-trigger for reference stores.
  static SideEffects ComputeSideEffects(DataType::Type type) {
    return SideEffects::ArrayWriteOfType(type).Union(SideEffectsForArchRuntimeCalls(type));
  }

  static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type value_type) {
    return (value_type == DataType::Type::kReference) ? SideEffects::CanTriggerGC()
                                                      : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  // Packed-field bit layout, stacked on top of the generic instruction bits.
  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldExpectedComponentTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kFlagNeedsTypeCheck =
      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
  static constexpr size_t kNumberOfArraySetPackedBits =
      kFlagStaticTypeOfArrayIsObjectArray + 1;
  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ExpectedComponentTypeField =
      BitField<DataType::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};
5588
// Yields the length of its array input as an int32. Also used for
// String.length()/String.isEmpty(), marked via `is_string_length`.
class HArrayLength FINAL : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
      : HExpression(DataType::Type::kInt32, SideEffects::None(), dex_pc) {
    SetPackedFlag<kFlagIsStringLength>(is_string_length);
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Two HArrayLength instructions are interchangeable; equality of the array
  // input is checked by the caller via input comparison.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // Loading the length dereferences the array, so a null check on the array
  // input can be folded into this instruction.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }

  DECLARE_INSTRUCTION(ArrayLength);

 private:
  // We treat a String as an array, creating the HArrayLength from String.length()
  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
  // determine whether a particular HArrayLength is actually a String.length() by
  // looking at the type of the input but that requires holding the mutator lock, so
  // we prefer to use a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringLength = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
  static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
};
5624
5625class HBoundsCheck FINAL : public HExpression<2> {
5626 public:
5627  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
5628  // constructor.
5629  HBoundsCheck(HInstruction* index,
5630               HInstruction* length,
5631               uint32_t dex_pc,
5632               bool string_char_at = false)
5633      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
5634    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(index->GetType()));
5635    SetPackedFlag<kFlagIsStringCharAt>(string_char_at);
5636    SetRawInputAt(0, index);
5637    SetRawInputAt(1, length);
5638  }
5639
5640  bool CanBeMoved() const OVERRIDE { return true; }
5641  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5642    return true;
5643  }
5644
5645  bool NeedsEnvironment() const OVERRIDE { return true; }
5646
5647  bool CanThrow() const OVERRIDE { return true; }
5648
5649  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
5650
5651  HInstruction* GetIndex() const { return InputAt(0); }
5652
5653  DECLARE_INSTRUCTION(BoundsCheck);
5654
5655 private:
5656  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
5657
5658  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
5659};
5660
// Safepoint instruction: a point where the runtime may suspend the executing
// thread (hence it can trigger GC). Inserted e.g. on loop back edges.
class HSuspendCheck FINAL : public HTemplateInstruction<0> {
 public:
  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}

  // Suspension may inspect the stack, so an environment is required.
  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
  SlowPathCode* GetSlowPath() const { return slow_path_; }

  DECLARE_INSTRUCTION(SuspendCheck);

 private:
  // Only used for code generation, in order to share the same slow path between back edges
  // of a same loop.
  SlowPathCode* slow_path_;

  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
};
5682
5683// Pseudo-instruction which provides the native debugger with mapping information.
5684// It ensures that we can generate line number and local variables at this point.
class HNativeDebugInfo : public HTemplateInstruction<0> {
 public:
  explicit HNativeDebugInfo(uint32_t dex_pc)
      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}

  // The environment is what carries the local-variable values the
  // debugger needs at this point.
  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  DECLARE_INSTRUCTION(NativeDebugInfo);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
};
5699
5700/**
5701 * Instruction to load a Class object.
5702 */
5703class HLoadClass FINAL : public HInstruction {
5704 public:
5705  // Determines how to load the Class.
5706  enum class LoadKind {
5707    // We cannot load this class. See HSharpening::SharpenLoadClass.
5708    kInvalid = -1,
5709
5710    // Use the Class* from the method's own ArtMethod*.
5711    kReferrersClass,
5712
5713    // Use PC-relative boot image Class* address that will be known at link time.
5714    // Used for boot image classes referenced by boot image code.
5715    kBootImageLinkTimePcRelative,
5716
5717    // Use a known boot image Class* address, embedded in the code by the codegen.
5718    // Used for boot image classes referenced by apps in AOT- and JIT-compiled code.
5719    kBootImageAddress,
5720
5721    // Use a PC-relative load from a boot image ClassTable mmapped into the .bss
5722    // of the oat file.
5723    kBootImageClassTable,
5724
5725    // Load from an entry in the .bss section using a PC-relative load.
5726    // Used for classes outside boot image when .bss is accessible with a PC-relative load.
5727    kBssEntry,
5728
5729    // Load from the root table associated with the JIT compiled method.
5730    kJitTableAddress,
5731
5732    // Load using a simple runtime call. This is the fall-back load kind when
5733    // the codegen is unable to use another appropriate kind.
5734    kRuntimeCall,
5735
5736    kLast = kRuntimeCall
5737  };
5738
5739  HLoadClass(HCurrentMethod* current_method,
5740             dex::TypeIndex type_index,
5741             const DexFile& dex_file,
5742             Handle<mirror::Class> klass,
5743             bool is_referrers_class,
5744             uint32_t dex_pc,
5745             bool needs_access_check)
5746      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
5747        special_input_(HUserRecord<HInstruction*>(current_method)),
5748        type_index_(type_index),
5749        dex_file_(dex_file),
5750        klass_(klass),
5751        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
5752    // Referrers class should not need access check. We never inline unverified
5753    // methods so we can't possibly end up in this situation.
5754    DCHECK(!is_referrers_class || !needs_access_check);
5755
5756    SetPackedField<LoadKindField>(
5757        is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kRuntimeCall);
5758    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
5759    SetPackedFlag<kFlagIsInBootImage>(false);
5760    SetPackedFlag<kFlagGenerateClInitCheck>(false);
5761  }
5762
5763  void SetLoadKind(LoadKind load_kind);
5764
5765  LoadKind GetLoadKind() const {
5766    return GetPackedField<LoadKindField>();
5767  }
5768
5769  bool CanBeMoved() const OVERRIDE { return true; }
5770
5771  bool InstructionDataEquals(const HInstruction* other) const;
5772
5773  size_t ComputeHashCode() const OVERRIDE { return type_index_.index_; }
5774
5775  bool CanBeNull() const OVERRIDE { return false; }
5776
5777  bool NeedsEnvironment() const OVERRIDE {
5778    return CanCallRuntime();
5779  }
5780
5781  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
5782    // The entrypoint the code generator is going to call does not do
5783    // clinit of the class.
5784    DCHECK(!NeedsAccessCheck());
5785    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
5786  }
5787
5788  bool CanCallRuntime() const {
5789    return NeedsAccessCheck() ||
5790           MustGenerateClinitCheck() ||
5791           GetLoadKind() == LoadKind::kRuntimeCall ||
5792           GetLoadKind() == LoadKind::kBssEntry;
5793  }
5794
5795  bool CanThrow() const OVERRIDE {
5796    return NeedsAccessCheck() ||
5797           MustGenerateClinitCheck() ||
5798           // If the class is in the boot image, the lookup in the runtime call cannot throw.
5799           // This keeps CanThrow() consistent between non-PIC (using kBootImageAddress) and
5800           // PIC and subsequently avoids a DCE behavior dependency on the PIC option.
5801           ((GetLoadKind() == LoadKind::kRuntimeCall ||
5802             GetLoadKind() == LoadKind::kBssEntry) &&
5803            !IsInBootImage());
5804  }
5805
5806  ReferenceTypeInfo GetLoadedClassRTI() {
5807    return loaded_class_rti_;
5808  }
5809
5810  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
5811    // Make sure we only set exact types (the loaded class should never be merged).
5812    DCHECK(rti.IsExact());
5813    loaded_class_rti_ = rti;
5814  }
5815
5816  dex::TypeIndex GetTypeIndex() const { return type_index_; }
5817  const DexFile& GetDexFile() const { return dex_file_; }
5818
5819  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
5820    return GetLoadKind() == LoadKind::kRuntimeCall;
5821  }
5822
5823  static SideEffects SideEffectsForArchRuntimeCalls() {
5824    return SideEffects::CanTriggerGC();
5825  }
5826
5827  bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
5828  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
5829  bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
5830  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
5831
5832  void MarkInBootImage() {
5833    SetPackedFlag<kFlagIsInBootImage>(true);
5834  }
5835
5836  void AddSpecialInput(HInstruction* special_input);
5837
5838  using HInstruction::GetInputRecords;  // Keep the const version visible.
5839  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
5840    return ArrayRef<HUserRecord<HInstruction*>>(
5841        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
5842  }
5843
5844  DataType::Type GetType() const OVERRIDE {
5845    return DataType::Type::kReference;
5846  }
5847
5848  Handle<mirror::Class> GetClass() const {
5849    return klass_;
5850  }
5851
5852  DECLARE_INSTRUCTION(LoadClass);
5853
5854 private:
5855  static constexpr size_t kFlagNeedsAccessCheck    = kNumberOfGenericPackedBits;
5856  static constexpr size_t kFlagIsInBootImage       = kFlagNeedsAccessCheck + 1;
5857  // Whether this instruction must generate the initialization check.
5858  // Used for code generation.
5859  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
5860  static constexpr size_t kFieldLoadKind           = kFlagGenerateClInitCheck + 1;
5861  static constexpr size_t kFieldLoadKindSize =
5862      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
5863  static constexpr size_t kNumberOfLoadClassPackedBits = kFieldLoadKind + kFieldLoadKindSize;
5864  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
5865  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
5866
5867  static bool HasTypeReference(LoadKind load_kind) {
5868    return load_kind == LoadKind::kReferrersClass ||
5869        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5870        load_kind == LoadKind::kBootImageClassTable ||
5871        load_kind == LoadKind::kBssEntry ||
5872        load_kind == LoadKind::kRuntimeCall;
5873  }
5874
5875  void SetLoadKindInternal(LoadKind load_kind);
5876
5877  // The special input is the HCurrentMethod for kRuntimeCall or kReferrersClass.
5878  // For other load kinds it's empty or possibly some architecture-specific instruction
5879  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
5880  HUserRecord<HInstruction*> special_input_;
5881
5882  // A type index and dex file where the class can be accessed. The dex file can be:
5883  // - The compiling method's dex file if the class is defined there too.
5884  // - The compiling method's dex file if the class is referenced there.
5885  // - The dex file where the class is defined. When the load kind can only be
5886  //   kBssEntry or kRuntimeCall, we cannot emit code for this `HLoadClass`.
5887  const dex::TypeIndex type_index_;
5888  const DexFile& dex_file_;
5889
5890  Handle<mirror::Class> klass_;
5891
5892  ReferenceTypeInfo loaded_class_rti_;
5893
5894  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
5895};
5896std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
5897
5898// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
5899inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
5900  // The special input is used for PC-relative loads on some architectures,
5901  // including literal pool loads, which are PC-relative too.
5902  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5903         GetLoadKind() == LoadKind::kBootImageAddress ||
5904         GetLoadKind() == LoadKind::kBootImageClassTable ||
5905         GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
5906  DCHECK(special_input_.GetInstruction() == nullptr);
5907  special_input_ = HUserRecord<HInstruction*>(special_input);
5908  special_input->AddUseAt(this, 0);
5909}
5910
// Instruction to load a String object.
class HLoadString FINAL : public HInstruction {
 public:
  // Determines how to load the String.
  enum class LoadKind {
    // Use PC-relative boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code.
    kBootImageLinkTimePcRelative,

    // Use a known boot image String* address, embedded in the code by the codegen.
    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
    kBootImageAddress,

    // Use a PC-relative load from a boot image InternTable mmapped into the .bss
    // of the oat file.
    kBootImageInternTable,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for strings outside boot image when .bss is accessible with a PC-relative load.
    kBssEntry,

    // Load from the root table associated with the JIT compiled method.
    kJitTableAddress,

    // Load using a simple runtime call. This is the fall-back load kind when
    // the codegen is unable to use another appropriate kind.
    kRuntimeCall,

    kLast = kRuntimeCall,
  };

  HLoadString(HCurrentMethod* current_method,
              dex::StringIndex string_index,
              const DexFile& dex_file,
              uint32_t dex_pc)
      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        string_index_(string_index),
        dex_file_(dex_file) {
    // Start with the fall-back kind; sharpening may pick a better one later.
    SetPackedField<LoadKindField>(LoadKind::kRuntimeCall);
  }

  void SetLoadKind(LoadKind load_kind);

  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  dex::StringIndex GetStringIndex() const {
    return string_index_;
  }

  Handle<mirror::String> GetString() const {
    return string_;
  }

  void SetString(Handle<mirror::String> str) {
    string_ = str;
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE;

  size_t ComputeHashCode() const OVERRIDE { return string_index_.index_; }

  // Will call the runtime if we need to load the string through
  // the dex cache and the string is not guaranteed to be there yet.
  bool NeedsEnvironment() const OVERRIDE {
    LoadKind load_kind = GetLoadKind();
    if (load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kBootImageAddress ||
        load_kind == LoadKind::kBootImageInternTable ||
        load_kind == LoadKind::kJitTableAddress) {
      return false;
    }
    return true;
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    return GetLoadKind() == LoadKind::kRuntimeCall;
  }

  bool CanBeNull() const OVERRIDE { return false; }
  // Throws only when the load has to go through the runtime.
  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  void AddSpecialInput(HInstruction* special_input);

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    // The instruction has either zero or one input (the special input below).
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  DataType::Type GetType() const OVERRIDE {
    return DataType::Type::kReference;
  }

  DECLARE_INSTRUCTION(LoadString);

 private:
  static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;

  void SetLoadKindInternal(LoadKind load_kind);

  // The special input is the HCurrentMethod for kRuntimeCall.
  // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
  HUserRecord<HInstruction*> special_input_;

  dex::StringIndex string_index_;
  const DexFile& dex_file_;

  Handle<mirror::String> string_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};
6040std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
6041
6042// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
6043inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
6044  // The special input is used for PC-relative loads on some architectures,
6045  // including literal pool loads, which are PC-relative too.
6046  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6047         GetLoadKind() == LoadKind::kBootImageAddress ||
6048         GetLoadKind() == LoadKind::kBootImageInternTable ||
6049         GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
6050  // HLoadString::GetInputRecords() returns an empty array at this point,
6051  // so use the GetInputRecords() from the base class to set the input record.
6052  DCHECK(special_input_.GetInstruction() == nullptr);
6053  special_input_ = HUserRecord<HInstruction*>(special_input);
6054  special_input->AddUseAt(this, 0);
6055}
6056
6057/**
6058 * Performs an initialization check on its Class object input.
6059 */
class HClinitCheck FINAL : public HExpression<1> {
 public:
  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
      : HExpression(
            DataType::Type::kReference,
            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
            dex_pc) {
    SetRawInputAt(0, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Two clinit checks on the same class are interchangeable; the HLoadClass
  // input is compared separately by the caller.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // May call runtime to initialize the class.
    return true;
  }

  // Class initialization can throw (e.g. if the static initializer throws).
  bool CanThrow() const OVERRIDE { return true; }

  HLoadClass* GetLoadClass() const {
    DCHECK(InputAt(0)->IsLoadClass());
    return InputAt(0)->AsLoadClass();
  }

  DECLARE_INSTRUCTION(ClinitCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
};
6092
// Reads a resolved static field. Input 0 is the class holding the field.
class HStaticFieldGet FINAL : public HExpression<1> {
 public:
  HStaticFieldGet(HInstruction* cls,
                  ArtField* field,
                  DataType::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, cls);
  }


  // Volatile reads have ordering semantics and must not be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HStaticFieldGet* other_get = other->AsStaticFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(StaticFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
};
6139
// Writes a resolved static field. Input 0 is the class, input 1 the value.
class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  ArtField* field,
                  DataType::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  // Cleared when an optimization proves the stored value is non-null.
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(StaticFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};
6186
// Reads an instance field that could not be resolved at compile time;
// the field is identified only by its dex field index.
class HUnresolvedInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HUnresolvedInstanceFieldGet(HInstruction* obj,
                              DataType::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetRawInputAt(0, obj);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DataType::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
};
6211
6212class HUnresolvedInstanceFieldSet FINAL : public HTemplateInstruction<2> {
6213 public:
6214  HUnresolvedInstanceFieldSet(HInstruction* obj,
6215                              HInstruction* value,
6216                              DataType::Type field_type,
6217                              uint32_t field_index,
6218                              uint32_t dex_pc)
6219      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6220        field_index_(field_index) {
6221    SetPackedField<FieldTypeField>(field_type);
6222    DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
6223    SetRawInputAt(0, obj);
6224    SetRawInputAt(1, value);
6225  }
6226
6227  bool NeedsEnvironment() const OVERRIDE { return true; }
6228  bool CanThrow() const OVERRIDE { return true; }
6229
6230  DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6231  uint32_t GetFieldIndex() const { return field_index_; }
6232
6233  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
6234
6235 private:
6236  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6237  static constexpr size_t kFieldFieldTypeSize =
6238      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
6239  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
6240      kFieldFieldType + kFieldFieldTypeSize;
6241  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6242                "Too many packed fields.");
6243  using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;
6244
6245  const uint32_t field_index_;
6246
6247  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
6248};
6249
// Load of a static field that could not be resolved at compile time.
// Resolution is deferred to runtime, hence the environment and CanThrow().
class HUnresolvedStaticFieldGet FINAL : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(DataType::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  // The loaded value's type is the instruction's own (expression) type.
  DataType::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
};
6272
// Store to a static field that could not be resolved at compile time.
// Resolution is deferred to runtime, hence the environment and CanThrow().
class HUnresolvedStaticFieldSet FINAL : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            DataType::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetPackedField<FieldTypeField>(field_type);
    // The stored value must have a register kind compatible with the field type.
    DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
    SetRawInputAt(0, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 private:
  // Packed field layout: the field type lives in the instruction's packed bits.
  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
      kFieldFieldType + kFieldFieldTypeSize;
  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;

  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
};
6308
// Implement the move-exception DEX instruction: loads the pending exception
// reference as this expression's value.
class HLoadException FINAL : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(DataType::Type::kReference, SideEffects::None(), dex_pc) {}

  // The loaded exception reference is known to be non-null.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadException);
};
6322
// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
class HClearException FINAL : public HTemplateInstruction<0> {
 public:
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      // Conservative AllWrites side effects; NOTE(review): presumably this is
      // what keeps the instruction from being treated as dead — confirm.
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}

  DECLARE_INSTRUCTION(ClearException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClearException);
};
6335
// Throws the exception reference given as input.
class HThrow FINAL : public HTemplateInstruction<1> {
 public:
  HThrow(HInstruction* exception, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, exception);
  }

  // A throw terminates its basic block.
  bool IsControlFlow() const OVERRIDE { return true; }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }


  DECLARE_INSTRUCTION(Throw);

 private:
  DISALLOW_COPY_AND_ASSIGN(HThrow);
};
6355
6356/**
6357 * Implementation strategies for the code generator of a HInstanceOf
6358 * or `HCheckCast`.
6359 */
6360enum class TypeCheckKind {
6361  kUnresolvedCheck,       // Check against an unresolved type.
6362  kExactCheck,            // Can do a single class compare.
6363  kClassHierarchyCheck,   // Can just walk the super class chain.
6364  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
6365  kInterfaceCheck,        // No optimization yet when checking against an interface.
6366  kArrayObjectCheck,      // Can just check if the array is not primitive.
6367  kArrayCheck,            // No optimization yet when checking against a generic array.
6368  kLast = kArrayCheck
6369};
6370
6371std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
6372
// Implements the instance-of check: evaluates to a boolean telling whether
// `object` is an instance of the class loaded by `constant`.
class HInstanceOf FINAL : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(DataType::Type::kBool,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // An environment is needed only when the check may call into the runtime.
  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime(GetTypeCheckKind());
  }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  static bool CanCallRuntime(TypeCheckKind check_kind) {
    // Mips currently does runtime calls for any check kind other than kExactCheck.
    return check_kind != TypeCheckKind::kExactCheck;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  // Packed layout: the type-check kind plus the must-do-null-check flag, placed
  // right after the expression packed bits.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};
6426
// Narrows the reference type information of its input, e.g. inside an
// `instanceof` guard (see the upper_bound_ comment below).
class HBoundType FINAL : public HExpression<1> {
 public:
  explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(DataType::Type::kReference, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
    SetPackedFlag<kFlagUpperCanBeNull>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
    DCHECK_EQ(input->GetType(), DataType::Type::kReference);
    SetRawInputAt(0, input);
  }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  void SetCanBeNull(bool can_be_null) {
    // can_be_null may only be set if the upper constraint allows null.
    DCHECK(GetUpperCanBeNull() || !can_be_null);
    SetPackedFlag<kFlagCanBeNull>(can_be_null);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then CanBeNull() cannot be true).
  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  ReferenceTypeInfo upper_bound_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};
6470
// Implements the check-cast operation: verifies that `object` can be cast to
// the class loaded by `constant`, throwing at runtime otherwise.
class HCheckCast FINAL : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  // These accessors mirror HInstanceOf; used in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  // Packed layout: the type-check kind plus the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};
6515
6516/**
6517 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
6518 * @details We define the combined barrier types that are actually required
6519 * by the Java Memory Model, rather than using exactly the terminology from
6520 * the JSR-133 cookbook.  These should, in many cases, be replaced by acquire/release
6521 * primitives.  Note that the JSR-133 cookbook generally does not deal with
6522 * store atomicity issues, and the recipes there are not always entirely sufficient.
6523 * The current recipe is as follows:
6524 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
6525 * -# Use AnyAny barrier after volatile store.  (StoreLoad is as expensive.)
6526 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
6527 * -# Use StoreStore barrier after all stores but before return from any constructor whose
6528 *    class has final fields.
6529 * -# Use NTStoreStore to order non-temporal stores with respect to all later
6530 *    store-to-memory instructions.  Only generated together with non-temporal stores.
6531 */
6532enum MemBarrierKind {
6533  kAnyStore,
6534  kLoadAny,
6535  kStoreStore,
6536  kAnyAny,
6537  kNTStoreStore,
6538  kLastBarrierKind = kNTStoreStore
6539};
6540std::ostream& operator<<(std::ostream& os, const MemBarrierKind& kind);
6541
6542class HMemoryBarrier FINAL : public HTemplateInstruction<0> {
6543 public:
6544  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
6545      : HTemplateInstruction(
6546            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
6547    SetPackedField<BarrierKindField>(barrier_kind);
6548  }
6549
6550  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }
6551
6552  DECLARE_INSTRUCTION(MemoryBarrier);
6553
6554 private:
6555  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
6556  static constexpr size_t kFieldBarrierKindSize =
6557      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
6558  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
6559      kFieldBarrierKind + kFieldBarrierKindSize;
6560  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
6561                "Too many packed fields.");
6562  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;
6563
6564  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
6565};
6566
6567// A constructor fence orders all prior stores to fields that could be accessed via a final field of
6568// the specified object(s), with respect to any subsequent store that might "publish"
6569// (i.e. make visible) the specified object to another thread.
6570//
6571// JLS 17.5.1 "Semantics of final fields" states that a freeze action happens
6572// for all final fields (that were set) at the end of the invoked constructor.
6573//
6574// The constructor fence models the freeze actions for the final fields of an object
6575// being constructed (semantically at the end of the constructor). Constructor fences
6576// have a per-object affinity; two separate objects being constructed get two separate
6577// constructor fences.
6578//
// (Note that if calling a super-constructor or forwarding to another constructor,
6580// the freezes would happen at the end of *that* constructor being invoked).
6581//
6582// The memory model guarantees that when the object being constructed is "published" after
6583// constructor completion (i.e. escapes the current thread via a store), then any final field
6584// writes must be observable on other threads (once they observe that publication).
6585//
6586// Further, anything written before the freeze, and read by dereferencing through the final field,
6587// must also be visible (so final object field could itself have an object with non-final fields;
6588// yet the freeze must also extend to them).
6589//
6590// Constructor example:
6591//
6592//     class HasFinal {
6593//        final int field;                              Optimizing IR for <init>()V:
6594//        HasFinal() {
6595//          field = 123;                                HInstanceFieldSet(this, HasFinal.field, 123)
6596//          // freeze(this.field);                      HConstructorFence(this)
6597//        }                                             HReturn
6598//     }
6599//
6600// HConstructorFence can serve double duty as a fence for new-instance/new-array allocations of
6601// already-initialized classes; in that case the allocation must act as a "default-initializer"
6602// of the object which effectively writes the class pointer "final field".
6603//
// For example, we can model default-initialization as roughly the equivalent of the following:
6605//
6606//     class Object {
6607//       private final Class header;
6608//     }
6609//
6610//  Java code:                                           Optimizing IR:
6611//
6612//     T new_instance<T>() {
6613//       Object obj = allocate_memory(T.class.size);     obj = HInvoke(art_quick_alloc_object, T)
6614//       obj.header = T.class;                           // header write is done by above call.
6615//       // freeze(obj.header)                           HConstructorFence(obj)
6616//       return (T)obj;
6617//     }
6618//
6619// See also:
6620// * CompilerDriver::RequiresConstructorBarrier
6621// * QuasiAtomic::ThreadFenceForConstructor
6622//
class HConstructorFence FINAL : public HVariableInputSizeInstruction {
                                  // A fence has variable inputs because the inputs can be removed
                                  // after prepare_for_register_allocation phase.
                                  // (TODO: In the future a fence could freeze multiple objects
                                  //        after merging two fences together.)
 public:
  // `fence_object` is the reference that needs to be protected for correct publication.
  //
  // It makes sense in the following situations:
  // * <init> constructors, it's the "this" parameter (i.e. HParameterValue, s.t. IsThis() == true).
  // * new-instance-like instructions, it's the return value (i.e. HNewInstance).
  //
  // After construction the `fence_object` becomes the 0th input.
  // This is not an input in a real sense, but just a convenient place to stash the information
  // about the associated object.
  HConstructorFence(HInstruction* fence_object,
                    uint32_t dex_pc,
                    ArenaAllocator* arena)
    // We strongly suspect there is not a more accurate way to describe the fine-grained reordering
    // constraints described in the class header. We claim that these SideEffects constraints
    // enforce a superset of the real constraints.
    //
    // The ordering described above is conservatively modeled with SideEffects as follows:
    //
    // * To prevent reordering of the publication stores:
    // ----> "Reads of objects" is the initial SideEffect.
    // * For every primitive final field store in the constructor:
    // ----> Union that field's type as a read (e.g. "Read of T") into the SideEffect.
    // * If there are any stores to reference final fields in the constructor:
    // ----> Use a more conservative "AllReads" SideEffect because any stores to any references
    //       that are reachable from `fence_object` also need to be prevented for reordering
    //       (and we do not want to do alias analysis to figure out what those stores are).
    //
    // In the implementation, this initially starts out as an "all reads" side effect; this is an
    // even more conservative approach than the one described above, and prevents all of the
    // above reordering without analyzing any of the instructions in the constructor.
    //
    // If in a later phase we discover that there are no writes to reference final fields,
    // we can refine the side effect to a smaller set of type reads (see above constraints).
      : HVariableInputSizeInstruction(SideEffects::AllReads(),
                                      dex_pc,
                                      arena,
                                      /* number_of_inputs */ 1,
                                      kArenaAllocConstructorFenceInputs) {
    DCHECK(fence_object != nullptr);
    SetRawInputAt(0, fence_object);
  }

  // The object associated with this constructor fence.
  //
  // (Note: This will be null after the prepare_for_register_allocation phase,
  // as all constructor fence inputs are removed there).
  HInstruction* GetFenceObject() const {
    return InputAt(0);
  }

  // Find all the HConstructorFence uses (`fence_use`) for `this` and:
  // - Delete `fence_use` from `this`'s use list.
  // - Delete `this` from `fence_use`'s inputs list.
  // - If the `fence_use` is dead, remove it from the graph.
  //
  // A fence is considered dead once it no longer has any uses
  // and all of the inputs are dead.
  //
  // This must *not* be called during/after prepare_for_register_allocation,
  // because that removes all the inputs to the fences but the fence is actually
  // still considered live.
  //
  // Returns how many HConstructorFence instructions were removed from graph.
  static size_t RemoveConstructorFences(HInstruction* instruction);

  // Combine all inputs of `this` and `other` instruction and remove
  // `other` from the graph.
  //
  // Inputs are unique after the merge.
  //
  // Requirement: `this` must not be the same as `other`.
  void Merge(HConstructorFence* other);

  // Check if this constructor fence is protecting
  // an HNewInstance or HNewArray that is also the immediate
  // predecessor of `this`.
  //
  // If `ignore_inputs` is true, then the immediate predecessor doesn't need
  // to be one of the inputs of `this`.
  //
  // Returns the associated HNewArray or HNewInstance,
  // or null otherwise.
  HInstruction* GetAssociatedAllocation(bool ignore_inputs = false);

  DECLARE_INSTRUCTION(ConstructorFence);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstructorFence);
};
6718
6719class HMonitorOperation FINAL : public HTemplateInstruction<1> {
6720 public:
6721  enum class OperationKind {
6722    kEnter,
6723    kExit,
6724    kLast = kExit
6725  };
6726
6727  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
6728    : HTemplateInstruction(
6729          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6730          dex_pc) {
6731    SetPackedField<OperationKindField>(kind);
6732    SetRawInputAt(0, object);
6733  }
6734
6735  // Instruction may go into runtime, so we need an environment.
6736  bool NeedsEnvironment() const OVERRIDE { return true; }
6737
6738  bool CanThrow() const OVERRIDE {
6739    // Verifier guarantees that monitor-exit cannot throw.
6740    // This is important because it allows the HGraphBuilder to remove
6741    // a dead throw-catch loop generated for `synchronized` blocks/methods.
6742    return IsEnter();
6743  }
6744
6745  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
6746  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
6747
6748  DECLARE_INSTRUCTION(MonitorOperation);
6749
6750 private:
6751  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
6752  static constexpr size_t kFieldOperationKindSize =
6753      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
6754  static constexpr size_t kNumberOfMonitorOperationPackedBits =
6755      kFieldOperationKind + kFieldOperationKindSize;
6756  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6757                "Too many packed fields.");
6758  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
6759
6760 private:
6761  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
6762};
6763
// Selects one of two values based on a condition (a ternary / conditional move).
class HSelect FINAL : public HExpression<3> {
 public:
  HSelect(HInstruction* condition,
          HInstruction* true_value,
          HInstruction* false_value,
          uint32_t dex_pc)
      : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
    // Both candidate values must agree on the phi-normalized type.
    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));

    // First input must be `true_value` or `false_value` to allow codegens to
    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
    // that architectures which implement HSelect as a conditional move also
    // will not need to invert the condition.
    SetRawInputAt(0, false_value);
    SetRawInputAt(1, true_value);
    SetRawInputAt(2, condition);
  }

  HInstruction* GetFalseValue() const { return InputAt(0); }
  HInstruction* GetTrueValue() const { return InputAt(1); }
  HInstruction* GetCondition() const { return InputAt(2); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // The result may be null if either candidate value may be null.
  bool CanBeNull() const OVERRIDE {
    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
  }

  DECLARE_INSTRUCTION(Select);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSelect);
};
6800
6801class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
6802 public:
6803  MoveOperands(Location source,
6804               Location destination,
6805               DataType::Type type,
6806               HInstruction* instruction)
6807      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}
6808
6809  Location GetSource() const { return source_; }
6810  Location GetDestination() const { return destination_; }
6811
6812  void SetSource(Location value) { source_ = value; }
6813  void SetDestination(Location value) { destination_ = value; }
6814
6815  // The parallel move resolver marks moves as "in-progress" by clearing the
6816  // destination (but not the source).
6817  Location MarkPending() {
6818    DCHECK(!IsPending());
6819    Location dest = destination_;
6820    destination_ = Location::NoLocation();
6821    return dest;
6822  }
6823
6824  void ClearPending(Location dest) {
6825    DCHECK(IsPending());
6826    destination_ = dest;
6827  }
6828
6829  bool IsPending() const {
6830    DCHECK(source_.IsValid() || destination_.IsInvalid());
6831    return destination_.IsInvalid() && source_.IsValid();
6832  }
6833
6834  // True if this blocks a move from the given location.
6835  bool Blocks(Location loc) const {
6836    return !IsEliminated() && source_.OverlapsWith(loc);
6837  }
6838
6839  // A move is redundant if it's been eliminated, if its source and
6840  // destination are the same, or if its destination is unneeded.
6841  bool IsRedundant() const {
6842    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
6843  }
6844
6845  // We clear both operands to indicate move that's been eliminated.
6846  void Eliminate() {
6847    source_ = destination_ = Location::NoLocation();
6848  }
6849
6850  bool IsEliminated() const {
6851    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
6852    return source_.IsInvalid();
6853  }
6854
6855  DataType::Type GetType() const { return type_; }
6856
6857  bool Is64BitMove() const {
6858    return DataType::Is64BitType(type_);
6859  }
6860
6861  HInstruction* GetInstruction() const { return instruction_; }
6862
6863 private:
6864  Location source_;
6865  Location destination_;
6866  // The type this move is for.
6867  DataType::Type type_;
6868  // The instruction this move is assocatied with. Null when this move is
6869  // for moving an input in the expected locations of user (including a phi user).
6870  // This is only used in debug mode, to ensure we do not connect interval siblings
6871  // in the same parallel move.
6872  HInstruction* instruction_;
6873};
6874
6875std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
6876
6877static constexpr size_t kDefaultNumberOfMoves = 4;
6878
// A set of moves that must behave as if they all happened simultaneously
// (used by register allocation / the parallel move resolver).
class HParallelMove FINAL : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  // Appends a move. In debug builds, verifies that:
  // - at most one move per instruction is added (except for the spill-slot
  //   special case below), and
  // - no two moves in this parallel move write overlapping destinations.
  void AddMove(Location source,
               Location destination,
               DataType::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};
6934
6935}  // namespace art
6936
6937#include "nodes_vector.h"
6938
6939#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
6940#include "nodes_shared.h"
6941#endif
6942#ifdef ART_ENABLE_CODEGEN_mips
6943#include "nodes_mips.h"
6944#endif
6945#ifdef ART_ENABLE_CODEGEN_x86
6946#include "nodes_x86.h"
6947#endif
6948
6949namespace art {
6950
6951class OptimizingCompilerStats;
6952
// Base class for visitors over the instructions of a graph. By default every
// generated Visit##name function forwards to VisitInstruction().
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
      : stats_(stats),
        graph_(graph) {}
  virtual ~HGraphVisitor() {}

  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes, one per FOR_EACH_INSTRUCTION entry.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 protected:
  // Optional statistics collector; may be null.
  OptimizingCompilerStats* stats_;

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
6987
// Visitor whose generated Visit##name functions delegate to the visit function
// of the instruction's super class, instead of the generic VisitInstruction().
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
      : HGraphVisitor(graph, stats) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
7005
7006// Iterator over the blocks that art part of the loop. Includes blocks part
7007// of an inner loop. The order in which the blocks are iterated is on their
7008// block id.
7009class HBlocksInLoopIterator : public ValueObject {
7010 public:
7011  explicit HBlocksInLoopIterator(const HLoopInformation& info)
7012      : blocks_in_loop_(info.GetBlocks()),
7013        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
7014        index_(0) {
7015    if (!blocks_in_loop_.IsBitSet(index_)) {
7016      Advance();
7017    }
7018  }
7019
7020  bool Done() const { return index_ == blocks_.size(); }
7021  HBasicBlock* Current() const { return blocks_[index_]; }
7022  void Advance() {
7023    ++index_;
7024    for (size_t e = blocks_.size(); index_ < e; ++index_) {
7025      if (blocks_in_loop_.IsBitSet(index_)) {
7026        break;
7027      }
7028    }
7029  }
7030
7031 private:
7032  const BitVector& blocks_in_loop_;
7033  const ArenaVector<HBasicBlock*>& blocks_;
7034  size_t index_;
7035
7036  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
7037};
7038
7039// Iterator over the blocks that art part of the loop. Includes blocks part
7040// of an inner loop. The order in which the blocks are iterated is reverse
7041// post order.
7042class HBlocksInLoopReversePostOrderIterator : public ValueObject {
7043 public:
7044  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
7045      : blocks_in_loop_(info.GetBlocks()),
7046        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
7047        index_(0) {
7048    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
7049      Advance();
7050    }
7051  }
7052
7053  bool Done() const { return index_ == blocks_.size(); }
7054  HBasicBlock* Current() const { return blocks_[index_]; }
7055  void Advance() {
7056    ++index_;
7057    for (size_t e = blocks_.size(); index_ < e; ++index_) {
7058      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
7059        break;
7060      }
7061    }
7062  }
7063
7064 private:
7065  const BitVector& blocks_in_loop_;
7066  const ArenaVector<HBasicBlock*>& blocks_;
7067  size_t index_;
7068
7069  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
7070};
7071
7072// Returns int64_t value of a properly typed constant.
7073inline int64_t Int64FromConstant(HConstant* constant) {
7074  if (constant->IsIntConstant()) {
7075    return constant->AsIntConstant()->GetValue();
7076  } else if (constant->IsLongConstant()) {
7077    return constant->AsLongConstant()->GetValue();
7078  } else {
7079    DCHECK(constant->IsNullConstant()) << constant->DebugName();
7080    return 0;
7081  }
7082}
7083
7084// Returns true iff instruction is an integral constant (and sets value on success).
7085inline bool IsInt64AndGet(HInstruction* instruction, /*out*/ int64_t* value) {
7086  if (instruction->IsIntConstant()) {
7087    *value = instruction->AsIntConstant()->GetValue();
7088    return true;
7089  } else if (instruction->IsLongConstant()) {
7090    *value = instruction->AsLongConstant()->GetValue();
7091    return true;
7092  } else if (instruction->IsNullConstant()) {
7093    *value = 0;
7094    return true;
7095  }
7096  return false;
7097}
7098
7099// Returns true iff instruction is the given integral constant.
7100inline bool IsInt64Value(HInstruction* instruction, int64_t value) {
7101  int64_t val = 0;
7102  return IsInt64AndGet(instruction, &val) && val == value;
7103}
7104
7105// Returns true iff instruction is a zero bit pattern.
7106inline bool IsZeroBitPattern(HInstruction* instruction) {
7107  return instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern();
7108}
7109
7110#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
7111  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
7112  inline const H##type* HInstruction::As##type() const {                       \
7113    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
7114  }                                                                            \
7115  inline H##type* HInstruction::As##type() {                                   \
7116    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
7117  }
7118
7119  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
7120#undef INSTRUCTION_TYPE_CHECK
7121
// Create space in `blocks` for adding `number_of_new_blocks` entries
// starting at location `after + 1`, i.e. immediately after block index
// `after`. Entries at positions greater than `after` are shifted towards
// the end accordingly. The freed slots are left holding stale copies of
// the shifted entries; callers are expected to overwrite them.
inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
                        size_t number_of_new_blocks,
                        size_t after) {
  DCHECK_LT(after, blocks->size());
  size_t old_size = blocks->size();
  size_t new_size = old_size + number_of_new_blocks;
  blocks->resize(new_size);
  // Shift [after + 1, old_size) to the tail of the enlarged vector.
  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
}
7133
7134/*
7135 * Hunt "under the hood" of array lengths (leading to array references),
7136 * null checks (also leading to array references), and new arrays
7137 * (leading to the actual length). This makes it more likely related
7138 * instructions become actually comparable.
7139 */
7140inline HInstruction* HuntForDeclaration(HInstruction* instruction) {
7141  while (instruction->IsArrayLength() ||
7142         instruction->IsNullCheck() ||
7143         instruction->IsNewArray()) {
7144    instruction = instruction->IsNewArray()
7145        ? instruction->AsNewArray()->GetLength()
7146        : instruction->InputAt(0);
7147  }
7148  return instruction;
7149}
7150
// Helpers operating on an instruction's environment uses; defined out-of-line.
void RemoveEnvironmentUses(HInstruction* instruction);
bool HasEnvironmentUsedByOthers(HInstruction* instruction);
void ResetEnvironmentInputRecords(HInstruction* instruction);
7154
7155}  // namespace art
7156
7157#endif  // ART_COMPILER_OPTIMIZING_NODES_H_
7158