nodes.h revision 69d310e0317e2fce97bf8c9c133c5c2c0332e61d
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18#define ART_COMPILER_OPTIMIZING_NODES_H_
19
20#include <algorithm>
21#include <array>
22#include <type_traits>
23
24#include "base/arena_bit_vector.h"
25#include "base/arena_containers.h"
26#include "base/arena_object.h"
27#include "base/array_ref.h"
28#include "base/iteration_range.h"
29#include "base/stl_util.h"
30#include "base/transform_array_ref.h"
31#include "data_type.h"
32#include "deoptimization_kind.h"
33#include "dex_file.h"
34#include "dex_file_types.h"
35#include "entrypoints/quick/quick_entrypoints_enum.h"
36#include "handle.h"
37#include "handle_scope.h"
38#include "intrinsics_enum.h"
39#include "invoke_type.h"
40#include "locations.h"
41#include "method_reference.h"
42#include "mirror/class.h"
43#include "offsets.h"
44#include "utils/intrusive_forward_list.h"
45
46namespace art {
47
48class ArenaStack;
49class GraphChecker;
50class HBasicBlock;
51class HConstructorFence;
52class HCurrentMethod;
53class HDoubleConstant;
54class HEnvironment;
55class HFloatConstant;
56class HGraphBuilder;
57class HGraphVisitor;
58class HInstruction;
59class HIntConstant;
60class HInvoke;
61class HLongConstant;
62class HNullConstant;
63class HParameterValue;
64class HPhi;
65class HSuspendCheck;
66class HTryBoundary;
67class LiveInterval;
68class LocationSummary;
69class SlowPathCode;
70class SsaBuilder;
71
72namespace mirror {
73class DexCache;
74}  // namespace mirror
75
// Default initial capacities used when reserving per-graph containers
// (see e.g. the HGraph constructor and HLoopInformation). These are only
// sizing hints; the containers grow as needed.
static constexpr int kDefaultNumberOfBlocks = 8;
static constexpr int kDefaultNumberOfSuccessors = 2;
static constexpr int kDefaultNumberOfPredecessors = 2;
static constexpr int kDefaultNumberOfExceptionalPredecessors = 0;
static constexpr int kDefaultNumberOfDominatedBlocks = 1;
static constexpr int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;

// Sentinel values denoting an unknown dex field index / class def index.
static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);
90
91static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);
92
93static constexpr uint32_t kNoDexPc = -1;
94
95inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
96  // For the purposes of the compiler, the dex files must actually be the same object
97  // if we want to safely treat them as the same. This is especially important for JIT
98  // as custom class loaders can open the same underlying file (or memory) multiple
99  // times and provide different class resolution but no two class loaders should ever
100  // use the same DexFile object - doing so is an unsupported hack that can lead to
101  // all sorts of weird failures.
102  return &lhs == &rhs;
103}
104
// Kinds of comparison conditions, grouped by the operand types they apply to.
enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
  // First and last aliases, e.g. for iterating over all condition kinds.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};
123
// Result of a graph analysis pass, e.g. HGraph::BuildDominatorTree() or
// HGraph::AnalyzeLoops().
enum GraphAnalysisResult {
  kAnalysisSkipped,
  kAnalysisInvalidBytecode,
  // A loop header is a catch block (see HGraph::AnalyzeLoops()).
  kAnalysisFailThrowCatchLoop,
  kAnalysisFailAmbiguousArrayOp,
  kAnalysisSuccess,
};
131
// Casts `x` to the unsigned counterpart of its type, preserving the bit
// pattern (e.g. int -> uint32_t).
template <typename T>
static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
  using UnsignedT = typename std::make_unsigned<T>::type;
  return static_cast<UnsignedT>(x);
}
136
// A list of HInstructions. Only the first and last instruction are stored
// here; the friend classes below maintain and iterate the links between the
// instructions.
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  // Add `instruction` to, or remove it from, this list.
  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise.  Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  // Drops this list's endpoint pointers; does not modify the instructions
  // themselves.
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  // Splice the instructions of `instruction_list` into this list.
  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HInstructionIteratorHandleChanges;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};
183
// Reference type information for an instruction: a class handle plus an
// exactness bit. A default-constructed (invalid) instance means "no type
// information available".
class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);

  // Creates type info whose exactness is derived from the class itself via
  // mirror::Class::CannotBeAssignedFromOtherTypes().
  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
  }

  // Unlike Create(), does not require the mutator lock (the handle is not
  // dereferenced here).
  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) {
    return handle.GetReference() != nullptr;
  }

  // Whether this instance carries any type information at all.
  bool IsValid() const {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  // The predicates below delegate to the underlying mirror::Class and
  // require a valid handle (DCHECKed) and the mutator lock.
  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  // Whether an array of this (exact) type can hold an element of type `rti`.
  // Conservatively returns false if this type is not exact or not an array.
  bool CanArrayHold(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Whether an array of this (exact) type can hold all elements of an array
  // of type `rti`, i.e. the component types are assignable.
  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Like IsSupertypeOf(), but excludes the case of identical classes.
  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provide the same amount of details.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
      : type_handle_(type_handle), is_exact_(is_exact) { }

  // The class of the object.
  TypeHandle type_handle_;
  // Whether type_handle_ is the exact type of the object, as opposed to a
  // superclass of the actual type. Meaningless when the info is invalid.
  bool is_exact_;
};
303
304std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
305
306// Control-flow graph of a method. Contains a list of basic blocks.
307class HGraph : public ArenaObject<kArenaAllocGraph> {
308 public:
309  HGraph(ArenaAllocator* allocator,
310         ArenaStack* arena_stack,
311         const DexFile& dex_file,
312         uint32_t method_idx,
313         InstructionSet instruction_set,
314         InvokeType invoke_type = kInvalidInvokeType,
315         bool debuggable = false,
316         bool osr = false,
317         int start_instruction_id = 0)
318      : allocator_(allocator),
319        arena_stack_(arena_stack),
320        blocks_(allocator->Adapter(kArenaAllocBlockList)),
321        reverse_post_order_(allocator->Adapter(kArenaAllocReversePostOrder)),
322        linear_order_(allocator->Adapter(kArenaAllocLinearOrder)),
323        entry_block_(nullptr),
324        exit_block_(nullptr),
325        maximum_number_of_out_vregs_(0),
326        number_of_vregs_(0),
327        number_of_in_vregs_(0),
328        temporaries_vreg_slots_(0),
329        has_bounds_checks_(false),
330        has_try_catch_(false),
331        has_simd_(false),
332        has_loops_(false),
333        has_irreducible_loops_(false),
334        debuggable_(debuggable),
335        current_instruction_id_(start_instruction_id),
336        dex_file_(dex_file),
337        method_idx_(method_idx),
338        invoke_type_(invoke_type),
339        in_ssa_form_(false),
340        number_of_cha_guards_(0),
341        instruction_set_(instruction_set),
342        cached_null_constant_(nullptr),
343        cached_int_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
344        cached_float_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
345        cached_long_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
346        cached_double_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
347        cached_current_method_(nullptr),
348        art_method_(nullptr),
349        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
350        osr_(osr),
351        cha_single_implementation_list_(allocator->Adapter(kArenaAllocCHA)) {
352    blocks_.reserve(kDefaultNumberOfBlocks);
353  }
354
355  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
356  void InitializeInexactObjectRTI(VariableSizedHandleScope* handles);
357
358  ArenaAllocator* GetAllocator() const { return allocator_; }
359  ArenaStack* GetArenaStack() const { return arena_stack_; }
360  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }
361
362  bool IsInSsaForm() const { return in_ssa_form_; }
363  void SetInSsaForm() { in_ssa_form_ = true; }
364
365  HBasicBlock* GetEntryBlock() const { return entry_block_; }
366  HBasicBlock* GetExitBlock() const { return exit_block_; }
367  bool HasExitBlock() const { return exit_block_ != nullptr; }
368
369  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
370  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }
371
372  void AddBlock(HBasicBlock* block);
373
374  void ComputeDominanceInformation();
375  void ClearDominanceInformation();
376  void ClearLoopInformation();
377  void FindBackEdges(ArenaBitVector* visited);
378  GraphAnalysisResult BuildDominatorTree();
379  void SimplifyCFG();
380  void SimplifyCatchBlocks();
381
382  // Analyze all natural loops in this graph. Returns a code specifying that it
383  // was successful or the reason for failure. The method will fail if a loop
384  // is a throw-catch loop, i.e. the header is a catch block.
385  GraphAnalysisResult AnalyzeLoops() const;
386
387  // Iterate over blocks to compute try block membership. Needs reverse post
388  // order and loop information.
389  void ComputeTryBlockInformation();
390
391  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
392  // Returns the instruction to replace the invoke expression or null if the
393  // invoke is for a void method. Note that the caller is responsible for replacing
394  // and removing the invoke instruction.
395  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);
396
397  // Update the loop and try membership of `block`, which was spawned from `reference`.
398  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
399  // should be the new back edge.
400  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
401                                             HBasicBlock* reference,
402                                             bool replace_if_back_edge);
403
404  // Need to add a couple of blocks to test if the loop body is entered and
405  // put deoptimization instructions, etc.
406  void TransformLoopHeaderForBCE(HBasicBlock* header);
407
408  // Adds a new loop directly after the loop with the given header and exit.
409  // Returns the new preheader.
410  HBasicBlock* TransformLoopForVectorization(HBasicBlock* header,
411                                             HBasicBlock* body,
412                                             HBasicBlock* exit);
413
414  // Removes `block` from the graph. Assumes `block` has been disconnected from
415  // other blocks and has no instructions or phis.
416  void DeleteDeadEmptyBlock(HBasicBlock* block);
417
418  // Splits the edge between `block` and `successor` while preserving the
419  // indices in the predecessor/successor lists. If there are multiple edges
420  // between the blocks, the lowest indices are used.
421  // Returns the new block which is empty and has the same dex pc as `successor`.
422  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);
423
424  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
425  void OrderLoopHeaderPredecessors(HBasicBlock* header);
426  void SimplifyLoop(HBasicBlock* header);
427
428  int32_t GetNextInstructionId() {
429    CHECK_NE(current_instruction_id_, INT32_MAX);
430    return current_instruction_id_++;
431  }
432
433  int32_t GetCurrentInstructionId() const {
434    return current_instruction_id_;
435  }
436
437  void SetCurrentInstructionId(int32_t id) {
438    CHECK_GE(id, current_instruction_id_);
439    current_instruction_id_ = id;
440  }
441
442  uint16_t GetMaximumNumberOfOutVRegs() const {
443    return maximum_number_of_out_vregs_;
444  }
445
446  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
447    maximum_number_of_out_vregs_ = new_value;
448  }
449
450  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
451    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
452  }
453
454  void UpdateTemporariesVRegSlots(size_t slots) {
455    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
456  }
457
458  size_t GetTemporariesVRegSlots() const {
459    DCHECK(!in_ssa_form_);
460    return temporaries_vreg_slots_;
461  }
462
463  void SetNumberOfVRegs(uint16_t number_of_vregs) {
464    number_of_vregs_ = number_of_vregs;
465  }
466
467  uint16_t GetNumberOfVRegs() const {
468    return number_of_vregs_;
469  }
470
471  void SetNumberOfInVRegs(uint16_t value) {
472    number_of_in_vregs_ = value;
473  }
474
475  uint16_t GetNumberOfInVRegs() const {
476    return number_of_in_vregs_;
477  }
478
479  uint16_t GetNumberOfLocalVRegs() const {
480    DCHECK(!in_ssa_form_);
481    return number_of_vregs_ - number_of_in_vregs_;
482  }
483
484  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
485    return reverse_post_order_;
486  }
487
488  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() {
489    DCHECK(GetReversePostOrder()[0] == entry_block_);
490    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
491  }
492
493  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
494    return ReverseRange(GetReversePostOrder());
495  }
496
497  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
498    return linear_order_;
499  }
500
501  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
502    return ReverseRange(GetLinearOrder());
503  }
504
505  bool HasBoundsChecks() const {
506    return has_bounds_checks_;
507  }
508
509  void SetHasBoundsChecks(bool value) {
510    has_bounds_checks_ = value;
511  }
512
513  bool IsDebuggable() const { return debuggable_; }
514
515  // Returns a constant of the given type and value. If it does not exist
516  // already, it is created and inserted into the graph. This method is only for
517  // integral types.
518  HConstant* GetConstant(DataType::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);
519
520  // TODO: This is problematic for the consistency of reference type propagation
521  // because it can be created anytime after the pass and thus it will be left
522  // with an invalid type.
523  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);
524
525  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
526    return CreateConstant(value, &cached_int_constants_, dex_pc);
527  }
528  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
529    return CreateConstant(value, &cached_long_constants_, dex_pc);
530  }
531  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
532    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
533  }
534  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
535    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
536  }
537
538  HCurrentMethod* GetCurrentMethod();
539
540  const DexFile& GetDexFile() const {
541    return dex_file_;
542  }
543
544  uint32_t GetMethodIdx() const {
545    return method_idx_;
546  }
547
548  // Get the method name (without the signature), e.g. "<init>"
549  const char* GetMethodName() const;
550
551  // Get the pretty method name (class + name + optionally signature).
552  std::string PrettyMethod(bool with_signature = true) const;
553
554  InvokeType GetInvokeType() const {
555    return invoke_type_;
556  }
557
558  InstructionSet GetInstructionSet() const {
559    return instruction_set_;
560  }
561
562  bool IsCompilingOsr() const { return osr_; }
563
564  ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
565    return cha_single_implementation_list_;
566  }
567
568  void AddCHASingleImplementationDependency(ArtMethod* method) {
569    cha_single_implementation_list_.insert(method);
570  }
571
572  bool HasShouldDeoptimizeFlag() const {
573    return number_of_cha_guards_ != 0;
574  }
575
576  bool HasTryCatch() const { return has_try_catch_; }
577  void SetHasTryCatch(bool value) { has_try_catch_ = value; }
578
579  bool HasSIMD() const { return has_simd_; }
580  void SetHasSIMD(bool value) { has_simd_ = value; }
581
582  bool HasLoops() const { return has_loops_; }
583  void SetHasLoops(bool value) { has_loops_ = value; }
584
585  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
586  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }
587
588  ArtMethod* GetArtMethod() const { return art_method_; }
589  void SetArtMethod(ArtMethod* method) { art_method_ = method; }
590
591  // Returns an instruction with the opposite Boolean value from 'cond'.
592  // The instruction has been inserted into the graph, either as a constant, or
593  // before cursor.
594  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);
595
596  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }
597
598  uint32_t GetNumberOfCHAGuards() { return number_of_cha_guards_; }
599  void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
600  void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }
601
602 private:
603  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
604  void RemoveDeadBlocks(const ArenaBitVector& visited);
605
606  template <class InstructionType, typename ValueType>
607  InstructionType* CreateConstant(ValueType value,
608                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
609                                  uint32_t dex_pc = kNoDexPc) {
610    // Try to find an existing constant of the given value.
611    InstructionType* constant = nullptr;
612    auto cached_constant = cache->find(value);
613    if (cached_constant != cache->end()) {
614      constant = cached_constant->second;
615    }
616
617    // If not found or previously deleted, create and cache a new instruction.
618    // Don't bother reviving a previously deleted instruction, for simplicity.
619    if (constant == nullptr || constant->GetBlock() == nullptr) {
620      constant = new (allocator_) InstructionType(value, dex_pc);
621      cache->Overwrite(value, constant);
622      InsertConstant(constant);
623    }
624    return constant;
625  }
626
627  void InsertConstant(HConstant* instruction);
628
629  // Cache a float constant into the graph. This method should only be
630  // called by the SsaBuilder when creating "equivalent" instructions.
631  void CacheFloatConstant(HFloatConstant* constant);
632
633  // See CacheFloatConstant comment.
634  void CacheDoubleConstant(HDoubleConstant* constant);
635
636  ArenaAllocator* const allocator_;
637  ArenaStack* const arena_stack_;
638
639  // List of blocks in insertion order.
640  ArenaVector<HBasicBlock*> blocks_;
641
642  // List of blocks to perform a reverse post order tree traversal.
643  ArenaVector<HBasicBlock*> reverse_post_order_;
644
645  // List of blocks to perform a linear order tree traversal. Unlike the reverse
646  // post order, this order is not incrementally kept up-to-date.
647  ArenaVector<HBasicBlock*> linear_order_;
648
649  HBasicBlock* entry_block_;
650  HBasicBlock* exit_block_;
651
652  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
653  uint16_t maximum_number_of_out_vregs_;
654
655  // The number of virtual registers in this method. Contains the parameters.
656  uint16_t number_of_vregs_;
657
658  // The number of virtual registers used by parameters of this method.
659  uint16_t number_of_in_vregs_;
660
661  // Number of vreg size slots that the temporaries use (used in baseline compiler).
662  size_t temporaries_vreg_slots_;
663
664  // Flag whether there are bounds checks in the graph. We can skip
665  // BCE if it's false. It's only best effort to keep it up to date in
666  // the presence of code elimination so there might be false positives.
667  bool has_bounds_checks_;
668
669  // Flag whether there are try/catch blocks in the graph. We will skip
670  // try/catch-related passes if it's false. It's only best effort to keep
671  // it up to date in the presence of code elimination so there might be
672  // false positives.
673  bool has_try_catch_;
674
675  // Flag whether SIMD instructions appear in the graph. If true, the
676  // code generators may have to be more careful spilling the wider
677  // contents of SIMD registers.
678  bool has_simd_;
679
680  // Flag whether there are any loops in the graph. We can skip loop
681  // optimization if it's false. It's only best effort to keep it up
682  // to date in the presence of code elimination so there might be false
683  // positives.
684  bool has_loops_;
685
686  // Flag whether there are any irreducible loops in the graph. It's only
687  // best effort to keep it up to date in the presence of code elimination
688  // so there might be false positives.
689  bool has_irreducible_loops_;
690
691  // Indicates whether the graph should be compiled in a way that
692  // ensures full debuggability. If false, we can apply more
693  // aggressive optimizations that may limit the level of debugging.
694  const bool debuggable_;
695
696  // The current id to assign to a newly added instruction. See HInstruction.id_.
697  int32_t current_instruction_id_;
698
699  // The dex file from which the method is from.
700  const DexFile& dex_file_;
701
702  // The method index in the dex file.
703  const uint32_t method_idx_;
704
705  // If inlined, this encodes how the callee is being invoked.
706  const InvokeType invoke_type_;
707
708  // Whether the graph has been transformed to SSA form. Only used
709  // in debug mode to ensure we are not using properties only valid
710  // for non-SSA form (like the number of temporaries).
711  bool in_ssa_form_;
712
713  // Number of CHA guards in the graph. Used to short-circuit the
714  // CHA guard optimization pass when there is no CHA guard left.
715  uint32_t number_of_cha_guards_;
716
717  const InstructionSet instruction_set_;
718
719  // Cached constants.
720  HNullConstant* cached_null_constant_;
721  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
722  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
723  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
724  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;
725
726  HCurrentMethod* cached_current_method_;
727
728  // The ArtMethod this graph is for. Note that for AOT, it may be null,
729  // for example for methods whose declaring class could not be resolved
730  // (such as when the superclass could not be found).
731  ArtMethod* art_method_;
732
733  // Keep the RTI of inexact Object to avoid having to pass stack handle
734  // collection pointer to passes which may create NullConstant.
735  ReferenceTypeInfo inexact_object_rti_;
736
737  // Whether we are compiling this graph for on stack replacement: this will
738  // make all loops seen as irreducible and emit special stack maps to mark
739  // compiled code entries which the interpreter can directly jump to.
740  const bool osr_;
741
742  // List of methods that are assumed to have single implementation.
743  ArenaSet<ArtMethod*> cha_single_implementation_list_;
744
745  friend class SsaBuilder;           // For caching constants.
746  friend class SsaLivenessAnalysis;  // For the linear order.
747  friend class HInliner;             // For the reverse post order.
748  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
749  DISALLOW_COPY_AND_ASSIGN(HGraph);
750};
751
752class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
753 public:
754  HLoopInformation(HBasicBlock* header, HGraph* graph)
755      : header_(header),
756        suspend_check_(nullptr),
757        irreducible_(false),
758        contains_irreducible_loop_(false),
759        back_edges_(graph->GetAllocator()->Adapter(kArenaAllocLoopInfoBackEdges)),
760        // Make bit vector growable, as the number of blocks may change.
761        blocks_(graph->GetAllocator(),
762                graph->GetBlocks().size(),
763                true,
764                kArenaAllocLoopInfoBackEdges) {
765    back_edges_.reserve(kDefaultNumberOfBackEdges);
766  }
767
768  bool IsIrreducible() const { return irreducible_; }
769  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }
770
771  void Dump(std::ostream& os);
772
773  HBasicBlock* GetHeader() const {
774    return header_;
775  }
776
777  void SetHeader(HBasicBlock* block) {
778    header_ = block;
779  }
780
781  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
782  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
783  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }
784
785  void AddBackEdge(HBasicBlock* back_edge) {
786    back_edges_.push_back(back_edge);
787  }
788
789  void RemoveBackEdge(HBasicBlock* back_edge) {
790    RemoveElement(back_edges_, back_edge);
791  }
792
793  bool IsBackEdge(const HBasicBlock& block) const {
794    return ContainsElement(back_edges_, &block);
795  }
796
797  size_t NumberOfBackEdges() const {
798    return back_edges_.size();
799  }
800
801  HBasicBlock* GetPreHeader() const;
802
803  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
804    return back_edges_;
805  }
806
807  // Returns the lifetime position of the back edge that has the
808  // greatest lifetime position.
809  size_t GetLifetimeEnd() const;
810
811  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
812    ReplaceElement(back_edges_, existing, new_back_edge);
813  }
814
  // Finds blocks that are part of this loop.
  void Populate();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  // Bit vector of the blocks belonging to this loop (presumably indexed by
  // block id — confirm in the implementation of Add/Contains).
  const ArenaBitVector& GetBlocks() const { return blocks_; }

  // Adds/removes `block` from the set of blocks of this loop.
  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  // Resets the block set, effectively de-populating this loop information.
  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  // The loop counts as populated once at least one bit is set in `blocks_`.
  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

  bool HasExitEdge() const;
847
 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  // The header block of the loop.
  HBasicBlock* header_;
  // Suspend check associated with this loop; may be null.
  HSuspendCheck* suspend_check_;
  // Whether this loop is irreducible; see PopulateIrreducibleRecursive.
  bool irreducible_;
  // Whether an irreducible loop is contained within this one.
  bool contains_irreducible_loop_;
  // Blocks whose control flow returns to the header.
  ArenaVector<HBasicBlock*> back_edges_;
  // Set of blocks in the loop, filled in by Populate().
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};
862
863// Stores try/catch information for basic blocks.
864// Note that HGraph is constructed so that catch blocks cannot simultaneously
865// be try blocks.
866class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
867 public:
868  // Try block information constructor.
869  explicit TryCatchInformation(const HTryBoundary& try_entry)
870      : try_entry_(&try_entry),
871        catch_dex_file_(nullptr),
872        catch_type_index_(DexFile::kDexNoIndex16) {
873    DCHECK(try_entry_ != nullptr);
874  }
875
876  // Catch block information constructor.
877  TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
878      : try_entry_(nullptr),
879        catch_dex_file_(&dex_file),
880        catch_type_index_(catch_type_index) {}
881
882  bool IsTryBlock() const { return try_entry_ != nullptr; }
883
884  const HTryBoundary& GetTryEntry() const {
885    DCHECK(IsTryBlock());
886    return *try_entry_;
887  }
888
889  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }
890
891  bool IsCatchAllTypeIndex() const {
892    DCHECK(IsCatchBlock());
893    return !catch_type_index_.IsValid();
894  }
895
896  dex::TypeIndex GetCatchTypeIndex() const {
897    DCHECK(IsCatchBlock());
898    return catch_type_index_;
899  }
900
901  const DexFile& GetCatchDexFile() const {
902    DCHECK(IsCatchBlock());
903    return *catch_dex_file_;
904  }
905
906 private:
907  // One of possibly several TryBoundary instructions entering the block's try.
908  // Only set for try blocks.
909  const HTryBoundary* try_entry_;
910
911  // Exception type information. Only set for catch blocks.
912  const DexFile* catch_dex_file_;
913  const dex::TypeIndex catch_type_index_;
914};
915
// Sentinel lifetime position for blocks that have no liveness info yet.
static constexpr size_t kNoLifetime = -1;
// Id carried by a block before it is assigned a real id in a graph.
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
918
919// A block in a method. Contains the list of instructions represented
920// as a double linked list. Each block knows its predecessors and
921// successors.
922
class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  // Creates a block for `graph`. The id remains kInvalidBlockId until set
  // via SetBlockId().
  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetAllocator()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetAllocator()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetAllocator()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  // Views of the successor list split into normal and exceptional edges.
  // NOTE(review): presumably exceptional == catch-handler edges; the
  // implementation is out of view — confirm.
  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  // Returns whether `block` appears among the successors, searching from
  // index `start_from`.
  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleReturn() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  // Records `back_edge` for the loop headed by this block, lazily creating
  // the loop information on first use.
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetAllocator()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  // Note: takes an int but the id is stored as uint32_t.
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  // Note: narrows the size_t back-edge count to int.
  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  // Links `block` as a successor of `this`, updating both edge lists.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  // Redirects the edge `this`->`existing` to `this`->`new_block`, keeping
  // the successor index stable.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  // Redirects the edge `existing`->`this` to `new_block`->`this`, keeping
  // the predecessor index stable.
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  // Only valid for blocks with exactly two predecessors.
  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  // Only valid for blocks with exactly two successors.
  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merges the instructions of `other` at the end of `this`.
  void MergeInstructionsWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated.
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  // Innermost enclosing loop, or null when not in a loop (see SetInLoop).
  HLoopInformation* loop_information_;
  // Dominator block; null until dominance information is computed.
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  // Lifetime positions of the block boundaries; kNoLifetime until set.
  size_t lifetime_start_;
  size_t lifetime_end_;
  // Try/catch info; null for blocks that are neither try nor catch blocks.
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};
1275
1276// Iterates over the LoopInformation of all loops which contain 'block'
1277// from the innermost to the outermost.
1278class HLoopInformationOutwardIterator : public ValueObject {
1279 public:
1280  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1281      : current_(block.GetLoopInformation()) {}
1282
1283  bool Done() const { return current_ == nullptr; }
1284
1285  void Advance() {
1286    DCHECK(!Done());
1287    current_ = current_->GetPreHeader()->GetLoopInformation();
1288  }
1289
1290  HLoopInformation* Current() const {
1291    DCHECK(!Done());
1292    return current_;
1293  }
1294
1295 private:
1296  HLoopInformation* current_;
1297
1298  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1299};
1300
// X-macro listing every concrete instruction class common to all
// architectures, as M(InstructionName, BaseClassName).
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClassTableGet, Instruction)                                         \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(ConstructorFence, Instruction)                                      \
  M(CurrentMethod, Instruction)                                         \
  M(ShouldDeoptimizeFlag, Instruction)                                  \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(InvokePolymorphic, Invoke)                                          \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadString, Instruction)                                            \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(NativeDebugInfo, Instruction)                                       \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Ror, BinaryOperation)                                               \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(Select, Instruction)                                                \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \
  M(VecReplicateScalar, VecUnaryOperation)                              \
  M(VecExtractScalar, VecUnaryOperation)                                \
  M(VecReduce, VecUnaryOperation)                                       \
  M(VecCnv, VecUnaryOperation)                                          \
  M(VecNeg, VecUnaryOperation)                                          \
  M(VecAbs, VecUnaryOperation)                                          \
  M(VecNot, VecUnaryOperation)                                          \
  M(VecAdd, VecBinaryOperation)                                         \
  M(VecHalvingAdd, VecBinaryOperation)                                  \
  M(VecSub, VecBinaryOperation)                                         \
  M(VecMul, VecBinaryOperation)                                         \
  M(VecDiv, VecBinaryOperation)                                         \
  M(VecMin, VecBinaryOperation)                                         \
  M(VecMax, VecBinaryOperation)                                         \
  M(VecAnd, VecBinaryOperation)                                         \
  M(VecAndNot, VecBinaryOperation)                                      \
  M(VecOr, VecBinaryOperation)                                          \
  M(VecXor, VecBinaryOperation)                                         \
  M(VecShl, VecBinaryOperation)                                         \
  M(VecShr, VecBinaryOperation)                                         \
  M(VecUShr, VecBinaryOperation)                                        \
  M(VecSetScalars, VecOperation)                                        \
  M(VecMultiplyAccumulate, VecOperation)                                \
  M(VecSADAccumulate, VecOperation)                                     \
  M(VecLoad, VecMemoryOperation)                                        \
  M(VecStore, VecMemoryOperation)                                       \

/*
 * Instructions, shared across several (not all) architectures.
 */
#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
  M(BitwiseNegatedRight, Instruction)                                   \
  M(DataProcWithShifterOp, Instruction)                                 \
  M(MultiplyAccumulate, Instruction)                                    \
  M(IntermediateAddress, Instruction)                                   \
  M(IntermediateAddressIndex, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)

#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)

#ifndef ART_ENABLE_CODEGEN_mips
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                           \
  M(MipsComputeBaseMethodAddress, Instruction)                          \
  M(MipsPackedSwitch, Instruction)                                      \
  M(IntermediateArrayAddressIndex, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86FPNeg, Instruction)                                              \
  M(X86PackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// All concrete instructions for the currently enabled set of backends.
#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// Abstract (non-instantiable) instruction base classes.
#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
  M(Condition, BinaryOperation)                                         \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)                                                \
  M(VecOperation, Instruction)                                          \
  M(VecUnaryOperation, VecOperation)                                    \
  M(VecBinaryOperation, VecOperation)                                   \
  M(VecMemoryOperation, VecOperation)

// Concrete and abstract instructions together.
#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

// Forward-declare the H<type> class for every instruction listed above.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION

// Boilerplate placed in each concrete instruction class: kind query, debug
// name, type-equality check and visitor dispatch.
#define DECLARE_INSTRUCTION(type)                                         \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }    \
  const char* DebugName() const OVERRIDE { return #type; }                \
  bool InstructionTypeEquals(const HInstruction* other) const OVERRIDE {  \
    return other->Is##type();                                             \
  }                                                                       \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// Provides the Is<type>()/As<type>() casts for an abstract instruction base
// class, where the cast trivially succeeds.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }
1493
// A node in an instruction's (intrusive) list of uses. T is the user type:
// HInstruction* for regular inputs, HEnvironment* for environment uses.
// Nodes are arena-allocated and only HInstruction (a friend) can create them.
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode>,
                     public IntrusiveForwardListNode<HUseListNode<T>> {
 public:
  // Get the instruction which has this use as one of the inputs.
  T GetUser() const { return user_; }
  // Get the position of the input record that this use corresponds to.
  size_t GetIndex() const { return index_; }
  // Set the position of the input record that this use corresponds to.
  void SetIndex(size_t index) { index_ = index; }

 private:
  // Private: nodes are created only by HInstruction::AddUseAt()/AddEnvUseAt().
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  T const user_;   // The user is fixed for the lifetime of the node.
  size_t index_;   // May be updated when inputs are removed/reordered.

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};

// A singly-linked list of uses; owned (as head pointer) by the used instruction.
template <typename T>
using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1519
1520// This class is used by HEnvironment and HInstruction classes to record the
1521// instructions they use and pointers to the corresponding HUseListNodes kept
1522// by the used instructions.
1523template <typename T>
1524class HUserRecord : public ValueObject {
1525 public:
1526  HUserRecord() : instruction_(nullptr), before_use_node_() {}
1527  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}
1528
1529  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
1530      : HUserRecord(old_record.instruction_, before_use_node) {}
1531  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
1532      : instruction_(instruction), before_use_node_(before_use_node) {
1533    DCHECK(instruction_ != nullptr);
1534  }
1535
1536  HInstruction* GetInstruction() const { return instruction_; }
1537  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
1538  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }
1539
1540 private:
1541  // Instruction used by the user.
1542  HInstruction* instruction_;
1543
1544  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
1545  typename HUseList<T>::iterator before_use_node_;
1546};
1547
// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
// This is used for HInstruction::GetInputs() to return a container wrapper providing
// HInstruction* values even though the underlying container has HUserRecord<>s.
struct HInputExtractor {
  // Mutable overload: yields HInstruction* for each record.
  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
  // Const overload: yields const HInstruction* for each record.
  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
};

// Views over an instruction's input records exposing plain (const) HInstruction*.
using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
1562
1563/**
1564 * Side-effects representation.
1565 *
1566 * For write/read dependences on fields/arrays, the dependence analysis uses
1567 * type disambiguation (e.g. a float field write cannot modify the value of an
1568 * integer field read) and the access type (e.g.  a reference array write cannot
1569 * modify the value of a reference field read [although it may modify the
1570 * reference fetch prior to reading the field, which is represented by its own
1571 * write/read dependence]). The analysis makes conservative points-to
1572 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1573 * the same, and any reference read depends on any reference read without
1574 * further regard of its type).
1575 *
1576 * The internal representation uses 38-bit and is described in the table below.
1577 * The first line indicates the side effect, and for field/array accesses the
1578 * second line indicates the type of the access (in the order of the
1579 * DataType::Type enum).
1580 * The two numbered lines below indicate the bit position in the bitfield (read
1581 * vertically).
1582 *
1583 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1584 *   +-------------+---------+---------+--------------+---------+---------+
1585 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1586 *   |      3      |333333322|222222221|       1      |111111110|000000000|
1587 *   |      7      |654321098|765432109|       8      |765432109|876543210|
1588 *
1589 * Note that, to ease the implementation, 'changes' bits are least significant
1590 * bits, while 'dependency' bits are most significant bits.
1591 */
1592class SideEffects : public ValueObject {
1593 public:
1594  SideEffects() : flags_(0) {}
1595
1596  static SideEffects None() {
1597    return SideEffects(0);
1598  }
1599
1600  static SideEffects All() {
1601    return SideEffects(kAllChangeBits | kAllDependOnBits);
1602  }
1603
1604  static SideEffects AllChanges() {
1605    return SideEffects(kAllChangeBits);
1606  }
1607
1608  static SideEffects AllDependencies() {
1609    return SideEffects(kAllDependOnBits);
1610  }
1611
1612  static SideEffects AllExceptGCDependency() {
1613    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
1614  }
1615
1616  static SideEffects AllWritesAndReads() {
1617    return SideEffects(kAllWrites | kAllReads);
1618  }
1619
1620  static SideEffects AllWrites() {
1621    return SideEffects(kAllWrites);
1622  }
1623
1624  static SideEffects AllReads() {
1625    return SideEffects(kAllReads);
1626  }
1627
1628  static SideEffects FieldWriteOfType(DataType::Type type, bool is_volatile) {
1629    return is_volatile
1630        ? AllWritesAndReads()
1631        : SideEffects(TypeFlag(type, kFieldWriteOffset));
1632  }
1633
1634  static SideEffects ArrayWriteOfType(DataType::Type type) {
1635    return SideEffects(TypeFlag(type, kArrayWriteOffset));
1636  }
1637
1638  static SideEffects FieldReadOfType(DataType::Type type, bool is_volatile) {
1639    return is_volatile
1640        ? AllWritesAndReads()
1641        : SideEffects(TypeFlag(type, kFieldReadOffset));
1642  }
1643
1644  static SideEffects ArrayReadOfType(DataType::Type type) {
1645    return SideEffects(TypeFlag(type, kArrayReadOffset));
1646  }
1647
1648  static SideEffects CanTriggerGC() {
1649    return SideEffects(1ULL << kCanTriggerGCBit);
1650  }
1651
1652  static SideEffects DependsOnGC() {
1653    return SideEffects(1ULL << kDependsOnGCBit);
1654  }
1655
1656  // Combines the side-effects of this and the other.
1657  SideEffects Union(SideEffects other) const {
1658    return SideEffects(flags_ | other.flags_);
1659  }
1660
1661  SideEffects Exclusion(SideEffects other) const {
1662    return SideEffects(flags_ & ~other.flags_);
1663  }
1664
1665  void Add(SideEffects other) {
1666    flags_ |= other.flags_;
1667  }
1668
1669  bool Includes(SideEffects other) const {
1670    return (other.flags_ & flags_) == other.flags_;
1671  }
1672
1673  bool HasSideEffects() const {
1674    return (flags_ & kAllChangeBits);
1675  }
1676
1677  bool HasDependencies() const {
1678    return (flags_ & kAllDependOnBits);
1679  }
1680
1681  // Returns true if there are no side effects or dependencies.
1682  bool DoesNothing() const {
1683    return flags_ == 0;
1684  }
1685
1686  // Returns true if something is written.
1687  bool DoesAnyWrite() const {
1688    return (flags_ & kAllWrites);
1689  }
1690
1691  // Returns true if something is read.
1692  bool DoesAnyRead() const {
1693    return (flags_ & kAllReads);
1694  }
1695
1696  // Returns true if potentially everything is written and read
1697  // (every type and every kind of access).
1698  bool DoesAllReadWrite() const {
1699    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
1700  }
1701
1702  bool DoesAll() const {
1703    return flags_ == (kAllChangeBits | kAllDependOnBits);
1704  }
1705
1706  // Returns true if `this` may read something written by `other`.
1707  bool MayDependOn(SideEffects other) const {
1708    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
1709    return (other.flags_ & depends_on_flags);
1710  }
1711
1712  // Returns string representation of flags (for debugging only).
1713  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
1714  std::string ToString() const {
1715    std::string flags = "|";
1716    for (int s = kLastBit; s >= 0; s--) {
1717      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
1718      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
1719        // This is a bit for the GC side effect.
1720        if (current_bit_is_set) {
1721          flags += "GC";
1722        }
1723        flags += "|";
1724      } else {
1725        // This is a bit for the array/field analysis.
1726        // The underscore character stands for the 'can trigger GC' bit.
1727        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
1728        if (current_bit_is_set) {
1729          flags += kDebug[s];
1730        }
1731        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
1732            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
1733          flags += "|";
1734        }
1735      }
1736    }
1737    return flags;
1738  }
1739
1740  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }
1741
1742 private:
1743  static constexpr int kFieldArrayAnalysisBits = 9;
1744
1745  static constexpr int kFieldWriteOffset = 0;
1746  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
1747  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
1748  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;
1749
1750  static constexpr int kChangeBits = kCanTriggerGCBit + 1;
1751
1752  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
1753  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
1754  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
1755  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;
1756
1757  static constexpr int kLastBit = kDependsOnGCBit;
1758  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;
1759
1760  // Aliases.
1761
1762  static_assert(kChangeBits == kDependOnBits,
1763                "the 'change' bits should match the 'depend on' bits.");
1764
1765  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
1766  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
1767  static constexpr uint64_t kAllWrites =
1768      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
1769  static constexpr uint64_t kAllReads =
1770      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;
1771
1772  // Translates type to bit flag. The type must correspond to a Java type.
1773  static uint64_t TypeFlag(DataType::Type type, int offset) {
1774    int shift;
1775    switch (type) {
1776      case DataType::Type::kReference: shift = 0; break;
1777      case DataType::Type::kBool:      shift = 1; break;
1778      case DataType::Type::kInt8:      shift = 2; break;
1779      case DataType::Type::kUint16:    shift = 3; break;
1780      case DataType::Type::kInt16:     shift = 4; break;
1781      case DataType::Type::kInt32:     shift = 5; break;
1782      case DataType::Type::kInt64:     shift = 6; break;
1783      case DataType::Type::kFloat32:   shift = 7; break;
1784      case DataType::Type::kFloat64:   shift = 8; break;
1785      default:
1786        LOG(FATAL) << "Unexpected data type " << type;
1787        UNREACHABLE();
1788    }
1789    DCHECK_LE(kFieldWriteOffset, shift);
1790    DCHECK_LT(shift, kArrayWriteOffset);
1791    return UINT64_C(1) << (shift + offset);
1792  }
1793
1794  // Private constructor on direct flags value.
1795  explicit SideEffects(uint64_t flags) : flags_(flags) {}
1796
1797  uint64_t flags_;
1798};
1799
// A HEnvironment object contains the values of virtual registers at a given location.
// Environments form a chain via parent_ when the holder has been inlined: the
// parent environment describes the caller's vregs at the inlined call site.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  ALWAYS_INLINE HEnvironment(ArenaAllocator* allocator,
                             size_t number_of_vregs,
                             ArtMethod* method,
                             uint32_t dex_pc,
                             HInstruction* holder)
     : vregs_(number_of_vregs, allocator->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(allocator->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       method_(method),
       dex_pc_(dex_pc),
       holder_(holder) {
  }

  // Creates an empty environment with the same size/method/dex_pc as `to_copy`.
  // Note: the vreg values themselves are not copied; see CopyFrom().
  ALWAYS_INLINE HEnvironment(ArenaAllocator* allocator,
                             const HEnvironment& to_copy,
                             HInstruction* holder)
      : HEnvironment(allocator,
                     to_copy.Size(),
                     to_copy.GetMethod(),
                     to_copy.GetDexPc(),
                     holder) {}

  // Lazily allocates the per-vreg Location array (used by register allocation).
  void AllocateLocations() {
    DCHECK(locations_.empty());
    locations_.resize(vregs_.size());
  }

  // Appends a copy of `parent` (and its ancestors) at the end of this
  // environment's parent chain. All copies share this environment's holder_.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(ArrayRef<HInstruction* const> locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Raw: does not update the used instruction's env-use list.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  // Number of virtual registers recorded in this environment (excluding parents).
  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  ArtMethod* GetMethod() const {
    return method_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }


  // A non-null parent means the holder is inside an inlined method body.
  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  HEnvironment* parent_;
  ArtMethod* method_;
  const uint32_t dex_pc_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  // HInstruction manipulates parent_ directly (see InsertRawEnvironment()).
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
1903
1904class HInstruction : public ArenaObject<kArenaAllocInstruction> {
1905 public:
  // Creates an instruction that is not yet inserted into a block. The id,
  // ssa index, locations and live interval are assigned later by their
  // respective passes; the reference type info starts out invalid.
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(0u),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
  }

  virtual ~HInstruction() {}

// One enumerator (k<Type>) per instruction kind, generated from the master list.
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND

  // Intra-block linked-list navigation.
  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetAllocator() const { return block_->GetGraph()->GetAllocator(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
  bool IsIrreducibleLoopHeaderPhi() const {
    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
  }

  // Subclasses expose their input storage through this single virtual method.
  virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;

  ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
    // One virtual method is enough, just const_cast<> and then re-add the const.
    return ArrayRef<const HUserRecord<HInstruction*>>(
        const_cast<HInstruction*>(this)->GetInputRecords());
  }

  // Views of the inputs as plain HInstruction* (see HInputExtractor).
  HInputsRef GetInputs() {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  HConstInputsRef GetInputs() const {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  size_t InputCount() const { return GetInputRecords().size(); }
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }
1965
1966  bool HasInput(HInstruction* input) const {
1967    for (const HInstruction* i : GetInputs()) {
1968      if (i == input) {
1969        return true;
1970      }
1971    }
1972    return false;
1973  }
1974
  // Raw: does not update the input's use list (see AddUseAt() for that).
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  virtual DataType::Type GetType() const { return DataType::Type::kVoid; }

  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  // Can the instruction throw?
  // TODO: We should rename to CanVisiblyThrow, as some instructions (like HNewInstance),
  // could throw OOME, but it is still OK to remove them if they are unused.
  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), DataType::Type::kReference) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  virtual bool IsActualObject() const {
    return GetType() == DataType::Type::kReference;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  // Reassembles the RTI from the stored handle and the packed "is exact" flag.
  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), DataType::Type::kReference);
    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
  }

  // Records that `user` uses this instruction as input number `index`, then
  // repairs the iterators cached in the affected HUserRecords.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    HUseListNode<HInstruction*>* new_node =
        new (GetBlock()->GetGraph()->GetAllocator()) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }

  // Environment-use counterpart of AddUseAt().
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetAllocator()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }

  // Unlinks this instruction from the use list of its input number `input`.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }

  void RemoveAsUserOfAllInputs() {
    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
      input_use.GetInstruction()->uses_.erase_after(before_use_node);
      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
    }
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
  }
2067
2068  bool IsRemovable() const {
2069    return
2070        !DoesAnyWrite() &&
2071        !CanThrow() &&
2072        !IsSuspendCheck() &&
2073        !IsControlFlow() &&
2074        !IsNativeDebugInfo() &&
2075        !IsParameterValue() &&
2076        // If we added an explicit barrier then we should keep it.
2077        !IsMemoryBarrier() &&
2078        !IsConstructorFence();
2079  }
2080
  // Removable and without any (regular or environment) uses.
  bool IsDeadAndRemovable() const {
    return IsRemovable() && !HasUses();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  // Unique id within the graph; -1 until assigned.
  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  // Index assigned by SSA liveness analysis; -1 until assigned.
  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Prepends `environment` to the existing environment chain (raw, no use
  // list update); the previous environment becomes its parent.
  void InsertRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ != nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    DCHECK(environment->GetParent() == nullptr);
    environment->parent_ = environment_;
    environment_ = environment;
  }

  void RemoveEnvironment();

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // As CopyEnvironmentFrom(), but loop phis of `block` are replaced by their
  // first input (see HEnvironment::CopyFromWithLoopPhiAdjustment()).
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }
2157
  // Move `this` instruction before `cursor`
  void MoveBefore(HInstruction* cursor, bool do_checks = true);

  // Move `this` before its first user and out of any loops. If there is no
  // out-of-loop user that dominates all other users, move the instruction
  // to the end of the out-of-loop common dominator of the user's blocks.
  //
  // This can be used only on non-throwing instructions with no side effects that
  // have at least one use but no environment uses.
  void MoveBeforeFirstUserAndOutOfLoops();

// Declares Is/As type checks for every concrete instruction; their
// definitions are generated elsewhere from the same master list.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const;                                                       \
  const H##type* As##type() const;                                             \
  H##type* As##type();

  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

// For abstract instructions the As##type() accessors are virtual and default
// to nullptr here; DECLARE_ABSTRACT_INSTRUCTION overrides them in subclasses.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const { return (As##type() != nullptr); }                    \
  virtual const H##type* As##type() const { return nullptr; }                  \
  virtual H##type* As##type() { return nullptr; }
  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // Returns whether the instruction can be moved within the graph.
  // TODO: this method is used by LICM and GVN with possibly different
  //       meanings? split and rename?
  virtual bool CanBeMoved() const { return false; }

  // Returns whether the two instructions are of the same kind.
  virtual bool InstructionTypeEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
  bool Equals(const HInstruction* other) const;

  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
  // is adopted and implemented by our C++ compiler(s). Fow now, we need to hide
  // the virtual function because the __attribute__((__pure__)) doesn't really
  // apply the strong requirement for virtual functions, preventing optimizations.
  InstructionKind GetKind() const PURE;
  virtual InstructionKind GetKindInternal() const = 0;

  // Hash combining the kind with the ids of all inputs (used by GVN).
  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (const HInstruction* input : GetInputs()) {
      result = (result * 31) + input->GetId();
    }
    return result;
  }

  SideEffects GetSideEffects() const { return side_effects_; }
  void SetSideEffects(SideEffects other) { side_effects_ = other; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  // Liveness information, filled in by the SSA liveness analysis.
  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
  //      to access the dex cache.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsCurrentMethod();
  }

  // Returns whether the code generation of the instruction will require to have access
  // to the dex cache of the current method's declaring class via the current method.
  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }
2257
2258 protected:
2259  // If set, the machine code for this instruction is assumed to be generated by
2260  // its users. Used by liveness analysis to compute use positions accordingly.
2261  static constexpr size_t kFlagEmittedAtUseSite = 0u;
2262  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
2263  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
2264  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;
2265
2266  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
2267    return GetInputRecords()[i];
2268  }
2269
2270  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
2271    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
2272    input_records[index] = input;
2273  }
2274
  // Returns the raw 32-bit storage backing all packed flags and bit fields.
  uint32_t GetPackedFields() const {
    return packed_fields_;
  }
2278
2279  template <size_t flag>
2280  bool GetPackedFlag() const {
2281    return (packed_fields_ & (1u << flag)) != 0u;
2282  }
2283
2284  template <size_t flag>
2285  void SetPackedFlag(bool value = true) {
2286    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
2287  }
2288
  // Decodes the multi-bit field described by `BitFieldType` from the packed storage.
  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }
2293
  // Encodes `value` into the multi-bit field described by `BitFieldType`.
  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    // Verify the value fits within the field's declared bit width.
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }
2299
2300 private:
  // After use nodes have been inserted at the front of `uses_` (all nodes up to,
  // but excluding, `fixup_end`), rewrite the affected users' input records so
  // that each record caches the iterator to the node *preceding* its own use,
  // allowing the use to later be unlinked through its predecessor.
  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }
2310
  // After removing the use node that followed `before_use_node`, the use that is
  // now the successor (if any) has a stale cached predecessor iterator; repoint
  // it to `before_use_node`.
  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }
2320
  // Environment-use counterpart of FixUpUserRecordsAfterUseInsertion(): after
  // inserting env-use nodes at the front of `env_uses_` (up to, but excluding,
  // `env_fixup_end`), refresh the cached predecessor iterator stored in each
  // affected environment's vreg record.
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }
2330
  // Environment-use counterpart of FixUpUserRecordsAfterUseRemoval(): after
  // removing the env-use node that followed `before_env_use_node`, repoint the
  // stale predecessor iterator cached by the next environment use (if any).
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }
2340
2341  HInstruction* previous_;
2342  HInstruction* next_;
2343  HBasicBlock* block_;
2344  const uint32_t dex_pc_;
2345
2346  // An instruction gets an id when it is added to the graph.
2347  // It reflects creation order. A negative id means the instruction
2348  // has not been added to the graph.
2349  int id_;
2350
2351  // When doing liveness analysis, instructions that have uses get an SSA index.
2352  int ssa_index_;
2353
2354  // Packed fields.
2355  uint32_t packed_fields_;
2356
2357  // List of instructions that have this instruction as input.
2358  HUseList<HInstruction*> uses_;
2359
2360  // List of environments that contain this instruction.
2361  HUseList<HEnvironment*> env_uses_;
2362
2363  // The environment associated with this instruction. Not null if the instruction
2364  // might jump out of the method.
2365  HEnvironment* environment_;
2366
2367  // Set by the code generator.
2368  LocationSummary* locations_;
2369
2370  // Set by the liveness analysis.
2371  LiveInterval* live_interval_;
2372
2373  // Set by the liveness analysis, this is the position in a linear
2374  // order of blocks where this instruction's live interval start.
2375  size_t lifetime_position_;
2376
2377  SideEffects side_effects_;
2378
2379  // The reference handle part of the reference type info.
2380  // The IsExact() flag is stored in packed fields.
2381  // TODO: for primitive types this should be marked as invalid.
2382  ReferenceTypeInfo::TypeHandle reference_type_handle_;
2383
2384  friend class GraphChecker;
2385  friend class HBasicBlock;
2386  friend class HEnvironment;
2387  friend class HGraph;
2388  friend class HInstructionList;
2389
2390  DISALLOW_COPY_AND_ASSIGN(HInstruction);
2391};
2392std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2393
2394// Iterates over the instructions, while preserving the next instruction
2395// in case the current instruction gets removed from the list by the user
2396// of this iterator.
2397class HInstructionIterator : public ValueObject {
2398 public:
2399  explicit HInstructionIterator(const HInstructionList& instructions)
2400      : instruction_(instructions.first_instruction_) {
2401    next_ = Done() ? nullptr : instruction_->GetNext();
2402  }
2403
2404  bool Done() const { return instruction_ == nullptr; }
2405  HInstruction* Current() const { return instruction_; }
2406  void Advance() {
2407    instruction_ = next_;
2408    next_ = Done() ? nullptr : instruction_->GetNext();
2409  }
2410
2411 private:
2412  HInstruction* instruction_;
2413  HInstruction* next_;
2414
2415  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2416};
2417
2418// Iterates over the instructions without saving the next instruction,
2419// therefore handling changes in the graph potentially made by the user
2420// of this iterator.
2421class HInstructionIteratorHandleChanges : public ValueObject {
2422 public:
2423  explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
2424      : instruction_(instructions.first_instruction_) {
2425  }
2426
2427  bool Done() const { return instruction_ == nullptr; }
2428  HInstruction* Current() const { return instruction_; }
2429  void Advance() {
2430    instruction_ = instruction_->GetNext();
2431  }
2432
2433 private:
2434  HInstruction* instruction_;
2435
2436  DISALLOW_COPY_AND_ASSIGN(HInstructionIteratorHandleChanges);
2437};
2438
2439
2440class HBackwardInstructionIterator : public ValueObject {
2441 public:
2442  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2443      : instruction_(instructions.last_instruction_) {
2444    next_ = Done() ? nullptr : instruction_->GetPrevious();
2445  }
2446
2447  bool Done() const { return instruction_ == nullptr; }
2448  HInstruction* Current() const { return instruction_; }
2449  void Advance() {
2450    instruction_ = next_;
2451    next_ = Done() ? nullptr : instruction_->GetPrevious();
2452  }
2453
2454 private:
2455  HInstruction* instruction_;
2456  HInstruction* next_;
2457
2458  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2459};
2460
// Base class for instructions whose number of inputs is determined at
// construction time; inputs are stored in an arena-allocated vector.
class HVariableInputSizeInstruction : public HInstruction {
 public:
  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  void AddInput(HInstruction* input);
  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

  // Removes all the inputs.
  // Also removes this instruction from each input's use list
  // (for non-environment uses only).
  void RemoveAllInputs();

 protected:
  HVariableInputSizeInstruction(SideEffects side_effects,
                                uint32_t dex_pc,
                                ArenaAllocator* allocator,
                                size_t number_of_inputs,
                                ArenaAllocKind kind)
      : HInstruction(side_effects, dex_pc),
        inputs_(number_of_inputs, allocator->Adapter(kind)) {}

  ArenaVector<HUserRecord<HInstruction*>> inputs_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HVariableInputSizeInstruction);
};
2491
// Base class for instructions with a fixed number of inputs (N), stored
// inline in a std::array instead of an arena vector.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

 private:
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};
2509
2510// HTemplateInstruction specialization for N=0.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // There are no inputs to expose; return an empty view.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>();
  }

 private:
  friend class SsaBuilder;
};
2527
2528template<intptr_t N>
2529class HExpression : public HTemplateInstruction<N> {
2530 public:
2531  HExpression<N>(DataType::Type type, SideEffects side_effects, uint32_t dex_pc)
2532      : HTemplateInstruction<N>(side_effects, dex_pc) {
2533    this->template SetPackedField<TypeField>(type);
2534  }
2535  virtual ~HExpression() {}
2536
2537  DataType::Type GetType() const OVERRIDE {
2538    return TypeField::Decode(this->GetPackedFields());
2539  }
2540
2541 protected:
2542  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
2543  static constexpr size_t kFieldTypeSize =
2544      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
2545  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
2546  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
2547                "Too many packed fields.");
2548  using TypeField = BitField<DataType::Type, kFieldType, kFieldTypeSize>;
2549};
2550
2551// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
2552// instruction that branches to the exit block.
class HReturnVoid FINAL : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  // Terminates its block; branches to the exit block.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};
2565
2566// Represents dex's RETURN opcodes. A HReturn is a control flow
2567// instruction that branches to the exit block.
class HReturn FINAL : public HTemplateInstruction<1> {
 public:
  // Input 0 is the value being returned.
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // Terminates its block; branches to the exit block.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};
2582
// SSA phi node: selects a value based on the predecessor through which its
// block was entered. `reg_number` is the dex register the phi stands for.
class HPhi FINAL : public HVariableInputSizeInstruction {
 public:
  HPhi(ArenaAllocator* allocator,
       uint32_t reg_number,
       size_t number_of_inputs,
       DataType::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HVariableInputSizeInstruction(
            SideEffects::None(),
            dex_pc,
            allocator,
            number_of_inputs,
            kArenaAllocPhiInputs),
        reg_number_(reg_number) {
    SetPackedField<TypeField>(ToPhiType(type));
    DCHECK_NE(GetType(), DataType::Type::kVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static DataType::Type ToPhiType(DataType::Type type) {
    return DataType::Kind(type);
  }

  // Catch phis merge values from exceptional control flow into a catch block.
  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  DataType::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
  void SetType(DataType::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    //  (1) int  -> float/ref (primitive type propagation),
    //  (2) long -> double (primitive type propagation).
    DCHECK(GetType() == new_type ||
           (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kFloat32) ||
           (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kReference) ||
           (GetType() == DataType::Type::kInt64 && new_type == DataType::Type::kFloat64));
    SetPackedField<TypeField>(new_type);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  // The dex register this phi corresponds to.
  uint32_t GetRegNumber() const { return reg_number_; }

  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
  bool IsDead() const { return !IsLive(); }
  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }

  // True if `other` is a phi for the same dex register in the same block
  // (an "equivalent" of this phi, possibly with a different type).
  bool IsVRegEquivalentOf(const HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Checks the immediate neighbors in the phi list for an equivalent.
  // NOTE(review): relies on neighbors in the phi list being phis and on
  // equivalents being stored adjacently — confirm against the phi list invariant.
  bool HasEquivalentPhi() const {
    if (GetPrevious() != nullptr && GetPrevious()->AsPhi()->GetRegNumber() == GetRegNumber()) {
      return true;
    }
    if (GetNext() != nullptr && GetNext()->AsPhi()->GetRegNumber() == GetRegNumber()) {
      return true;
    }
    return false;
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 private:
  // Packed-field layout: type field, then the is-live and can-be-null flags.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeField = BitField<DataType::Type, kFieldType, kFieldTypeSize>;

  const uint32_t reg_number_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};
2682
2683// The exit instruction is the only instruction of the exit block.
2684// Instructions aborting the method (HThrow and HReturn) must branch to the
2685// exit block.
class HExit FINAL : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};
2697
2698// Jumps from one block to another.
class HGoto FINAL : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // A block ending with a goto has exactly one successor.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};
2714
// Abstract base class of all constant instructions.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(DataType::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  // Constants are pure and may freely be moved/deduplicated.
  bool CanBeMoved() const OVERRIDE { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern?
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  // Raw 64-bit representation of the constant's value.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};
2738
2739class HNullConstant FINAL : public HConstant {
2740 public:
2741  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2742    return true;
2743  }
2744
2745  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2746
2747  size_t ComputeHashCode() const OVERRIDE { return 0; }
2748
2749  // The null constant representation is a 0-bit pattern.
2750  virtual bool IsZeroBitPattern() const { return true; }
2751
2752  DECLARE_INSTRUCTION(NullConstant);
2753
2754 private:
2755  explicit HNullConstant(uint32_t dex_pc = kNoDexPc)
2756      : HConstant(DataType::Type::kReference, dex_pc) {}
2757
2758  friend class HGraph;
2759  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
2760};
2761
2762// Constants of the type int. Those can be from Dex instructions, or
2763// synthesized (for example with the if-eqz instruction).
class HIntConstant FINAL : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  // Zero-extend the 32-bit pattern (no sign extension).
  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant()) << other->DebugName();
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  // Integer constants are used to encode Boolean values as well,
  // where 1 means true and 0 means false.
  bool IsTrue() const { return GetValue() == 1; }
  bool IsFalse() const { return GetValue() == 0; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  // Only HGraph may create integer constants (see friend declarations below).
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(DataType::Type::kInt32, dex_pc), value_(value) {}
  // Convenience overload for Boolean constants (true -> 1, false -> 0).
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(DataType::Type::kInt32, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};
2804
class HLongConstant FINAL : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant()) << other->DebugName();
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  // Only HGraph may create long constants.
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(DataType::Type::kInt64, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};
2834
2835class HFloatConstant FINAL : public HConstant {
2836 public:
2837  float GetValue() const { return value_; }
2838
2839  uint64_t GetValueAsUint64() const OVERRIDE {
2840    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
2841  }
2842
2843  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2844    DCHECK(other->IsFloatConstant()) << other->DebugName();
2845    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
2846  }
2847
2848  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2849
2850  bool IsMinusOne() const OVERRIDE {
2851    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
2852  }
2853  bool IsArithmeticZero() const OVERRIDE {
2854    return std::fpclassify(value_) == FP_ZERO;
2855  }
2856  bool IsArithmeticPositiveZero() const {
2857    return IsArithmeticZero() && !std::signbit(value_);
2858  }
2859  bool IsArithmeticNegativeZero() const {
2860    return IsArithmeticZero() && std::signbit(value_);
2861  }
2862  bool IsZeroBitPattern() const OVERRIDE {
2863    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
2864  }
2865  bool IsOne() const OVERRIDE {
2866    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
2867  }
2868  bool IsNaN() const {
2869    return std::isnan(value_);
2870  }
2871
2872  DECLARE_INSTRUCTION(FloatConstant);
2873
2874 private:
2875  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
2876      : HConstant(DataType::Type::kFloat32, dex_pc), value_(value) {}
2877  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
2878      : HConstant(DataType::Type::kFloat32, dex_pc), value_(bit_cast<float, int32_t>(value)) {}
2879
2880  const float value_;
2881
2882  // Only the SsaBuilder and HGraph can create floating-point constants.
2883  friend class SsaBuilder;
2884  friend class HGraph;
2885  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
2886};
2887
2888class HDoubleConstant FINAL : public HConstant {
2889 public:
2890  double GetValue() const { return value_; }
2891
2892  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }
2893
2894  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2895    DCHECK(other->IsDoubleConstant()) << other->DebugName();
2896    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
2897  }
2898
2899  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2900
2901  bool IsMinusOne() const OVERRIDE {
2902    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
2903  }
2904  bool IsArithmeticZero() const OVERRIDE {
2905    return std::fpclassify(value_) == FP_ZERO;
2906  }
2907  bool IsArithmeticPositiveZero() const {
2908    return IsArithmeticZero() && !std::signbit(value_);
2909  }
2910  bool IsArithmeticNegativeZero() const {
2911    return IsArithmeticZero() && std::signbit(value_);
2912  }
2913  bool IsZeroBitPattern() const OVERRIDE {
2914    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
2915  }
2916  bool IsOne() const OVERRIDE {
2917    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
2918  }
2919  bool IsNaN() const {
2920    return std::isnan(value_);
2921  }
2922
2923  DECLARE_INSTRUCTION(DoubleConstant);
2924
2925 private:
2926  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
2927      : HConstant(DataType::Type::kFloat64, dex_pc), value_(value) {}
2928  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
2929      : HConstant(DataType::Type::kFloat64, dex_pc), value_(bit_cast<double, int64_t>(value)) {}
2930
2931  const double value_;
2932
2933  // Only the SsaBuilder and HGraph can create floating-point constants.
2934  friend class SsaBuilder;
2935  friend class HGraph;
2936  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
2937};
2938
2939// Conditional branch. A block ending with an HIf instruction must have
2940// two successors.
class HIf FINAL : public HTemplateInstruction<1> {
 public:
  // Input 0 is the condition value being tested.
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Successor taken when the condition is true is stored at index 0, ...
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // ... the false successor at index 1.
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};
2963
2964
2965// Abstract instruction which marks the beginning and/or end of a try block and
2966// links it to the respective exception handlers. Behaves the same as a Goto in
2967// non-exceptional control flow.
2968// Normal-flow successor is stored at index zero, exception handlers under
2969// higher indices in no particular order.
class HTryBoundary FINAL : public HTemplateInstruction<0> {
 public:
  // Whether this boundary marks the entry into, or the exit out of, a try block.
  enum class BoundaryKind {
    kEntry,
    kExit,
    kLast = kExit
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // Exception handlers occupy all successor slots after the normal-flow one.
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  // Packed-field layout: the boundary kind right after the generic bits.
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};
3026
3027// Deoptimize to interpreter, upon checking a condition.
3028class HDeoptimize FINAL : public HVariableInputSizeInstruction {
3029 public:
3030  // Use this constructor when the `HDeoptimize` acts as a barrier, where no code can move
3031  // across.
3032  HDeoptimize(ArenaAllocator* allocator,
3033              HInstruction* cond,
3034              DeoptimizationKind kind,
3035              uint32_t dex_pc)
3036      : HVariableInputSizeInstruction(
3037            SideEffects::All(),
3038            dex_pc,
3039            allocator,
3040            /* number_of_inputs */ 1,
3041            kArenaAllocMisc) {
3042    SetPackedFlag<kFieldCanBeMoved>(false);
3043    SetPackedField<DeoptimizeKindField>(kind);
3044    SetRawInputAt(0, cond);
3045  }
3046
3047  // Use this constructor when the `HDeoptimize` guards an instruction, and any user
3048  // that relies on the deoptimization to pass should have its input be the `HDeoptimize`
3049  // instead of `guard`.
3050  // We set CanTriggerGC to prevent any intermediate address to be live
3051  // at the point of the `HDeoptimize`.
3052  HDeoptimize(ArenaAllocator* allocator,
3053              HInstruction* cond,
3054              HInstruction* guard,
3055              DeoptimizationKind kind,
3056              uint32_t dex_pc)
3057      : HVariableInputSizeInstruction(
3058            SideEffects::CanTriggerGC(),
3059            dex_pc,
3060            allocator,
3061            /* number_of_inputs */ 2,
3062            kArenaAllocMisc) {
3063    SetPackedFlag<kFieldCanBeMoved>(true);
3064    SetPackedField<DeoptimizeKindField>(kind);
3065    SetRawInputAt(0, cond);
3066    SetRawInputAt(1, guard);
3067  }
3068
3069  bool CanBeMoved() const OVERRIDE { return GetPackedFlag<kFieldCanBeMoved>(); }
3070
3071  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3072    return (other->CanBeMoved() == CanBeMoved()) && (other->AsDeoptimize()->GetKind() == GetKind());
3073  }
3074
3075  bool NeedsEnvironment() const OVERRIDE { return true; }
3076
3077  bool CanThrow() const OVERRIDE { return true; }
3078
3079  DeoptimizationKind GetDeoptimizationKind() const { return GetPackedField<DeoptimizeKindField>(); }
3080
3081  DataType::Type GetType() const OVERRIDE {
3082    return GuardsAnInput() ? GuardedInput()->GetType() : DataType::Type::kVoid;
3083  }
3084
3085  bool GuardsAnInput() const {
3086    return InputCount() == 2;
3087  }
3088
3089  HInstruction* GuardedInput() const {
3090    DCHECK(GuardsAnInput());
3091    return InputAt(1);
3092  }
3093
3094  void RemoveGuard() {
3095    RemoveInputAt(1);
3096  }
3097
3098  DECLARE_INSTRUCTION(Deoptimize);
3099
3100 private:
3101  static constexpr size_t kFieldCanBeMoved = kNumberOfGenericPackedBits;
3102  static constexpr size_t kFieldDeoptimizeKind = kNumberOfGenericPackedBits + 1;
3103  static constexpr size_t kFieldDeoptimizeKindSize =
3104      MinimumBitsToStore(static_cast<size_t>(DeoptimizationKind::kLast));
3105  static constexpr size_t kNumberOfDeoptimizePackedBits =
3106      kFieldDeoptimizeKind + kFieldDeoptimizeKindSize;
3107  static_assert(kNumberOfDeoptimizePackedBits <= kMaxNumberOfPackedBits,
3108                "Too many packed fields.");
3109  using DeoptimizeKindField =
3110      BitField<DeoptimizationKind, kFieldDeoptimizeKind, kFieldDeoptimizeKindSize>;
3111
3112  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
3113};
3114
// Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
// The compiled code checks this flag value in a guard before devirtualized call and
// if it's true, starts to do deoptimization.
// It has a 4-byte slot on stack.
// TODO: allocate a register for this flag.
class HShouldDeoptimizeFlag FINAL : public HVariableInputSizeInstruction {
 public:
  // CHA guards are only optimized in a separate pass and it has no side effects
  // with regard to other passes.
  // Created with zero inputs; inputs can be added later through the
  // variable-input-size base class.
  HShouldDeoptimizeFlag(ArenaAllocator* allocator, uint32_t dex_pc)
      : HVariableInputSizeInstruction(SideEffects::None(), dex_pc, allocator, 0, kArenaAllocCHA) {
  }

  // The flag value is a 32-bit integer (see the 4-byte stack slot note above).
  DataType::Type GetType() const OVERRIDE { return DataType::Type::kInt32; }

  // We do all CHA guard elimination/motion in a single pass, after which there is no
  // further guard elimination/motion since a guard might have been used for justification
  // of the elimination of another guard. Therefore, we pretend this guard cannot be moved
  // to avoid other optimizations trying to move it.
  bool CanBeMoved() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShouldDeoptimizeFlag);
};
3141
// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod FINAL : public HExpression<0> {
 public:
  // `type` is the type used to represent the ArtMethod* value
  // (presumably the target's pointer-sized integer type -- confirm with callers).
  // No side effects; the value is a plain expression.
  explicit HCurrentMethod(DataType::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};
3155
3156// Fetches an ArtMethod from the virtual table or the interface method table
3157// of a class.
3158class HClassTableGet FINAL : public HExpression<1> {
3159 public:
3160  enum class TableKind {
3161    kVTable,
3162    kIMTable,
3163    kLast = kIMTable
3164  };
3165  HClassTableGet(HInstruction* cls,
3166                 DataType::Type type,
3167                 TableKind kind,
3168                 size_t index,
3169                 uint32_t dex_pc)
3170      : HExpression(type, SideEffects::None(), dex_pc),
3171        index_(index) {
3172    SetPackedField<TableKindField>(kind);
3173    SetRawInputAt(0, cls);
3174  }
3175
3176  bool CanBeMoved() const OVERRIDE { return true; }
3177  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3178    return other->AsClassTableGet()->GetIndex() == index_ &&
3179        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
3180  }
3181
3182  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
3183  size_t GetIndex() const { return index_; }
3184
3185  DECLARE_INSTRUCTION(ClassTableGet);
3186
3187 private:
3188  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
3189  static constexpr size_t kFieldTableKindSize =
3190      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
3191  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
3192  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
3193                "Too many packed fields.");
3194  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
3195
3196  // The index of the ArtMethod in the table.
3197  const size_t index_;
3198
3199  DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
3200};
3201
// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
class HPackedSwitch FINAL : public HTemplateInstruction<1> {
 public:
  // `start_value` is the case value of the first table entry; `num_entries` is
  // the number of (contiguous) case entries; `input` is the switched-on value.
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
    : HTemplateInstruction(SideEffects::None(), dex_pc),
      start_value_(start_value),
      num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  // A switch terminates its basic block.
  bool IsControlFlow() const OVERRIDE { return true; }

  int32_t GetStartValue() const { return start_value_; }

  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  // Case value of the first switch entry.
  const int32_t start_value_;
  // Number of case entries; the successor after them is the default block.
  const uint32_t num_entries_;

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};
3235
// Base class for operations with a single input producing a value
// (e.g. negation, bitwise not).
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  DataType::Type GetResultType() const { return GetType(); }

  // No side effects (see constructor), so the operation may be moved and
  // deduplicated; equality is fully determined by the instruction kind and input.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`. Subclasses provide one evaluator per
  // constant input type.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};
3267
3268class HBinaryOperation : public HExpression<2> {
3269 public:
3270  HBinaryOperation(DataType::Type result_type,
3271                   HInstruction* left,
3272                   HInstruction* right,
3273                   SideEffects side_effects = SideEffects::None(),
3274                   uint32_t dex_pc = kNoDexPc)
3275      : HExpression(result_type, side_effects, dex_pc) {
3276    SetRawInputAt(0, left);
3277    SetRawInputAt(1, right);
3278  }
3279
3280  HInstruction* GetLeft() const { return InputAt(0); }
3281  HInstruction* GetRight() const { return InputAt(1); }
3282  DataType::Type GetResultType() const { return GetType(); }
3283
3284  virtual bool IsCommutative() const { return false; }
3285
3286  // Put constant on the right.
3287  // Returns whether order is changed.
3288  bool OrderInputsWithConstantOnTheRight() {
3289    HInstruction* left = InputAt(0);
3290    HInstruction* right = InputAt(1);
3291    if (left->IsConstant() && !right->IsConstant()) {
3292      ReplaceInput(right, 0);
3293      ReplaceInput(left, 1);
3294      return true;
3295    }
3296    return false;
3297  }
3298
3299  // Order inputs by instruction id, but favor constant on the right side.
3300  // This helps GVN for commutative ops.
3301  void OrderInputs() {
3302    DCHECK(IsCommutative());
3303    HInstruction* left = InputAt(0);
3304    HInstruction* right = InputAt(1);
3305    if (left == right || (!left->IsConstant() && right->IsConstant())) {
3306      return;
3307    }
3308    if (OrderInputsWithConstantOnTheRight()) {
3309      return;
3310    }
3311    // Order according to instruction id.
3312    if (left->GetId() > right->GetId()) {
3313      ReplaceInput(right, 0);
3314      ReplaceInput(left, 1);
3315    }
3316  }
3317
3318  bool CanBeMoved() const OVERRIDE { return true; }
3319  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
3320    return true;
3321  }
3322
3323  // Try to statically evaluate `this` and return a HConstant
3324  // containing the result of this evaluation.  If `this` cannot
3325  // be evaluated as a constant, return null.
3326  HConstant* TryStaticEvaluation() const;
3327
3328  // Apply this operation to `x` and `y`.
3329  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3330                              HNullConstant* y ATTRIBUTE_UNUSED) const {
3331    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
3332    UNREACHABLE();
3333  }
3334  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
3335  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
3336  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
3337                              HIntConstant* y ATTRIBUTE_UNUSED) const {
3338    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
3339    UNREACHABLE();
3340  }
3341  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
3342  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;
3343
3344  // Returns an input that can legally be used as the right input and is
3345  // constant, or null.
3346  HConstant* GetConstantRight() const;
3347
3348  // If `GetConstantRight()` returns one of the input, this returns the other
3349  // one. Otherwise it returns null.
3350  HInstruction* GetLeastConstantLeft() const;
3351
3352  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);
3353
3354 private:
3355  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
3356};
3357
// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias  // Sentinel, used to size packed bit fields.
};

// Pretty-printer for debugging/graph dumps.
std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
3368
3369class HCondition : public HBinaryOperation {
3370 public:
3371  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3372      : HBinaryOperation(DataType::Type::kBool, first, second, SideEffects::None(), dex_pc) {
3373    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
3374  }
3375
3376  // For code generation purposes, returns whether this instruction is just before
3377  // `instruction`, and disregard moves in between.
3378  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;
3379
3380  DECLARE_ABSTRACT_INSTRUCTION(Condition);
3381
3382  virtual IfCondition GetCondition() const = 0;
3383
3384  virtual IfCondition GetOppositeCondition() const = 0;
3385
3386  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
3387  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }
3388
3389  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
3390  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }
3391
3392  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3393    return GetPackedFields() == other->AsCondition()->GetPackedFields();
3394  }
3395
3396  bool IsFPConditionTrueIfNaN() const {
3397    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3398    IfCondition if_cond = GetCondition();
3399    if (if_cond == kCondNE) {
3400      return true;
3401    } else if (if_cond == kCondEQ) {
3402      return false;
3403    }
3404    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
3405  }
3406
3407  bool IsFPConditionFalseIfNaN() const {
3408    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3409    IfCondition if_cond = GetCondition();
3410    if (if_cond == kCondEQ) {
3411      return true;
3412    } else if (if_cond == kCondNE) {
3413      return false;
3414    }
3415    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
3416  }
3417
3418 protected:
3419  // Needed if we merge a HCompare into a HCondition.
3420  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
3421  static constexpr size_t kFieldComparisonBiasSize =
3422      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
3423  static constexpr size_t kNumberOfConditionPackedBits =
3424      kFieldComparisonBias + kFieldComparisonBiasSize;
3425  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3426  using ComparisonBiasField =
3427      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
3428
3429  template <typename T>
3430  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
3431
3432  template <typename T>
3433  int32_t CompareFP(T x, T y) const {
3434    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3435    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
3436    // Handle the bias.
3437    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
3438  }
3439
3440  // Return an integer constant containing the result of a condition evaluated at compile time.
3441  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
3442    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
3443  }
3444
3445 private:
3446  DISALLOW_COPY_AND_ASSIGN(HCondition);
3447};
3448
3449// Instruction to check if two inputs are equal to each other.
3450class HEqual FINAL : public HCondition {
3451 public:
3452  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3453      : HCondition(first, second, dex_pc) {}
3454
3455  bool IsCommutative() const OVERRIDE { return true; }
3456
3457  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3458                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3459    return MakeConstantCondition(true, GetDexPc());
3460  }
3461  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3462    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3463  }
3464  // In the following Evaluate methods, a HCompare instruction has
3465  // been merged into this HEqual instruction; evaluate it as
3466  // `Compare(x, y) == 0`.
3467  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3468    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3469                                 GetDexPc());
3470  }
3471  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3472    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3473  }
3474  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3475    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3476  }
3477
3478  DECLARE_INSTRUCTION(Equal);
3479
3480  IfCondition GetCondition() const OVERRIDE {
3481    return kCondEQ;
3482  }
3483
3484  IfCondition GetOppositeCondition() const OVERRIDE {
3485    return kCondNE;
3486  }
3487
3488 private:
3489  template <typename T> static bool Compute(T x, T y) { return x == y; }
3490
3491  DISALLOW_COPY_AND_ASSIGN(HEqual);
3492};
3493
3494class HNotEqual FINAL : public HCondition {
3495 public:
3496  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3497      : HCondition(first, second, dex_pc) {}
3498
3499  bool IsCommutative() const OVERRIDE { return true; }
3500
3501  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3502                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3503    return MakeConstantCondition(false, GetDexPc());
3504  }
3505  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3506    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3507  }
3508  // In the following Evaluate methods, a HCompare instruction has
3509  // been merged into this HNotEqual instruction; evaluate it as
3510  // `Compare(x, y) != 0`.
3511  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3512    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3513  }
3514  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3515    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3516  }
3517  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3518    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3519  }
3520
3521  DECLARE_INSTRUCTION(NotEqual);
3522
3523  IfCondition GetCondition() const OVERRIDE {
3524    return kCondNE;
3525  }
3526
3527  IfCondition GetOppositeCondition() const OVERRIDE {
3528    return kCondEQ;
3529  }
3530
3531 private:
3532  template <typename T> static bool Compute(T x, T y) { return x != y; }
3533
3534  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
3535};
3536
3537class HLessThan FINAL : public HCondition {
3538 public:
3539  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3540      : HCondition(first, second, dex_pc) {}
3541
3542  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3543    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3544  }
3545  // In the following Evaluate methods, a HCompare instruction has
3546  // been merged into this HLessThan instruction; evaluate it as
3547  // `Compare(x, y) < 0`.
3548  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3549    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3550  }
3551  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3552    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3553  }
3554  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3555    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3556  }
3557
3558  DECLARE_INSTRUCTION(LessThan);
3559
3560  IfCondition GetCondition() const OVERRIDE {
3561    return kCondLT;
3562  }
3563
3564  IfCondition GetOppositeCondition() const OVERRIDE {
3565    return kCondGE;
3566  }
3567
3568 private:
3569  template <typename T> static bool Compute(T x, T y) { return x < y; }
3570
3571  DISALLOW_COPY_AND_ASSIGN(HLessThan);
3572};
3573
3574class HLessThanOrEqual FINAL : public HCondition {
3575 public:
3576  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3577      : HCondition(first, second, dex_pc) {}
3578
3579  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3580    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3581  }
3582  // In the following Evaluate methods, a HCompare instruction has
3583  // been merged into this HLessThanOrEqual instruction; evaluate it as
3584  // `Compare(x, y) <= 0`.
3585  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3586    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3587  }
3588  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3589    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3590  }
3591  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3592    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3593  }
3594
3595  DECLARE_INSTRUCTION(LessThanOrEqual);
3596
3597  IfCondition GetCondition() const OVERRIDE {
3598    return kCondLE;
3599  }
3600
3601  IfCondition GetOppositeCondition() const OVERRIDE {
3602    return kCondGT;
3603  }
3604
3605 private:
3606  template <typename T> static bool Compute(T x, T y) { return x <= y; }
3607
3608  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
3609};
3610
3611class HGreaterThan FINAL : public HCondition {
3612 public:
3613  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3614      : HCondition(first, second, dex_pc) {}
3615
3616  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3617    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3618  }
3619  // In the following Evaluate methods, a HCompare instruction has
3620  // been merged into this HGreaterThan instruction; evaluate it as
3621  // `Compare(x, y) > 0`.
3622  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3623    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3624  }
3625  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3626    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3627  }
3628  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3629    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3630  }
3631
3632  DECLARE_INSTRUCTION(GreaterThan);
3633
3634  IfCondition GetCondition() const OVERRIDE {
3635    return kCondGT;
3636  }
3637
3638  IfCondition GetOppositeCondition() const OVERRIDE {
3639    return kCondLE;
3640  }
3641
3642 private:
3643  template <typename T> static bool Compute(T x, T y) { return x > y; }
3644
3645  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
3646};
3647
3648class HGreaterThanOrEqual FINAL : public HCondition {
3649 public:
3650  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3651      : HCondition(first, second, dex_pc) {}
3652
3653  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3654    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3655  }
3656  // In the following Evaluate methods, a HCompare instruction has
3657  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3658  // `Compare(x, y) >= 0`.
3659  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3660    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3661  }
3662  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3663    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3664  }
3665  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3666    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3667  }
3668
3669  DECLARE_INSTRUCTION(GreaterThanOrEqual);
3670
3671  IfCondition GetCondition() const OVERRIDE {
3672    return kCondGE;
3673  }
3674
3675  IfCondition GetOppositeCondition() const OVERRIDE {
3676    return kCondLT;
3677  }
3678
3679 private:
3680  template <typename T> static bool Compute(T x, T y) { return x >= y; }
3681
3682  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
3683};
3684
3685class HBelow FINAL : public HCondition {
3686 public:
3687  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3688      : HCondition(first, second, dex_pc) {}
3689
3690  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3691    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3692  }
3693  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3694    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3695  }
3696  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3697                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3698    LOG(FATAL) << DebugName() << " is not defined for float values";
3699    UNREACHABLE();
3700  }
3701  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3702                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3703    LOG(FATAL) << DebugName() << " is not defined for double values";
3704    UNREACHABLE();
3705  }
3706
3707  DECLARE_INSTRUCTION(Below);
3708
3709  IfCondition GetCondition() const OVERRIDE {
3710    return kCondB;
3711  }
3712
3713  IfCondition GetOppositeCondition() const OVERRIDE {
3714    return kCondAE;
3715  }
3716
3717 private:
3718  template <typename T> static bool Compute(T x, T y) {
3719    return MakeUnsigned(x) < MakeUnsigned(y);
3720  }
3721
3722  DISALLOW_COPY_AND_ASSIGN(HBelow);
3723};
3724
3725class HBelowOrEqual FINAL : public HCondition {
3726 public:
3727  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3728      : HCondition(first, second, dex_pc) {}
3729
3730  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3731    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3732  }
3733  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3734    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3735  }
3736  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3737                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3738    LOG(FATAL) << DebugName() << " is not defined for float values";
3739    UNREACHABLE();
3740  }
3741  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3742                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3743    LOG(FATAL) << DebugName() << " is not defined for double values";
3744    UNREACHABLE();
3745  }
3746
3747  DECLARE_INSTRUCTION(BelowOrEqual);
3748
3749  IfCondition GetCondition() const OVERRIDE {
3750    return kCondBE;
3751  }
3752
3753  IfCondition GetOppositeCondition() const OVERRIDE {
3754    return kCondA;
3755  }
3756
3757 private:
3758  template <typename T> static bool Compute(T x, T y) {
3759    return MakeUnsigned(x) <= MakeUnsigned(y);
3760  }
3761
3762  DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
3763};
3764
3765class HAbove FINAL : public HCondition {
3766 public:
3767  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3768      : HCondition(first, second, dex_pc) {}
3769
3770  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3771    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3772  }
3773  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3774    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3775  }
3776  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3777                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3778    LOG(FATAL) << DebugName() << " is not defined for float values";
3779    UNREACHABLE();
3780  }
3781  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3782                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3783    LOG(FATAL) << DebugName() << " is not defined for double values";
3784    UNREACHABLE();
3785  }
3786
3787  DECLARE_INSTRUCTION(Above);
3788
3789  IfCondition GetCondition() const OVERRIDE {
3790    return kCondA;
3791  }
3792
3793  IfCondition GetOppositeCondition() const OVERRIDE {
3794    return kCondBE;
3795  }
3796
3797 private:
3798  template <typename T> static bool Compute(T x, T y) {
3799    return MakeUnsigned(x) > MakeUnsigned(y);
3800  }
3801
3802  DISALLOW_COPY_AND_ASSIGN(HAbove);
3803};
3804
3805class HAboveOrEqual FINAL : public HCondition {
3806 public:
3807  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3808      : HCondition(first, second, dex_pc) {}
3809
3810  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3811    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3812  }
3813  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3814    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3815  }
3816  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3817                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3818    LOG(FATAL) << DebugName() << " is not defined for float values";
3819    UNREACHABLE();
3820  }
3821  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3822                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3823    LOG(FATAL) << DebugName() << " is not defined for double values";
3824    UNREACHABLE();
3825  }
3826
3827  DECLARE_INSTRUCTION(AboveOrEqual);
3828
3829  IfCondition GetCondition() const OVERRIDE {
3830    return kCondAE;
3831  }
3832
3833  IfCondition GetOppositeCondition() const OVERRIDE {
3834    return kCondB;
3835  }
3836
3837 private:
3838  template <typename T> static bool Compute(T x, T y) {
3839    return MakeUnsigned(x) >= MakeUnsigned(y);
3840  }
3841
3842  DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
3843};
3844
3845// Instruction to check how two inputs compare to each other.
3846// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
3847class HCompare FINAL : public HBinaryOperation {
3848 public:
3849  // Note that `comparison_type` is the type of comparison performed
3850  // between the comparison's inputs, not the type of the instantiated
3851  // HCompare instruction (which is always DataType::Type::kInt).
3852  HCompare(DataType::Type comparison_type,
3853           HInstruction* first,
3854           HInstruction* second,
3855           ComparisonBias bias,
3856           uint32_t dex_pc)
3857      : HBinaryOperation(DataType::Type::kInt32,
3858                         first,
3859                         second,
3860                         SideEffectsForArchRuntimeCalls(comparison_type),
3861                         dex_pc) {
3862    SetPackedField<ComparisonBiasField>(bias);
3863    DCHECK_EQ(comparison_type, DataType::Kind(first->GetType()));
3864    DCHECK_EQ(comparison_type, DataType::Kind(second->GetType()));
3865  }
3866
3867  template <typename T>
3868  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
3869
3870  template <typename T>
3871  int32_t ComputeFP(T x, T y) const {
3872    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3873    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
3874    // Handle the bias.
3875    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
3876  }
3877
3878  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3879    // Note that there is no "cmp-int" Dex instruction so we shouldn't
3880    // reach this code path when processing a freshly built HIR
3881    // graph. However HCompare integer instructions can be synthesized
3882    // by the instruction simplifier to implement IntegerCompare and
3883    // IntegerSignum intrinsics, so we have to handle this case.
3884    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3885  }
3886  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3887    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3888  }
3889  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3890    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
3891  }
3892  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3893    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
3894  }
3895
3896  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3897    return GetPackedFields() == other->AsCompare()->GetPackedFields();
3898  }
3899
3900  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
3901
3902  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
3903  // Only meaningful for floating-point comparisons.
3904  bool IsGtBias() const {
3905    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3906    return GetBias() == ComparisonBias::kGtBias;
3907  }
3908
3909  static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type type ATTRIBUTE_UNUSED) {
3910    // Comparisons do not require a runtime call in any back end.
3911    return SideEffects::None();
3912  }
3913
3914  DECLARE_INSTRUCTION(Compare);
3915
3916 protected:
3917  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
3918  static constexpr size_t kFieldComparisonBiasSize =
3919      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
3920  static constexpr size_t kNumberOfComparePackedBits =
3921      kFieldComparisonBias + kFieldComparisonBiasSize;
3922  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3923  using ComparisonBiasField =
3924      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
3925
3926  // Return an integer constant containing the result of a comparison evaluated at compile time.
3927  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
3928    DCHECK(value == -1 || value == 0 || value == 1) << value;
3929    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
3930  }
3931
3932 private:
3933  DISALLOW_COPY_AND_ASSIGN(HCompare);
3934};
3935
// Allocates a new instance of the class given by input 0 (an HLoadClass,
// possibly wrapped in an HClinitCheck). The runtime entrypoint used for the
// allocation is stored in `entrypoint_` and may be updated by later passes
// via SetEntrypoint().
class HNewInstance FINAL : public HExpression<1> {
 public:
  HNewInstance(HInstruction* cls,
               uint32_t dex_pc,
               dex::TypeIndex type_index,
               const DexFile& dex_file,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(DataType::Type::kReference, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetPackedFlag<kFlagFinalizable>(finalizable);
    SetRawInputAt(0, cls);
  }

  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // Can throw errors when out-of-memory or if it's not instantiable/accessible.
  bool CanThrow() const OVERRIDE { return true; }

  // Whether the selected entrypoint is the one that performs
  // instantiability/access checks.
  bool NeedsChecks() const {
    return entrypoint_ == kQuickAllocObjectWithChecks;
  }

  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }

  // A freshly allocated object is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  // Returns the HLoadClass for the allocated type, looking through an
  // HClinitCheck wrapper if one is present on input 0.
  HLoadClass* GetLoadClass() const {
    HInstruction* input = InputAt(0);
    if (input->IsClinitCheck()) {
      input = input->InputAt(0);
    }
    DCHECK(input->IsLoadClass());
    return input->AsLoadClass();
  }

  bool IsStringAlloc() const;

  DECLARE_INSTRUCTION(NewInstance);

 private:
  // One extra packed flag (finalizable) appended after the HExpression bits.
  static constexpr size_t kFlagFinalizable = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const dex::TypeIndex type_index_;
  const DexFile& dex_file_;
  QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
};
4000
// Describes whether a recognized intrinsic still needs an environment or a
// dex cache; passed to HInvoke::SetIntrinsic().
enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};
4005
// Describes the heap side effects of a recognized intrinsic; passed to
// HInvoke::SetIntrinsic().
enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};
4012
// Describes the throw behavior of a recognized intrinsic; passed to
// HInvoke::SetIntrinsic().
enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
4017
// Abstract base class for all invoke-style instructions (static/direct,
// virtual, interface, polymorphic, unresolved). The method arguments are
// stored as variable inputs; subclasses may append extra non-argument
// inputs after them. Also carries the return type, invoke type, resolved
// method, dex method index and intrinsic information.
class HInvoke : public HVariableInputSizeInstruction {
 public:
  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  DataType::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  InvokeType GetInvokeType() const {
    return GetPackedField<InvokeTypeField>();
  }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  // Marks this invoke as the given intrinsic. The remaining parameters
  // describe the intrinsic's requirements (see IntrinsicNeedsEnvironmentOrCache,
  // IntrinsicSideEffects and IntrinsicExceptions above).
  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }

  // Only intrinsic invokes that do not write memory may be moved.
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic() && !DoesAnyWrite(); }

  // Two invokes compare equal (for GVN purposes) only when both are the
  // same recognized intrinsic; plain (kNone) invokes are never equal.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  ArtMethod* GetResolvedMethod() const { return resolved_method_; }
  void SetResolvedMethod(ArtMethod* method) { resolved_method_ = method; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed field layout, appended after the generic HInstruction bits:
  // [invoke type][return type][can-throw flag].
  static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldReturnType =
      kFieldInvokeType + kFieldInvokeTypeSize;
  static constexpr size_t kFieldReturnTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
  using ReturnTypeField = BitField<DataType::Type, kFieldReturnType, kFieldReturnTypeSize>;

  HInvoke(ArenaAllocator* allocator,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          DataType::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          ArtMethod* resolved_method,
          InvokeType invoke_type)
    : HVariableInputSizeInstruction(
          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
          dex_pc,
          allocator,
          number_of_arguments + number_of_other_inputs,
          kArenaAllocInvokeInputs),
      number_of_arguments_(number_of_arguments),
      resolved_method_(resolved_method),
      dex_method_index_(dex_method_index),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
    SetPackedField<ReturnTypeField>(return_type);
    SetPackedField<InvokeTypeField>(invoke_type);
    // Invokes are assumed throwing by default; SetCanThrow()/SetIntrinsic()
    // may later refine this.
    SetPackedFlag<kFlagCanThrow>(true);
  }

  uint32_t number_of_arguments_;
  ArtMethod* resolved_method_;
  const uint32_t dex_method_index_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};
4127
// Invocation of a method whose target could not be resolved at compile
// time; the resolved ArtMethod* passed to the HInvoke base is null.
class HInvokeUnresolved FINAL : public HInvoke {
 public:
  HInvokeUnresolved(ArenaAllocator* allocator,
                    uint32_t number_of_arguments,
                    DataType::Type return_type,
                    uint32_t dex_pc,
                    uint32_t dex_method_index,
                    InvokeType invoke_type)
      : HInvoke(allocator,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                nullptr,
                invoke_type) {
  }

  DECLARE_INSTRUCTION(InvokeUnresolved);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
};
4151
// Models the dex invoke-polymorphic instruction. Constructed without a
// resolved ArtMethod* and recorded with kVirtual invoke type.
class HInvokePolymorphic FINAL : public HInvoke {
 public:
  HInvokePolymorphic(ArenaAllocator* allocator,
                     uint32_t number_of_arguments,
                     DataType::Type return_type,
                     uint32_t dex_pc,
                     uint32_t dex_method_index)
      : HInvoke(allocator,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                nullptr,
                kVirtual) {}

  DECLARE_INSTRUCTION(InvokePolymorphic);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokePolymorphic);
};
4173
// Invocation of a method with a statically known target (a static or direct
// call). The dispatch is described by DispatchInfo: how to load the target
// ArtMethod* and where the code pointer comes from. Depending on the
// dispatch and the clinit-check requirement, extra non-argument inputs
// (current method, explicit clinit check, platform-specific bases) may
// follow the arguments.
class HInvokeStaticOrDirect FINAL : public HInvoke {
 public:
  // Requirements of this method call regarding the class
  // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement {
    kNone,      // Class already initialized.
    kExplicit,  // Static call having explicit clinit check as last input.
    kImplicit,  // Static call implicitly requiring a clinit check.
    kLast = kImplicit
  };

  // Determines how to load the target ArtMethod*.
  enum class MethodLoadKind {
    // Use a String init ArtMethod* loaded from Thread entrypoints.
    kStringInit,

    // Use the method's own ArtMethod* loaded by the register allocator.
    kRecursive,

    // Use PC-relative boot image ArtMethod* address that will be known at link time.
    // Used for boot image methods referenced by boot image code.
    kBootImageLinkTimePcRelative,

    // Use ArtMethod* at a known address, embed the direct address in the code.
    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
    kDirectAddress,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for classes outside boot image when .bss is accessible with a PC-relative load.
    kBssEntry,

    // Make a runtime call to resolve and call the method. This is the last-resort-kind
    // used when other kinds are unimplemented on a particular architecture.
    kRuntimeCall,
  };

  // Determines the location of the code pointer.
  enum class CodePtrLocation {
    // Recursive call, use local PC-relative call instruction.
    kCallSelf,

    // Use code pointer from the ArtMethod*.
    // Used when we don't know the target code. This is also the last-resort-kind used when
    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
    kCallArtMethod,
  };

  struct DispatchInfo {
    MethodLoadKind method_load_kind;
    CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
    //     Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kDirectAddress
    uint64_t method_load_data;
  };

  HInvokeStaticOrDirect(ArenaAllocator* allocator,
                        uint32_t number_of_arguments,
                        DataType::Type return_type,
                        uint32_t dex_pc,
                        uint32_t method_index,
                        ArtMethod* resolved_method,
                        DispatchInfo dispatch_info,
                        InvokeType invoke_type,
                        MethodReference target_method,
                        ClinitCheckRequirement clinit_check_requirement)
      : HInvoke(allocator,
                number_of_arguments,
                // There is potentially one extra argument for the HCurrentMethod node, and
                // potentially one other if the clinit check is explicit, and potentially
                // one other if the method is a string factory.
                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
                return_type,
                dex_pc,
                method_index,
                resolved_method,
                invoke_type),
        target_method_(target_method),
        dispatch_info_(dispatch_info) {
    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
  }

  // Replaces the dispatch info, dropping the current-method input when the
  // new method load kind no longer needs it.
  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
    bool had_current_method_input = HasCurrentMethodInput();
    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);

    // Using the current method is the default and once we find a better
    // method load kind, we should not go back to using the current method.
    DCHECK(had_current_method_input || !needs_current_method_input);

    if (had_current_method_input && !needs_current_method_input) {
      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
      RemoveInputAt(GetSpecialInputIndex());
    }
    dispatch_info_ = dispatch_info;
  }

  DispatchInfo GetDispatchInfo() const {
    return dispatch_info_;
  }

  // Inserts a single extra non-argument input (e.g. a PC-relative base)
  // right after the arguments.
  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
    DCHECK(InputCount() == GetSpecialInputIndex() ||
           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
    InsertInputAt(GetSpecialInputIndex(), input);
  }

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // In debug builds, additionally verifies that an explicit-clinit-check
  // invoke carries the expected HClinitCheck/HLoadClass as its last input.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
      DCHECK(!input_records.empty());
      DCHECK_GT(input_records.size(), GetNumberOfArguments());
      HInstruction* last_input = input_records.back().GetInstruction();
      // Note: `last_input` may be null during arguments setup.
      if (last_input != nullptr) {
        // `last_input` is the last input of a static invoke marked as having
        // an explicit clinit check. It must either be:
        // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
        // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
        DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
      }
    }
    return input_records;
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  bool CanBeNull() const OVERRIDE {
    return GetPackedField<ReturnTypeField>() == DataType::Type::kReference && !IsStringInit();
  }

  // Get the index of the special input, if any.
  //
  // If the invoke HasCurrentMethodInput(), the "special input" is the current
  // method pointer; otherwise there may be one platform-specific special input,
  // such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }

  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeMethodLoadKind() const {
    return GetMethodLoadKind() == MethodLoadKind::kBootImageLinkTimePcRelative ||
           GetMethodLoadKind() == MethodLoadKind::kBssEntry;
  }
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }

  QuickEntrypointEnum GetStringInitEntryPoint() const {
    DCHECK(IsStringInit());
    return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  const DexFile& GetDexFileForPcRelativeDexCache() const;

  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetInvokeType() == kStatic;
  }

  MethodReference GetTargetMethod() const {
    return target_method_;
  }

  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = inputs_.size() - 1u;
    HInstruction* last_input = inputs_.back().GetInstruction();
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kRuntimeCall;
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 private:
  // One extra packed field (clinit-check requirement) appended after the
  // HInvoke bits.
  static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
  static constexpr size_t kFieldClinitCheckRequirementSize =
      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
                                               kFieldClinitCheckRequirement,
                                               kFieldClinitCheckRequirementSize>;

  // Cached values of the resolved method, to avoid needing the mutator lock.
  MethodReference target_method_;
  DispatchInfo dispatch_info_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
// Stream-output helpers for the dispatch-related enums above (debug printing).
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4423
// Invocation of a virtual method, dispatched through the receiver's
// (input 0) vtable at index `vtable_index_`.
class HInvokeVirtual FINAL : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* allocator,
                 uint32_t number_of_arguments,
                 DataType::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 ArtMethod* resolved_method,
                 uint32_t vtable_index)
      : HInvoke(allocator,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kVirtual),
        vtable_index_(vtable_index) {}

  bool CanBeNull() const OVERRIDE {
    // These intrinsics are treated as never returning null; everything else
    // falls back to the generic HInvoke answer.
    switch (GetIntrinsic()) {
      case Intrinsics::kThreadCurrentThread:
      case Intrinsics::kStringBufferAppend:
      case Intrinsics::kStringBufferToString:
      case Intrinsics::kStringBuilderAppend:
      case Intrinsics::kStringBuilderToString:
        return false;
      default:
        return HInvoke::CanBeNull();
    }
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 private:
  // Cached vtable index of the resolved method, to avoid needing the mutator lock.
  const uint32_t vtable_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
};
4471
4472class HInvokeInterface FINAL : public HInvoke {
4473 public:
4474  HInvokeInterface(ArenaAllocator* allocator,
4475                   uint32_t number_of_arguments,
4476                   DataType::Type return_type,
4477                   uint32_t dex_pc,
4478                   uint32_t dex_method_index,
4479                   ArtMethod* resolved_method,
4480                   uint32_t imt_index)
4481      : HInvoke(allocator,
4482                number_of_arguments,
4483                0u,
4484                return_type,
4485                dex_pc,
4486                dex_method_index,
4487                resolved_method,
4488                kInterface),
4489        imt_index_(imt_index) {}
4490
4491  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4492    // TODO: Add implicit null checks in intrinsics.
4493    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4494  }
4495
4496  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
4497    // The assembly stub currently needs it.
4498    return true;
4499  }
4500
4501  uint32_t GetImtIndex() const { return imt_index_; }
4502  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
4503
4504  DECLARE_INSTRUCTION(InvokeInterface);
4505
4506 private:
4507  // Cached value of the resolved method, to avoid needing the mutator lock.
4508  const uint32_t imt_index_;
4509
4510  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
4511};
4512
4513class HNeg FINAL : public HUnaryOperation {
4514 public:
4515  HNeg(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4516      : HUnaryOperation(result_type, input, dex_pc) {
4517    DCHECK_EQ(result_type, DataType::Kind(input->GetType()));
4518  }
4519
4520  template <typename T> static T Compute(T x) { return -x; }
4521
4522  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4523    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4524  }
4525  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4526    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4527  }
4528  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4529    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4530  }
4531  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4532    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4533  }
4534
4535  DECLARE_INSTRUCTION(Neg);
4536
4537 private:
4538  DISALLOW_COPY_AND_ASSIGN(HNeg);
4539};
4540
4541class HNewArray FINAL : public HExpression<2> {
4542 public:
4543  HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc)
4544      : HExpression(DataType::Type::kReference, SideEffects::CanTriggerGC(), dex_pc) {
4545    SetRawInputAt(0, cls);
4546    SetRawInputAt(1, length);
4547  }
4548
4549  // Calls runtime so needs an environment.
4550  bool NeedsEnvironment() const OVERRIDE { return true; }
4551
4552  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
4553  bool CanThrow() const OVERRIDE { return true; }
4554
4555  bool CanBeNull() const OVERRIDE { return false; }
4556
4557  HLoadClass* GetLoadClass() const {
4558    DCHECK(InputAt(0)->IsLoadClass());
4559    return InputAt(0)->AsLoadClass();
4560  }
4561
4562  HInstruction* GetLength() const {
4563    return InputAt(1);
4564  }
4565
4566  DECLARE_INSTRUCTION(NewArray);
4567
4568 private:
4569  DISALLOW_COPY_AND_ASSIGN(HNewArray);
4570};
4571
4572class HAdd FINAL : public HBinaryOperation {
4573 public:
4574  HAdd(DataType::Type result_type,
4575       HInstruction* left,
4576       HInstruction* right,
4577       uint32_t dex_pc = kNoDexPc)
4578      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4579
4580  bool IsCommutative() const OVERRIDE { return true; }
4581
4582  template <typename T> static T Compute(T x, T y) { return x + y; }
4583
4584  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4585    return GetBlock()->GetGraph()->GetIntConstant(
4586        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4587  }
4588  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4589    return GetBlock()->GetGraph()->GetLongConstant(
4590        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4591  }
4592  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4593    return GetBlock()->GetGraph()->GetFloatConstant(
4594        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4595  }
4596  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4597    return GetBlock()->GetGraph()->GetDoubleConstant(
4598        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4599  }
4600
4601  DECLARE_INSTRUCTION(Add);
4602
4603 private:
4604  DISALLOW_COPY_AND_ASSIGN(HAdd);
4605};
4606
4607class HSub FINAL : public HBinaryOperation {
4608 public:
4609  HSub(DataType::Type result_type,
4610       HInstruction* left,
4611       HInstruction* right,
4612       uint32_t dex_pc = kNoDexPc)
4613      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4614
4615  template <typename T> static T Compute(T x, T y) { return x - y; }
4616
4617  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4618    return GetBlock()->GetGraph()->GetIntConstant(
4619        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4620  }
4621  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4622    return GetBlock()->GetGraph()->GetLongConstant(
4623        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4624  }
4625  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4626    return GetBlock()->GetGraph()->GetFloatConstant(
4627        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4628  }
4629  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4630    return GetBlock()->GetGraph()->GetDoubleConstant(
4631        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4632  }
4633
4634  DECLARE_INSTRUCTION(Sub);
4635
4636 private:
4637  DISALLOW_COPY_AND_ASSIGN(HSub);
4638};
4639
4640class HMul FINAL : public HBinaryOperation {
4641 public:
4642  HMul(DataType::Type result_type,
4643       HInstruction* left,
4644       HInstruction* right,
4645       uint32_t dex_pc = kNoDexPc)
4646      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4647
4648  bool IsCommutative() const OVERRIDE { return true; }
4649
4650  template <typename T> static T Compute(T x, T y) { return x * y; }
4651
4652  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4653    return GetBlock()->GetGraph()->GetIntConstant(
4654        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4655  }
4656  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4657    return GetBlock()->GetGraph()->GetLongConstant(
4658        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4659  }
4660  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4661    return GetBlock()->GetGraph()->GetFloatConstant(
4662        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4663  }
4664  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4665    return GetBlock()->GetGraph()->GetDoubleConstant(
4666        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4667  }
4668
4669  DECLARE_INSTRUCTION(Mul);
4670
4671 private:
4672  DISALLOW_COPY_AND_ASSIGN(HMul);
4673};
4674
4675class HDiv FINAL : public HBinaryOperation {
4676 public:
4677  HDiv(DataType::Type result_type,
4678       HInstruction* left,
4679       HInstruction* right,
4680       uint32_t dex_pc)
4681      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4682
4683  template <typename T>
4684  T ComputeIntegral(T x, T y) const {
4685    DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
4686    // Our graph structure ensures we never have 0 for `y` during
4687    // constant folding.
4688    DCHECK_NE(y, 0);
4689    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4690    return (y == -1) ? -x : x / y;
4691  }
4692
4693  template <typename T>
4694  T ComputeFP(T x, T y) const {
4695    DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
4696    return x / y;
4697  }
4698
4699  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4700    return GetBlock()->GetGraph()->GetIntConstant(
4701        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4702  }
4703  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4704    return GetBlock()->GetGraph()->GetLongConstant(
4705        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4706  }
4707  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4708    return GetBlock()->GetGraph()->GetFloatConstant(
4709        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4710  }
4711  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4712    return GetBlock()->GetGraph()->GetDoubleConstant(
4713        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4714  }
4715
4716  DECLARE_INSTRUCTION(Div);
4717
4718 private:
4719  DISALLOW_COPY_AND_ASSIGN(HDiv);
4720};
4721
4722class HRem FINAL : public HBinaryOperation {
4723 public:
4724  HRem(DataType::Type result_type,
4725       HInstruction* left,
4726       HInstruction* right,
4727       uint32_t dex_pc)
4728      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4729
4730  template <typename T>
4731  T ComputeIntegral(T x, T y) const {
4732    DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
4733    // Our graph structure ensures we never have 0 for `y` during
4734    // constant folding.
4735    DCHECK_NE(y, 0);
4736    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4737    return (y == -1) ? 0 : x % y;
4738  }
4739
4740  template <typename T>
4741  T ComputeFP(T x, T y) const {
4742    DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
4743    return std::fmod(x, y);
4744  }
4745
4746  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4747    return GetBlock()->GetGraph()->GetIntConstant(
4748        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4749  }
4750  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4751    return GetBlock()->GetGraph()->GetLongConstant(
4752        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4753  }
4754  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4755    return GetBlock()->GetGraph()->GetFloatConstant(
4756        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4757  }
4758  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4759    return GetBlock()->GetGraph()->GetDoubleConstant(
4760        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4761  }
4762
4763  DECLARE_INSTRUCTION(Rem);
4764
4765 private:
4766  DISALLOW_COPY_AND_ASSIGN(HRem);
4767};
4768
// Checks that its input is non-zero, throwing at runtime otherwise. The HDiv/
// HRem DCHECKs indicate constant folding never sees a zero divisor, so this is
// presumably emitted ahead of integral division/remainder — confirm at use site.
class HDivZeroCheck FINAL : public HExpression<1> {
 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
  // constructor.
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // The check is a pass-through: its type is always its input's current type.
  DataType::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Any two checks on the same input are interchangeable.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Needs an environment because it can throw (see the GC note above).
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};
4794
4795class HShl FINAL : public HBinaryOperation {
4796 public:
4797  HShl(DataType::Type result_type,
4798       HInstruction* value,
4799       HInstruction* distance,
4800       uint32_t dex_pc = kNoDexPc)
4801      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4802    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
4803    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
4804  }
4805
4806  template <typename T>
4807  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4808    return value << (distance & max_shift_distance);
4809  }
4810
4811  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4812    return GetBlock()->GetGraph()->GetIntConstant(
4813        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4814  }
4815  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4816    return GetBlock()->GetGraph()->GetLongConstant(
4817        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4818  }
4819  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4820                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4821    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4822    UNREACHABLE();
4823  }
4824  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4825                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4826    LOG(FATAL) << DebugName() << " is not defined for float values";
4827    UNREACHABLE();
4828  }
4829  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4830                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4831    LOG(FATAL) << DebugName() << " is not defined for double values";
4832    UNREACHABLE();
4833  }
4834
4835  DECLARE_INSTRUCTION(Shl);
4836
4837 private:
4838  DISALLOW_COPY_AND_ASSIGN(HShl);
4839};
4840
// Signed (arithmetic) right shift. Only the low bits of `distance`, masked
// with the per-type maximum shift distance, are used.
class HShr FINAL : public HBinaryOperation {
 public:
  HShr(DataType::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
  }

  // NOTE: for negative `value` this relies on `>>` being an arithmetic
  // (sign-extending) shift, which is implementation-defined pre-C++20 but is
  // the behavior of the supported toolchains.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value >> (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // The remaining overloads exist only to satisfy the HBinaryOperation
  // interface; a shift distance is always an int.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};
4886
4887class HUShr FINAL : public HBinaryOperation {
4888 public:
4889  HUShr(DataType::Type result_type,
4890        HInstruction* value,
4891        HInstruction* distance,
4892        uint32_t dex_pc = kNoDexPc)
4893      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4894    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
4895    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
4896  }
4897
4898  template <typename T>
4899  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4900    typedef typename std::make_unsigned<T>::type V;
4901    V ux = static_cast<V>(value);
4902    return static_cast<T>(ux >> (distance & max_shift_distance));
4903  }
4904
4905  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4906    return GetBlock()->GetGraph()->GetIntConstant(
4907        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4908  }
4909  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4910    return GetBlock()->GetGraph()->GetLongConstant(
4911        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4912  }
4913  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4914                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4915    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4916    UNREACHABLE();
4917  }
4918  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4919                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4920    LOG(FATAL) << DebugName() << " is not defined for float values";
4921    UNREACHABLE();
4922  }
4923  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4924                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4925    LOG(FATAL) << DebugName() << " is not defined for double values";
4926    UNREACHABLE();
4927  }
4928
4929  DECLARE_INSTRUCTION(UShr);
4930
4931 private:
4932  DISALLOW_COPY_AND_ASSIGN(HUShr);
4933};
4934
4935class HAnd FINAL : public HBinaryOperation {
4936 public:
4937  HAnd(DataType::Type result_type,
4938       HInstruction* left,
4939       HInstruction* right,
4940       uint32_t dex_pc = kNoDexPc)
4941      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4942
4943  bool IsCommutative() const OVERRIDE { return true; }
4944
4945  template <typename T> static T Compute(T x, T y) { return x & y; }
4946
4947  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4948    return GetBlock()->GetGraph()->GetIntConstant(
4949        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4950  }
4951  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4952    return GetBlock()->GetGraph()->GetLongConstant(
4953        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4954  }
4955  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4956                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4957    LOG(FATAL) << DebugName() << " is not defined for float values";
4958    UNREACHABLE();
4959  }
4960  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4961                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4962    LOG(FATAL) << DebugName() << " is not defined for double values";
4963    UNREACHABLE();
4964  }
4965
4966  DECLARE_INSTRUCTION(And);
4967
4968 private:
4969  DISALLOW_COPY_AND_ASSIGN(HAnd);
4970};
4971
4972class HOr FINAL : public HBinaryOperation {
4973 public:
4974  HOr(DataType::Type result_type,
4975      HInstruction* left,
4976      HInstruction* right,
4977      uint32_t dex_pc = kNoDexPc)
4978      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4979
4980  bool IsCommutative() const OVERRIDE { return true; }
4981
4982  template <typename T> static T Compute(T x, T y) { return x | y; }
4983
4984  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4985    return GetBlock()->GetGraph()->GetIntConstant(
4986        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4987  }
4988  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4989    return GetBlock()->GetGraph()->GetLongConstant(
4990        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4991  }
4992  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4993                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4994    LOG(FATAL) << DebugName() << " is not defined for float values";
4995    UNREACHABLE();
4996  }
4997  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4998                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4999    LOG(FATAL) << DebugName() << " is not defined for double values";
5000    UNREACHABLE();
5001  }
5002
5003  DECLARE_INSTRUCTION(Or);
5004
5005 private:
5006  DISALLOW_COPY_AND_ASSIGN(HOr);
5007};
5008
5009class HXor FINAL : public HBinaryOperation {
5010 public:
5011  HXor(DataType::Type result_type,
5012       HInstruction* left,
5013       HInstruction* right,
5014       uint32_t dex_pc = kNoDexPc)
5015      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
5016
5017  bool IsCommutative() const OVERRIDE { return true; }
5018
5019  template <typename T> static T Compute(T x, T y) { return x ^ y; }
5020
5021  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
5022    return GetBlock()->GetGraph()->GetIntConstant(
5023        Compute(x->GetValue(), y->GetValue()), GetDexPc());
5024  }
5025  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
5026    return GetBlock()->GetGraph()->GetLongConstant(
5027        Compute(x->GetValue(), y->GetValue()), GetDexPc());
5028  }
5029  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5030                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
5031    LOG(FATAL) << DebugName() << " is not defined for float values";
5032    UNREACHABLE();
5033  }
5034  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5035                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
5036    LOG(FATAL) << DebugName() << " is not defined for double values";
5037    UNREACHABLE();
5038  }
5039
5040  DECLARE_INSTRUCTION(Xor);
5041
5042 private:
5043  DISALLOW_COPY_AND_ASSIGN(HXor);
5044};
5045
5046class HRor FINAL : public HBinaryOperation {
5047 public:
5048  HRor(DataType::Type result_type, HInstruction* value, HInstruction* distance)
5049    : HBinaryOperation(result_type, value, distance) {
5050    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5051    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5052  }
5053
5054  template <typename T>
5055  static T Compute(T value, int32_t distance, int32_t max_shift_value) {
5056    typedef typename std::make_unsigned<T>::type V;
5057    V ux = static_cast<V>(value);
5058    if ((distance & max_shift_value) == 0) {
5059      return static_cast<T>(ux);
5060    } else {
5061      const V reg_bits = sizeof(T) * 8;
5062      return static_cast<T>(ux >> (distance & max_shift_value)) |
5063                           (value << (reg_bits - (distance & max_shift_value)));
5064    }
5065  }
5066
5067  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
5068    return GetBlock()->GetGraph()->GetIntConstant(
5069        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5070  }
5071  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
5072    return GetBlock()->GetGraph()->GetLongConstant(
5073        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5074  }
5075  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5076                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5077    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5078    UNREACHABLE();
5079  }
5080  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5081                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5082    LOG(FATAL) << DebugName() << " is not defined for float values";
5083    UNREACHABLE();
5084  }
5085  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5086                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5087    LOG(FATAL) << DebugName() << " is not defined for double values";
5088    UNREACHABLE();
5089  }
5090
5091  DECLARE_INSTRUCTION(Ror);
5092
5093 private:
5094  DISALLOW_COPY_AND_ASSIGN(HRor);
5095};
5096
// The value of a parameter in this method. Its location depends on
// the calling convention.
class HParameterValue FINAL : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  dex::TypeIndex type_index,
                  uint8_t index,
                  DataType::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    SetPackedFlag<kFlagIsThis>(is_this);
    // The implicit `this` argument can never be null; all other reference
    // parameters start out assumed nullable (may be cleared later via
    // SetCanBeNull).
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const dex::TypeIndex type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};
5140
5141class HNot FINAL : public HUnaryOperation {
5142 public:
5143  HNot(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5144      : HUnaryOperation(result_type, input, dex_pc) {}
5145
5146  bool CanBeMoved() const OVERRIDE { return true; }
5147  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5148    return true;
5149  }
5150
5151  template <typename T> static T Compute(T x) { return ~x; }
5152
5153  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
5154    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5155  }
5156  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
5157    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
5158  }
5159  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5160    LOG(FATAL) << DebugName() << " is not defined for float values";
5161    UNREACHABLE();
5162  }
5163  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5164    LOG(FATAL) << DebugName() << " is not defined for double values";
5165    UNREACHABLE();
5166  }
5167
5168  DECLARE_INSTRUCTION(Not);
5169
5170 private:
5171  DISALLOW_COPY_AND_ASSIGN(HNot);
5172};
5173
5174class HBooleanNot FINAL : public HUnaryOperation {
5175 public:
5176  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
5177      : HUnaryOperation(DataType::Type::kBool, input, dex_pc) {}
5178
5179  bool CanBeMoved() const OVERRIDE { return true; }
5180  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5181    return true;
5182  }
5183
5184  template <typename T> static bool Compute(T x) {
5185    DCHECK(IsUint<1>(x)) << x;
5186    return !x;
5187  }
5188
5189  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
5190    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5191  }
5192  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5193    LOG(FATAL) << DebugName() << " is not defined for long values";
5194    UNREACHABLE();
5195  }
5196  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5197    LOG(FATAL) << DebugName() << " is not defined for float values";
5198    UNREACHABLE();
5199  }
5200  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5201    LOG(FATAL) << DebugName() << " is not defined for double values";
5202    UNREACHABLE();
5203  }
5204
5205  DECLARE_INSTRUCTION(BooleanNot);
5206
5207 private:
5208  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
5209};
5210
// Explicit conversion of a value to another primitive type.
class HTypeConversion FINAL : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
    // Invariant: We should never generate a conversion to a Boolean value.
    DCHECK_NE(DataType::Type::kBool, result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  DataType::Type GetInputType() const { return GetInput()->GetType(); }
  // The result type is the instruction's own type, fixed at construction.
  DataType::Type GetResultType() const { return GetType(); }

  // Side-effect free and a pure function of (input, result type), so it can
  // be hoisted and deduplicated.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result.  If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};
5239
// Sentinel for "no register assigned": -1 converts to the maximum uint32_t value.
static constexpr uint32_t kNoRegNumber = -1;
5241
// Checks that its input is non-null, throwing at runtime otherwise.
class HNullCheck FINAL : public HExpression<1> {
 public:
  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
  // constructor.
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // Any two null checks on the same input are interchangeable.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Needs an environment because it can throw (see the GC note above).
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  // Once the check passes, the value is known to be non-null.
  bool CanBeNull() const OVERRIDE { return false; }


  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};
5268
5269// Embeds an ArtField and all the information required by the compiler. We cache
5270// that information to avoid requiring the mutator lock every time we need it.
5271class FieldInfo : public ValueObject {
5272 public:
5273  FieldInfo(ArtField* field,
5274            MemberOffset field_offset,
5275            DataType::Type field_type,
5276            bool is_volatile,
5277            uint32_t index,
5278            uint16_t declaring_class_def_index,
5279            const DexFile& dex_file)
5280      : field_(field),
5281        field_offset_(field_offset),
5282        field_type_(field_type),
5283        is_volatile_(is_volatile),
5284        index_(index),
5285        declaring_class_def_index_(declaring_class_def_index),
5286        dex_file_(dex_file) {}
5287
5288  ArtField* GetField() const { return field_; }
5289  MemberOffset GetFieldOffset() const { return field_offset_; }
5290  DataType::Type GetFieldType() const { return field_type_; }
5291  uint32_t GetFieldIndex() const { return index_; }
5292  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
5293  const DexFile& GetDexFile() const { return dex_file_; }
5294  bool IsVolatile() const { return is_volatile_; }
5295
5296 private:
5297  ArtField* const field_;
5298  const MemberOffset field_offset_;
5299  const DataType::Type field_type_;
5300  const bool is_volatile_;
5301  const uint32_t index_;
5302  const uint16_t declaring_class_def_index_;
5303  const DexFile& dex_file_;
5304};
5305
// Reads an instance field from the object in input 0.
class HInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    ArtField* field,
                    DataType::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, value);
  }

  // Volatile reads carry ordering constraints, so only non-volatile gets may
  // be moved or deduplicated.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Two gets are equal when they read the same offset (the object being the
  // same is implied by the common-subexpression machinery comparing inputs).
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  // An implicit null check works only on the object input and only when the
  // field offset is small enough (delegated to art::CanDoImplicitNullCheckOn).
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  // Mix the field offset into the hash so gets of different fields land in
  // different buckets.
  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};
5355
// Writes `value` (input 1) into an instance field of the object in input 0.
class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    ArtField* field,
                    DataType::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Conservatively assume the stored value may be null until an analysis
    // proves otherwise (see ClearValueCanBeNull).
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  // An implicit null check works only on the object input and only when the
  // field offset is small enough (delegated to art::CanDoImplicitNullCheckOn).
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};
5405
5406class HArrayGet FINAL : public HExpression<2> {
5407 public:
5408  HArrayGet(HInstruction* array,
5409            HInstruction* index,
5410            DataType::Type type,
5411            uint32_t dex_pc)
5412     : HArrayGet(array,
5413                 index,
5414                 type,
5415                 SideEffects::ArrayReadOfType(type),
5416                 dex_pc,
5417                 /* is_string_char_at */ false) {}
5418
5419  HArrayGet(HInstruction* array,
5420            HInstruction* index,
5421            DataType::Type type,
5422            SideEffects side_effects,
5423            uint32_t dex_pc,
5424            bool is_string_char_at)
5425      : HExpression(type, side_effects, dex_pc) {
5426    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
5427    SetRawInputAt(0, array);
5428    SetRawInputAt(1, index);
5429  }
5430
5431  bool CanBeMoved() const OVERRIDE { return true; }
5432  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5433    return true;
5434  }
5435  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
5436    // TODO: We can be smarter here.
5437    // Currently, unless the array is the result of NewArray, the array access is always
5438    // preceded by some form of null NullCheck necessary for the bounds check, usually
5439    // implicit null check on the ArrayLength input to BoundsCheck or Deoptimize for
5440    // dynamic BCE. There are cases when these could be removed to produce better code.
5441    // If we ever add optimizations to do so we should allow an implicit check here
5442    // (as long as the address falls in the first page).
5443    //
5444    // As an example of such fancy optimization, we could eliminate BoundsCheck for
5445    //     a = cond ? new int[1] : null;
5446    //     a[0];  // The Phi does not need bounds check for either input.
5447    return false;
5448  }
5449
5450  bool IsEquivalentOf(HArrayGet* other) const {
5451    bool result = (GetDexPc() == other->GetDexPc());
5452    if (kIsDebugBuild && result) {
5453      DCHECK_EQ(GetBlock(), other->GetBlock());
5454      DCHECK_EQ(GetArray(), other->GetArray());
5455      DCHECK_EQ(GetIndex(), other->GetIndex());
5456      if (DataType::IsIntOrLongType(GetType())) {
5457        DCHECK(DataType::IsFloatingPointType(other->GetType())) << other->GetType();
5458      } else {
5459        DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
5460        DCHECK(DataType::IsIntOrLongType(other->GetType())) << other->GetType();
5461      }
5462    }
5463    return result;
5464  }
5465
  // Whether this HArrayGet was created from the String.charAt() intrinsic
  // (see the kFlagIsStringCharAt comment below).
  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  // Input 0 is the array reference, input 1 the element index.
  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
5470
5471  DECLARE_INSTRUCTION(ArrayGet);
5472
5473 private:
5474  // We treat a String as an array, creating the HArrayGet from String.charAt()
5475  // intrinsic in the instruction simplifier. We can always determine whether
5476  // a particular HArrayGet is actually a String.charAt() by looking at the type
5477  // of the input but that requires holding the mutator lock, so we prefer to use
5478  // a flag, so that code generators don't need to do the locking.
5479  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
5480  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
5481  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
5482                "Too many packed fields.");
5483
5484  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
5485};
5486
5487class HArraySet FINAL : public HTemplateInstruction<3> {
5488 public:
5489  HArraySet(HInstruction* array,
5490            HInstruction* index,
5491            HInstruction* value,
5492            DataType::Type expected_component_type,
5493            uint32_t dex_pc)
5494      : HArraySet(array,
5495                  index,
5496                  value,
5497                  expected_component_type,
5498                  // Make a best guess for side effects now, may be refined during SSA building.
5499                  ComputeSideEffects(GetComponentType(value->GetType(), expected_component_type)),
5500                  dex_pc) {}
5501
5502  HArraySet(HInstruction* array,
5503            HInstruction* index,
5504            HInstruction* value,
5505            DataType::Type expected_component_type,
5506            SideEffects side_effects,
5507            uint32_t dex_pc)
5508      : HTemplateInstruction(side_effects, dex_pc) {
5509    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
5510    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == DataType::Type::kReference);
5511    SetPackedFlag<kFlagValueCanBeNull>(true);
5512    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
5513    SetRawInputAt(0, array);
5514    SetRawInputAt(1, index);
5515    SetRawInputAt(2, value);
5516  }
5517
5518  bool NeedsEnvironment() const OVERRIDE {
5519    // We call a runtime method to throw ArrayStoreException.
5520    return NeedsTypeCheck();
5521  }
5522
5523  // Can throw ArrayStoreException.
5524  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }
5525
5526  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
5527    // TODO: Same as for ArrayGet.
5528    return false;
5529  }
5530
5531  void ClearNeedsTypeCheck() {
5532    SetPackedFlag<kFlagNeedsTypeCheck>(false);
5533  }
5534
5535  void ClearValueCanBeNull() {
5536    SetPackedFlag<kFlagValueCanBeNull>(false);
5537  }
5538
5539  void SetStaticTypeOfArrayIsObjectArray() {
5540    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
5541  }
5542
5543  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
5544  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
5545  bool StaticTypeOfArrayIsObjectArray() const {
5546    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
5547  }
5548
5549  HInstruction* GetArray() const { return InputAt(0); }
5550  HInstruction* GetIndex() const { return InputAt(1); }
5551  HInstruction* GetValue() const { return InputAt(2); }
5552
5553  DataType::Type GetComponentType() const {
5554    return GetComponentType(GetValue()->GetType(), GetRawExpectedComponentType());
5555  }
5556
5557  static DataType::Type GetComponentType(DataType::Type value_type,
5558                                         DataType::Type expected_component_type) {
5559    // The Dex format does not type floating point index operations. Since the
5560    // `expected_component_type` comes from SSA building and can therefore not
5561    // be correct, we also check what is the value type. If it is a floating
5562    // point type, we must use that type.
5563    return ((value_type == DataType::Type::kFloat32) || (value_type == DataType::Type::kFloat64))
5564        ? value_type
5565        : expected_component_type;
5566  }
5567
5568  DataType::Type GetRawExpectedComponentType() const {
5569    return GetPackedField<ExpectedComponentTypeField>();
5570  }
5571
5572  static SideEffects ComputeSideEffects(DataType::Type type) {
5573    return SideEffects::ArrayWriteOfType(type).Union(SideEffectsForArchRuntimeCalls(type));
5574  }
5575
5576  static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type value_type) {
5577    return (value_type == DataType::Type::kReference) ? SideEffects::CanTriggerGC()
5578                                                      : SideEffects::None();
5579  }
5580
5581  DECLARE_INSTRUCTION(ArraySet);
5582
5583 private:
5584  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
5585  static constexpr size_t kFieldExpectedComponentTypeSize =
5586      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
5587  static constexpr size_t kFlagNeedsTypeCheck =
5588      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
5589  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
5590  // Cached information for the reference_type_info_ so that codegen
5591  // does not need to inspect the static type.
5592  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
5593  static constexpr size_t kNumberOfArraySetPackedBits =
5594      kFlagStaticTypeOfArrayIsObjectArray + 1;
5595  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
5596  using ExpectedComponentTypeField =
5597      BitField<DataType::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;
5598
5599  DISALLOW_COPY_AND_ASSIGN(HArraySet);
5600};
5601
5602class HArrayLength FINAL : public HExpression<1> {
5603 public:
5604  HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
5605      : HExpression(DataType::Type::kInt32, SideEffects::None(), dex_pc) {
5606    SetPackedFlag<kFlagIsStringLength>(is_string_length);
5607    // Note that arrays do not change length, so the instruction does not
5608    // depend on any write.
5609    SetRawInputAt(0, array);
5610  }
5611
5612  bool CanBeMoved() const OVERRIDE { return true; }
5613  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5614    return true;
5615  }
5616  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
5617    return obj == InputAt(0);
5618  }
5619
5620  bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }
5621
5622  DECLARE_INSTRUCTION(ArrayLength);
5623
5624 private:
5625  // We treat a String as an array, creating the HArrayLength from String.length()
5626  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
5627  // determine whether a particular HArrayLength is actually a String.length() by
5628  // looking at the type of the input but that requires holding the mutator lock, so
5629  // we prefer to use a flag, so that code generators don't need to do the locking.
5630  static constexpr size_t kFlagIsStringLength = kNumberOfExpressionPackedBits;
5631  static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
5632  static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
5633                "Too many packed fields.");
5634
5635  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
5636};
5637
5638class HBoundsCheck FINAL : public HExpression<2> {
5639 public:
5640  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
5641  // constructor.
5642  HBoundsCheck(HInstruction* index,
5643               HInstruction* length,
5644               uint32_t dex_pc,
5645               bool string_char_at = false)
5646      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
5647    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(index->GetType()));
5648    SetPackedFlag<kFlagIsStringCharAt>(string_char_at);
5649    SetRawInputAt(0, index);
5650    SetRawInputAt(1, length);
5651  }
5652
5653  bool CanBeMoved() const OVERRIDE { return true; }
5654  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5655    return true;
5656  }
5657
5658  bool NeedsEnvironment() const OVERRIDE { return true; }
5659
5660  bool CanThrow() const OVERRIDE { return true; }
5661
5662  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
5663
5664  HInstruction* GetIndex() const { return InputAt(0); }
5665
5666  DECLARE_INSTRUCTION(BoundsCheck);
5667
5668 private:
5669  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
5670
5671  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
5672};
5673
5674class HSuspendCheck FINAL : public HTemplateInstruction<0> {
5675 public:
5676  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
5677      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}
5678
5679  bool NeedsEnvironment() const OVERRIDE {
5680    return true;
5681  }
5682
5683  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
5684  SlowPathCode* GetSlowPath() const { return slow_path_; }
5685
5686  DECLARE_INSTRUCTION(SuspendCheck);
5687
5688 private:
5689  // Only used for code generation, in order to share the same slow path between back edges
5690  // of a same loop.
5691  SlowPathCode* slow_path_;
5692
5693  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
5694};
5695
5696// Pseudo-instruction which provides the native debugger with mapping information.
5697// It ensures that we can generate line number and local variables at this point.
5698class HNativeDebugInfo : public HTemplateInstruction<0> {
5699 public:
5700  explicit HNativeDebugInfo(uint32_t dex_pc)
5701      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}
5702
5703  bool NeedsEnvironment() const OVERRIDE {
5704    return true;
5705  }
5706
5707  DECLARE_INSTRUCTION(NativeDebugInfo);
5708
5709 private:
5710  DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
5711};
5712
5713/**
5714 * Instruction to load a Class object.
5715 */
5716class HLoadClass FINAL : public HInstruction {
5717 public:
5718  // Determines how to load the Class.
5719  enum class LoadKind {
5720    // We cannot load this class. See HSharpening::SharpenLoadClass.
5721    kInvalid = -1,
5722
5723    // Use the Class* from the method's own ArtMethod*.
5724    kReferrersClass,
5725
5726    // Use PC-relative boot image Class* address that will be known at link time.
5727    // Used for boot image classes referenced by boot image code.
5728    kBootImageLinkTimePcRelative,
5729
5730    // Use a known boot image Class* address, embedded in the code by the codegen.
5731    // Used for boot image classes referenced by apps in AOT- and JIT-compiled code.
5732    kBootImageAddress,
5733
5734    // Use a PC-relative load from a boot image ClassTable mmapped into the .bss
5735    // of the oat file.
5736    kBootImageClassTable,
5737
5738    // Load from an entry in the .bss section using a PC-relative load.
5739    // Used for classes outside boot image when .bss is accessible with a PC-relative load.
5740    kBssEntry,
5741
5742    // Load from the root table associated with the JIT compiled method.
5743    kJitTableAddress,
5744
5745    // Load using a simple runtime call. This is the fall-back load kind when
5746    // the codegen is unable to use another appropriate kind.
5747    kRuntimeCall,
5748
5749    kLast = kRuntimeCall
5750  };
5751
5752  HLoadClass(HCurrentMethod* current_method,
5753             dex::TypeIndex type_index,
5754             const DexFile& dex_file,
5755             Handle<mirror::Class> klass,
5756             bool is_referrers_class,
5757             uint32_t dex_pc,
5758             bool needs_access_check)
5759      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
5760        special_input_(HUserRecord<HInstruction*>(current_method)),
5761        type_index_(type_index),
5762        dex_file_(dex_file),
5763        klass_(klass),
5764        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
5765    // Referrers class should not need access check. We never inline unverified
5766    // methods so we can't possibly end up in this situation.
5767    DCHECK(!is_referrers_class || !needs_access_check);
5768
5769    SetPackedField<LoadKindField>(
5770        is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kRuntimeCall);
5771    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
5772    SetPackedFlag<kFlagIsInBootImage>(false);
5773    SetPackedFlag<kFlagGenerateClInitCheck>(false);
5774  }
5775
5776  void SetLoadKind(LoadKind load_kind);
5777
5778  LoadKind GetLoadKind() const {
5779    return GetPackedField<LoadKindField>();
5780  }
5781
5782  bool CanBeMoved() const OVERRIDE { return true; }
5783
5784  bool InstructionDataEquals(const HInstruction* other) const;
5785
5786  size_t ComputeHashCode() const OVERRIDE { return type_index_.index_; }
5787
5788  bool CanBeNull() const OVERRIDE { return false; }
5789
5790  bool NeedsEnvironment() const OVERRIDE {
5791    return CanCallRuntime();
5792  }
5793
5794  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
5795    // The entrypoint the code generator is going to call does not do
5796    // clinit of the class.
5797    DCHECK(!NeedsAccessCheck());
5798    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
5799  }
5800
5801  bool CanCallRuntime() const {
5802    return NeedsAccessCheck() ||
5803           MustGenerateClinitCheck() ||
5804           GetLoadKind() == LoadKind::kRuntimeCall ||
5805           GetLoadKind() == LoadKind::kBssEntry;
5806  }
5807
5808  bool CanThrow() const OVERRIDE {
5809    return NeedsAccessCheck() ||
5810           MustGenerateClinitCheck() ||
5811           // If the class is in the boot image, the lookup in the runtime call cannot throw.
5812           // This keeps CanThrow() consistent between non-PIC (using kBootImageAddress) and
5813           // PIC and subsequently avoids a DCE behavior dependency on the PIC option.
5814           ((GetLoadKind() == LoadKind::kRuntimeCall ||
5815             GetLoadKind() == LoadKind::kBssEntry) &&
5816            !IsInBootImage());
5817  }
5818
5819  ReferenceTypeInfo GetLoadedClassRTI() {
5820    return loaded_class_rti_;
5821  }
5822
5823  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
5824    // Make sure we only set exact types (the loaded class should never be merged).
5825    DCHECK(rti.IsExact());
5826    loaded_class_rti_ = rti;
5827  }
5828
5829  dex::TypeIndex GetTypeIndex() const { return type_index_; }
5830  const DexFile& GetDexFile() const { return dex_file_; }
5831
5832  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
5833    return GetLoadKind() == LoadKind::kRuntimeCall;
5834  }
5835
5836  static SideEffects SideEffectsForArchRuntimeCalls() {
5837    return SideEffects::CanTriggerGC();
5838  }
5839
5840  bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
5841  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
5842  bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
5843  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
5844
5845  void MarkInBootImage() {
5846    SetPackedFlag<kFlagIsInBootImage>(true);
5847  }
5848
5849  void AddSpecialInput(HInstruction* special_input);
5850
5851  using HInstruction::GetInputRecords;  // Keep the const version visible.
5852  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
5853    return ArrayRef<HUserRecord<HInstruction*>>(
5854        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
5855  }
5856
5857  DataType::Type GetType() const OVERRIDE {
5858    return DataType::Type::kReference;
5859  }
5860
5861  Handle<mirror::Class> GetClass() const {
5862    return klass_;
5863  }
5864
5865  DECLARE_INSTRUCTION(LoadClass);
5866
5867 private:
5868  static constexpr size_t kFlagNeedsAccessCheck    = kNumberOfGenericPackedBits;
5869  static constexpr size_t kFlagIsInBootImage       = kFlagNeedsAccessCheck + 1;
5870  // Whether this instruction must generate the initialization check.
5871  // Used for code generation.
5872  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
5873  static constexpr size_t kFieldLoadKind           = kFlagGenerateClInitCheck + 1;
5874  static constexpr size_t kFieldLoadKindSize =
5875      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
5876  static constexpr size_t kNumberOfLoadClassPackedBits = kFieldLoadKind + kFieldLoadKindSize;
5877  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
5878  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
5879
5880  static bool HasTypeReference(LoadKind load_kind) {
5881    return load_kind == LoadKind::kReferrersClass ||
5882        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5883        load_kind == LoadKind::kBootImageClassTable ||
5884        load_kind == LoadKind::kBssEntry ||
5885        load_kind == LoadKind::kRuntimeCall;
5886  }
5887
5888  void SetLoadKindInternal(LoadKind load_kind);
5889
5890  // The special input is the HCurrentMethod for kRuntimeCall or kReferrersClass.
5891  // For other load kinds it's empty or possibly some architecture-specific instruction
5892  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
5893  HUserRecord<HInstruction*> special_input_;
5894
5895  // A type index and dex file where the class can be accessed. The dex file can be:
5896  // - The compiling method's dex file if the class is defined there too.
5897  // - The compiling method's dex file if the class is referenced there.
5898  // - The dex file where the class is defined. When the load kind can only be
5899  //   kBssEntry or kRuntimeCall, we cannot emit code for this `HLoadClass`.
5900  const dex::TypeIndex type_index_;
5901  const DexFile& dex_file_;
5902
5903  Handle<mirror::Class> klass_;
5904
5905  ReferenceTypeInfo loaded_class_rti_;
5906
5907  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
5908};
5909std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
5910
5911// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
5912inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
5913  // The special input is used for PC-relative loads on some architectures,
5914  // including literal pool loads, which are PC-relative too.
5915  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5916         GetLoadKind() == LoadKind::kBootImageAddress ||
5917         GetLoadKind() == LoadKind::kBootImageClassTable ||
5918         GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
5919  DCHECK(special_input_.GetInstruction() == nullptr);
5920  special_input_ = HUserRecord<HInstruction*>(special_input);
5921  special_input->AddUseAt(this, 0);
5922}
5923
5924class HLoadString FINAL : public HInstruction {
5925 public:
5926  // Determines how to load the String.
5927  enum class LoadKind {
5928    // Use PC-relative boot image String* address that will be known at link time.
5929    // Used for boot image strings referenced by boot image code.
5930    kBootImageLinkTimePcRelative,
5931
5932    // Use a known boot image String* address, embedded in the code by the codegen.
5933    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
5934    kBootImageAddress,
5935
5936    // Use a PC-relative load from a boot image InternTable mmapped into the .bss
5937    // of the oat file.
5938    kBootImageInternTable,
5939
5940    // Load from an entry in the .bss section using a PC-relative load.
5941    // Used for strings outside boot image when .bss is accessible with a PC-relative load.
5942    kBssEntry,
5943
5944    // Load from the root table associated with the JIT compiled method.
5945    kJitTableAddress,
5946
5947    // Load using a simple runtime call. This is the fall-back load kind when
5948    // the codegen is unable to use another appropriate kind.
5949    kRuntimeCall,
5950
5951    kLast = kRuntimeCall,
5952  };
5953
5954  HLoadString(HCurrentMethod* current_method,
5955              dex::StringIndex string_index,
5956              const DexFile& dex_file,
5957              uint32_t dex_pc)
5958      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
5959        special_input_(HUserRecord<HInstruction*>(current_method)),
5960        string_index_(string_index),
5961        dex_file_(dex_file) {
5962    SetPackedField<LoadKindField>(LoadKind::kRuntimeCall);
5963  }
5964
5965  void SetLoadKind(LoadKind load_kind);
5966
5967  LoadKind GetLoadKind() const {
5968    return GetPackedField<LoadKindField>();
5969  }
5970
5971  const DexFile& GetDexFile() const {
5972    return dex_file_;
5973  }
5974
5975  dex::StringIndex GetStringIndex() const {
5976    return string_index_;
5977  }
5978
5979  Handle<mirror::String> GetString() const {
5980    return string_;
5981  }
5982
5983  void SetString(Handle<mirror::String> str) {
5984    string_ = str;
5985  }
5986
5987  bool CanBeMoved() const OVERRIDE { return true; }
5988
5989  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE;
5990
5991  size_t ComputeHashCode() const OVERRIDE { return string_index_.index_; }
5992
5993  // Will call the runtime if we need to load the string through
5994  // the dex cache and the string is not guaranteed to be there yet.
5995  bool NeedsEnvironment() const OVERRIDE {
5996    LoadKind load_kind = GetLoadKind();
5997    if (load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5998        load_kind == LoadKind::kBootImageAddress ||
5999        load_kind == LoadKind::kBootImageInternTable ||
6000        load_kind == LoadKind::kJitTableAddress) {
6001      return false;
6002    }
6003    return true;
6004  }
6005
6006  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
6007    return GetLoadKind() == LoadKind::kRuntimeCall;
6008  }
6009
6010  bool CanBeNull() const OVERRIDE { return false; }
6011  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }
6012
6013  static SideEffects SideEffectsForArchRuntimeCalls() {
6014    return SideEffects::CanTriggerGC();
6015  }
6016
6017  void AddSpecialInput(HInstruction* special_input);
6018
6019  using HInstruction::GetInputRecords;  // Keep the const version visible.
6020  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
6021    return ArrayRef<HUserRecord<HInstruction*>>(
6022        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
6023  }
6024
6025  DataType::Type GetType() const OVERRIDE {
6026    return DataType::Type::kReference;
6027  }
6028
6029  DECLARE_INSTRUCTION(LoadString);
6030
6031 private:
6032  static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
6033  static constexpr size_t kFieldLoadKindSize =
6034      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
6035  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
6036  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6037  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
6038
6039  void SetLoadKindInternal(LoadKind load_kind);
6040
6041  // The special input is the HCurrentMethod for kRuntimeCall.
6042  // For other load kinds it's empty or possibly some architecture-specific instruction
6043  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
6044  HUserRecord<HInstruction*> special_input_;
6045
6046  dex::StringIndex string_index_;
6047  const DexFile& dex_file_;
6048
6049  Handle<mirror::String> string_;
6050
6051  DISALLOW_COPY_AND_ASSIGN(HLoadString);
6052};
6053std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
6054
6055// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
6056inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
6057  // The special input is used for PC-relative loads on some architectures,
6058  // including literal pool loads, which are PC-relative too.
6059  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6060         GetLoadKind() == LoadKind::kBootImageAddress ||
6061         GetLoadKind() == LoadKind::kBootImageInternTable ||
6062         GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
6063  // HLoadString::GetInputRecords() returns an empty array at this point,
6064  // so use the GetInputRecords() from the base class to set the input record.
6065  DCHECK(special_input_.GetInstruction() == nullptr);
6066  special_input_ = HUserRecord<HInstruction*>(special_input);
6067  special_input->AddUseAt(this, 0);
6068}
6069
6070/**
6071 * Performs an initialization check on its Class object input.
6072 */
6073class HClinitCheck FINAL : public HExpression<1> {
6074 public:
6075  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
6076      : HExpression(
6077            DataType::Type::kReference,
6078            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
6079            dex_pc) {
6080    SetRawInputAt(0, constant);
6081  }
6082
6083  bool CanBeMoved() const OVERRIDE { return true; }
6084  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
6085    return true;
6086  }
6087
6088  bool NeedsEnvironment() const OVERRIDE {
6089    // May call runtime to initialize the class.
6090    return true;
6091  }
6092
6093  bool CanThrow() const OVERRIDE { return true; }
6094
6095  HLoadClass* GetLoadClass() const {
6096    DCHECK(InputAt(0)->IsLoadClass());
6097    return InputAt(0)->AsLoadClass();
6098  }
6099
6100  DECLARE_INSTRUCTION(ClinitCheck);
6101
6102 private:
6103  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
6104};
6105
6106class HStaticFieldGet FINAL : public HExpression<1> {
6107 public:
6108  HStaticFieldGet(HInstruction* cls,
6109                  ArtField* field,
6110                  DataType::Type field_type,
6111                  MemberOffset field_offset,
6112                  bool is_volatile,
6113                  uint32_t field_idx,
6114                  uint16_t declaring_class_def_index,
6115                  const DexFile& dex_file,
6116                  uint32_t dex_pc)
6117      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
6118        field_info_(field,
6119                    field_offset,
6120                    field_type,
6121                    is_volatile,
6122                    field_idx,
6123                    declaring_class_def_index,
6124                    dex_file) {
6125    SetRawInputAt(0, cls);
6126  }
6127
6128
6129  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }
6130
6131  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
6132    const HStaticFieldGet* other_get = other->AsStaticFieldGet();
6133    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
6134  }
6135
6136  size_t ComputeHashCode() const OVERRIDE {
6137    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
6138  }
6139
6140  const FieldInfo& GetFieldInfo() const { return field_info_; }
6141  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6142  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
6143  bool IsVolatile() const { return field_info_.IsVolatile(); }
6144
6145  DECLARE_INSTRUCTION(StaticFieldGet);
6146
6147 private:
6148  const FieldInfo field_info_;
6149
6150  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
6151};
6152
6153class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
6154 public:
6155  HStaticFieldSet(HInstruction* cls,
6156                  HInstruction* value,
6157                  ArtField* field,
6158                  DataType::Type field_type,
6159                  MemberOffset field_offset,
6160                  bool is_volatile,
6161                  uint32_t field_idx,
6162                  uint16_t declaring_class_def_index,
6163                  const DexFile& dex_file,
6164                  uint32_t dex_pc)
6165      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
6166        field_info_(field,
6167                    field_offset,
6168                    field_type,
6169                    is_volatile,
6170                    field_idx,
6171                    declaring_class_def_index,
6172                    dex_file) {
6173    SetPackedFlag<kFlagValueCanBeNull>(true);
6174    SetRawInputAt(0, cls);
6175    SetRawInputAt(1, value);
6176  }
6177
6178  const FieldInfo& GetFieldInfo() const { return field_info_; }
6179  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6180  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
6181  bool IsVolatile() const { return field_info_.IsVolatile(); }
6182
6183  HInstruction* GetValue() const { return InputAt(1); }
6184  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
6185  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
6186
6187  DECLARE_INSTRUCTION(StaticFieldSet);
6188
6189 private:
6190  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
6191  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
6192  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
6193                "Too many packed fields.");
6194
6195  const FieldInfo field_info_;
6196
6197  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
6198};
6199
6200class HUnresolvedInstanceFieldGet FINAL : public HExpression<1> {
6201 public:
6202  HUnresolvedInstanceFieldGet(HInstruction* obj,
6203                              DataType::Type field_type,
6204                              uint32_t field_index,
6205                              uint32_t dex_pc)
6206      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
6207        field_index_(field_index) {
6208    SetRawInputAt(0, obj);
6209  }
6210
6211  bool NeedsEnvironment() const OVERRIDE { return true; }
6212  bool CanThrow() const OVERRIDE { return true; }
6213
6214  DataType::Type GetFieldType() const { return GetType(); }
6215  uint32_t GetFieldIndex() const { return field_index_; }
6216
6217  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);
6218
6219 private:
6220  const uint32_t field_index_;
6221
6222  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
6223};
6224
6225class HUnresolvedInstanceFieldSet FINAL : public HTemplateInstruction<2> {
6226 public:
6227  HUnresolvedInstanceFieldSet(HInstruction* obj,
6228                              HInstruction* value,
6229                              DataType::Type field_type,
6230                              uint32_t field_index,
6231                              uint32_t dex_pc)
6232      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6233        field_index_(field_index) {
6234    SetPackedField<FieldTypeField>(field_type);
6235    DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
6236    SetRawInputAt(0, obj);
6237    SetRawInputAt(1, value);
6238  }
6239
6240  bool NeedsEnvironment() const OVERRIDE { return true; }
6241  bool CanThrow() const OVERRIDE { return true; }
6242
6243  DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6244  uint32_t GetFieldIndex() const { return field_index_; }
6245
6246  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
6247
6248 private:
6249  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6250  static constexpr size_t kFieldFieldTypeSize =
6251      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
6252  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
6253      kFieldFieldType + kFieldFieldTypeSize;
6254  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6255                "Too many packed fields.");
6256  using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;
6257
6258  const uint32_t field_index_;
6259
6260  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
6261};
6262
// Read of a static field whose resolution failed at compile time;
// the actual field lookup is deferred (hence "unresolved"). Takes no
// inputs since there is no receiver object.
class HUnresolvedStaticFieldGet FINAL : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(DataType::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
  }

  // Deferred resolution requires an environment and may throw.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DataType::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 private:
  // Index identifying the field to be resolved at runtime.
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
};
6285
// Store to a static field whose resolution failed at compile time;
// the actual field lookup is deferred (hence "unresolved").
class HUnresolvedStaticFieldSet FINAL : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            DataType::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetPackedField<FieldTypeField>(field_type);
    // The stored value's kind must agree with the declared field type.
    DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
    SetRawInputAt(0, value);
  }

  // Deferred resolution requires an environment and may throw.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 private:
  // Packed-bit layout: the field type is stored after the generic packed bits.
  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
      kFieldFieldType + kFieldFieldTypeSize;
  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;

  // Index identifying the field to be resolved at runtime.
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
};
6321
// Implement the move-exception DEX instruction: loads the pending exception
// reference. Has no side effects of its own.
class HLoadException FINAL : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(DataType::Type::kReference, SideEffects::None(), dex_pc) {}

  // A pending exception is by construction never null.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadException);
};
6335
// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
// Modeled with SideEffects::AllWrites so optimizations treat it as observable.
class HClearException FINAL : public HTemplateInstruction<0> {
 public:
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}

  DECLARE_INSTRUCTION(ClearException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClearException);
};
6348
6349class HThrow FINAL : public HTemplateInstruction<1> {
6350 public:
6351  HThrow(HInstruction* exception, uint32_t dex_pc)
6352      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
6353    SetRawInputAt(0, exception);
6354  }
6355
6356  bool IsControlFlow() const OVERRIDE { return true; }
6357
6358  bool NeedsEnvironment() const OVERRIDE { return true; }
6359
6360  bool CanThrow() const OVERRIDE { return true; }
6361
6362
6363  DECLARE_INSTRUCTION(Throw);
6364
6365 private:
6366  DISALLOW_COPY_AND_ASSIGN(HThrow);
6367};
6368
6369/**
6370 * Implementation strategies for the code generator of a HInstanceOf
6371 * or `HCheckCast`.
6372 */
6373enum class TypeCheckKind {
6374  kUnresolvedCheck,       // Check against an unresolved type.
6375  kExactCheck,            // Can do a single class compare.
6376  kClassHierarchyCheck,   // Can just walk the super class chain.
6377  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
6378  kInterfaceCheck,        // No optimization yet when checking against an interface.
6379  kArrayObjectCheck,      // Can just check if the array is not primitive.
6380  kArrayCheck,            // No optimization yet when checking against a generic array.
6381  kLast = kArrayCheck
6382};
6383
6384std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
6385
// Type test: yields a boolean telling whether `object` is an instance of the
// class designated by `constant`, using the strategy in `check_kind`.
class HInstanceOf FINAL : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(DataType::Type::kBool,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Two instance-of instructions are equal iff their inputs are; no extra
  // data participates in value-numbering beyond the inputs themselves.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Only checks that may call into the runtime need an environment.
  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime(GetTypeCheckKind());
  }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  static bool CanCallRuntime(TypeCheckKind check_kind) {
    // Mips currently does runtime calls for any other checks.
    return check_kind != TypeCheckKind::kExactCheck;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  // Packed-bit layout: check kind, then the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};
6439
// Narrows the static type information of a reference (used by reference type
// propagation); produces the same value as its input with a refined type.
class HBoundType FINAL : public HExpression<1> {
 public:
  explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(DataType::Type::kReference, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
    SetPackedFlag<kFlagUpperCanBeNull>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
    DCHECK_EQ(input->GetType(), DataType::Type::kReference);
    SetRawInputAt(0, input);
  }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  void SetCanBeNull(bool can_be_null) {
    // Cannot claim nullability beyond what the upper bound allows.
    DCHECK(GetUpperCanBeNull() || !can_be_null);
    SetPackedFlag<kFlagCanBeNull>(can_be_null);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then CanBeNull() cannot be true).
  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  ReferenceTypeInfo upper_bound_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};
6483
// Type check that throws on failure: verifies `object` is compatible with the
// class designated by `constant` (implements the check-cast DEX instruction).
class HCheckCast FINAL : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Equality for value numbering depends only on the inputs.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  // Used in code generation; null always passes a check-cast, so the null
  // check may be elided when the input is known non-null.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  // Packed-bit layout: check kind, then the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};
6528
6529/**
6530 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
6531 * @details We define the combined barrier types that are actually required
6532 * by the Java Memory Model, rather than using exactly the terminology from
6533 * the JSR-133 cookbook.  These should, in many cases, be replaced by acquire/release
6534 * primitives.  Note that the JSR-133 cookbook generally does not deal with
6535 * store atomicity issues, and the recipes there are not always entirely sufficient.
6536 * The current recipe is as follows:
6537 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
6538 * -# Use AnyAny barrier after volatile store.  (StoreLoad is as expensive.)
6539 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
6540 * -# Use StoreStore barrier after all stores but before return from any constructor whose
6541 *    class has final fields.
6542 * -# Use NTStoreStore to order non-temporal stores with respect to all later
6543 *    store-to-memory instructions.  Only generated together with non-temporal stores.
6544 */
6545enum MemBarrierKind {
6546  kAnyStore,
6547  kLoadAny,
6548  kStoreStore,
6549  kAnyAny,
6550  kNTStoreStore,
6551  kLastBarrierKind = kNTStoreStore
6552};
6553std::ostream& operator<<(std::ostream& os, const MemBarrierKind& kind);
6554
// Explicit memory barrier instruction of the given MemBarrierKind.
class HMemoryBarrier FINAL : public HTemplateInstruction<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(
            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
    SetPackedField<BarrierKindField>(barrier_kind);
  }

  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }

  DECLARE_INSTRUCTION(MemoryBarrier);

 private:
  // Packed-bit layout: the barrier kind is stored after the generic packed bits.
  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBarrierKindSize =
      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
      kFieldBarrierKind + kFieldBarrierKindSize;
  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
};
6579
6580// A constructor fence orders all prior stores to fields that could be accessed via a final field of
6581// the specified object(s), with respect to any subsequent store that might "publish"
6582// (i.e. make visible) the specified object to another thread.
6583//
6584// JLS 17.5.1 "Semantics of final fields" states that a freeze action happens
6585// for all final fields (that were set) at the end of the invoked constructor.
6586//
6587// The constructor fence models the freeze actions for the final fields of an object
6588// being constructed (semantically at the end of the constructor). Constructor fences
6589// have a per-object affinity; two separate objects being constructed get two separate
6590// constructor fences.
6591//
6592// (Note: that if calling a super-constructor or forwarding to another constructor,
6593// the freezes would happen at the end of *that* constructor being invoked).
6594//
6595// The memory model guarantees that when the object being constructed is "published" after
6596// constructor completion (i.e. escapes the current thread via a store), then any final field
6597// writes must be observable on other threads (once they observe that publication).
6598//
6599// Further, anything written before the freeze, and read by dereferencing through the final field,
6600// must also be visible (so final object field could itself have an object with non-final fields;
6601// yet the freeze must also extend to them).
6602//
6603// Constructor example:
6604//
6605//     class HasFinal {
6606//        final int field;                              Optimizing IR for <init>()V:
6607//        HasFinal() {
6608//          field = 123;                                HInstanceFieldSet(this, HasFinal.field, 123)
6609//          // freeze(this.field);                      HConstructorFence(this)
6610//        }                                             HReturn
6611//     }
6612//
6613// HConstructorFence can serve double duty as a fence for new-instance/new-array allocations of
6614// already-initialized classes; in that case the allocation must act as a "default-initializer"
6615// of the object which effectively writes the class pointer "final field".
6616//
// For example, we can model default-initialization as roughly the equivalent of the following:
6618//
6619//     class Object {
6620//       private final Class header;
6621//     }
6622//
6623//  Java code:                                           Optimizing IR:
6624//
6625//     T new_instance<T>() {
6626//       Object obj = allocate_memory(T.class.size);     obj = HInvoke(art_quick_alloc_object, T)
6627//       obj.header = T.class;                           // header write is done by above call.
6628//       // freeze(obj.header)                           HConstructorFence(obj)
6629//       return (T)obj;
6630//     }
6631//
6632// See also:
6633// * CompilerDriver::RequiresConstructorBarrier
6634// * QuasiAtomic::ThreadFenceForConstructor
6635//
class HConstructorFence FINAL : public HVariableInputSizeInstruction {
                                  // A fence has variable inputs because the inputs can be removed
                                  // after prepare_for_register_allocation phase.
                                  // (TODO: In the future a fence could freeze multiple objects
                                  //        after merging two fences together.)
 public:
  // `fence_object` is the reference that needs to be protected for correct publication.
  //
  // It makes sense in the following situations:
  // * <init> constructors, it's the "this" parameter (i.e. HParameterValue, s.t. IsThis() == true).
  // * new-instance-like instructions, it's the return value (i.e. HNewInstance).
  //
  // After construction the `fence_object` becomes the 0th input.
  // This is not an input in a real sense, but just a convenient place to stash the information
  // about the associated object.
  HConstructorFence(HInstruction* fence_object,
                    uint32_t dex_pc,
                    ArenaAllocator* allocator)
    // We strongly suspect there is not a more accurate way to describe the fine-grained reordering
    // constraints described in the class header. We claim that these SideEffects constraints
    // enforce a superset of the real constraints.
    //
    // The ordering described above is conservatively modeled with SideEffects as follows:
    //
    // * To prevent reordering of the publication stores:
    // ----> "Reads of objects" is the initial SideEffect.
    // * For every primitive final field store in the constructor:
    // ----> Union that field's type as a read (e.g. "Read of T") into the SideEffect.
    // * If there are any stores to reference final fields in the constructor:
    // ----> Use a more conservative "AllReads" SideEffect because any stores to any references
    //       that are reachable from `fence_object` also need to be prevented for reordering
    //       (and we do not want to do alias analysis to figure out what those stores are).
    //
    // In the implementation, this initially starts out as an "all reads" side effect; this is an
    // even more conservative approach than the one described above, and prevents all of the
    // above reordering without analyzing any of the instructions in the constructor.
    //
    // If in a later phase we discover that there are no writes to reference final fields,
    // we can refine the side effect to a smaller set of type reads (see above constraints).
      : HVariableInputSizeInstruction(SideEffects::AllReads(),
                                      dex_pc,
                                      allocator,
                                      /* number_of_inputs */ 1,
                                      kArenaAllocConstructorFenceInputs) {
    DCHECK(fence_object != nullptr);
    SetRawInputAt(0, fence_object);
  }

  // The object associated with this constructor fence.
  //
  // (Note: This will be null after the prepare_for_register_allocation phase,
  // as all constructor fence inputs are removed there).
  HInstruction* GetFenceObject() const {
    return InputAt(0);
  }

  // Find all the HConstructorFence uses (`fence_use`) for `this` and:
  // - Delete `fence_use` from `this`'s use list.
  // - Delete `this` from `fence_use`'s inputs list.
  // - If the `fence_use` is dead, remove it from the graph.
  //
  // A fence is considered dead once it no longer has any uses
  // and all of the inputs are dead.
  //
  // This must *not* be called during/after prepare_for_register_allocation,
  // because that removes all the inputs to the fences but the fence is actually
  // still considered live.
  //
  // Returns how many HConstructorFence instructions were removed from graph.
  static size_t RemoveConstructorFences(HInstruction* instruction);

  // Combine all inputs of `this` and `other` instruction and remove
  // `other` from the graph.
  //
  // Inputs are unique after the merge.
  //
  // Requirement: `this` must not be the same as `other`.
  void Merge(HConstructorFence* other);

  // Check if this constructor fence is protecting
  // an HNewInstance or HNewArray that is also the immediate
  // predecessor of `this`.
  //
  // If `ignore_inputs` is true, then the immediate predecessor doesn't need
  // to be one of the inputs of `this`.
  //
  // Returns the associated HNewArray or HNewInstance,
  // or null otherwise.
  HInstruction* GetAssociatedAllocation(bool ignore_inputs = false);

  DECLARE_INSTRUCTION(ConstructorFence);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstructorFence);
};
6731
6732class HMonitorOperation FINAL : public HTemplateInstruction<1> {
6733 public:
6734  enum class OperationKind {
6735    kEnter,
6736    kExit,
6737    kLast = kExit
6738  };
6739
6740  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
6741    : HTemplateInstruction(
6742          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6743          dex_pc) {
6744    SetPackedField<OperationKindField>(kind);
6745    SetRawInputAt(0, object);
6746  }
6747
6748  // Instruction may go into runtime, so we need an environment.
6749  bool NeedsEnvironment() const OVERRIDE { return true; }
6750
6751  bool CanThrow() const OVERRIDE {
6752    // Verifier guarantees that monitor-exit cannot throw.
6753    // This is important because it allows the HGraphBuilder to remove
6754    // a dead throw-catch loop generated for `synchronized` blocks/methods.
6755    return IsEnter();
6756  }
6757
6758  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
6759  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
6760
6761  DECLARE_INSTRUCTION(MonitorOperation);
6762
6763 private:
6764  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
6765  static constexpr size_t kFieldOperationKindSize =
6766      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
6767  static constexpr size_t kNumberOfMonitorOperationPackedBits =
6768      kFieldOperationKind + kFieldOperationKindSize;
6769  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6770                "Too many packed fields.");
6771  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
6772
6773 private:
6774  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
6775};
6776
// Branchless select: yields `true_value` or `false_value` depending on
// `condition` (analogous to a conditional move).
class HSelect FINAL : public HExpression<3> {
 public:
  HSelect(HInstruction* condition,
          HInstruction* true_value,
          HInstruction* false_value,
          uint32_t dex_pc)
      : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
    // Both value operands must agree on the (phi-normalized) type.
    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));

    // First input must be `true_value` or `false_value` to allow codegens to
    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
    // that architectures which implement HSelect as a conditional move also
    // will not need to invert the condition.
    SetRawInputAt(0, false_value);
    SetRawInputAt(1, true_value);
    SetRawInputAt(2, condition);
  }

  HInstruction* GetFalseValue() const { return InputAt(0); }
  HInstruction* GetTrueValue() const { return InputAt(1); }
  HInstruction* GetCondition() const { return InputAt(2); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // The result can be null if either selectable value can be null.
  bool CanBeNull() const OVERRIDE {
    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
  }

  DECLARE_INSTRUCTION(Select);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSelect);
};
6813
// A single source->destination move tracked by the parallel move resolver.
// A move can be "pending" (destination cleared while in progress) or
// "eliminated" (both operands cleared).
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               DataType::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  // Restores the destination saved by MarkPending(), ending the pending state.
  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  // Pending == destination cleared but source still valid (see MarkPending()).
  bool IsPending() const {
    DCHECK(source_.IsValid() || destination_.IsInvalid());
    return destination_.IsInvalid() && source_.IsValid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  DataType::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return DataType::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  DataType::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};
6887
std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);

// Initial capacity reserved for an HParallelMove's move list.
static constexpr size_t kDefaultNumberOfMoves = 4;
6891
// A set of moves to be performed as if simultaneously; destinations must be
// pairwise non-overlapping (enforced by DCHECKs in AddMove below).
class HParallelMove FINAL : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* allocator, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(allocator->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  // Appends a move. In debug builds, verifies that (a) at most one move per
  // instruction is recorded — except for the spill-slot special case — and
  // (b) no two moves in this parallel move have overlapping destinations.
  void AddMove(Location source,
               Location destination,
               DataType::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};
6947
6948}  // namespace art
6949
6950#include "nodes_vector.h"
6951
6952#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
6953#include "nodes_shared.h"
6954#endif
6955#ifdef ART_ENABLE_CODEGEN_mips
6956#include "nodes_mips.h"
6957#endif
6958#ifdef ART_ENABLE_CODEGEN_x86
6959#include "nodes_x86.h"
6960#endif
6961
6962namespace art {
6963
6964class OptimizingCompilerStats;
6965
// Base class for visitors over all instructions of an HGraph. Subclasses
// override per-instruction Visit##name methods (generated below from
// FOR_EACH_INSTRUCTION); the default for each forwards to VisitInstruction.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
      : stats_(stats),
        graph_(graph) {}
  virtual ~HGraphVisitor() {}

  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 protected:
  // Optional statistics sink; may be null.
  OptimizingCompilerStats* stats_;

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
7000
// Visitor whose default implementation for each concrete instruction class
// delegates to the visit function of its super class, so overriding the
// visit function of a base instruction class handles all its subclasses.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
      : HGraphVisitor(graph, stats) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
7018
// Iterator over the blocks that are part of the loop; this includes blocks
// that are part of an inner loop. The blocks are iterated in order of their
// block id.
7022class HBlocksInLoopIterator : public ValueObject {
7023 public:
7024  explicit HBlocksInLoopIterator(const HLoopInformation& info)
7025      : blocks_in_loop_(info.GetBlocks()),
7026        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
7027        index_(0) {
7028    if (!blocks_in_loop_.IsBitSet(index_)) {
7029      Advance();
7030    }
7031  }
7032
7033  bool Done() const { return index_ == blocks_.size(); }
7034  HBasicBlock* Current() const { return blocks_[index_]; }
7035  void Advance() {
7036    ++index_;
7037    for (size_t e = blocks_.size(); index_ < e; ++index_) {
7038      if (blocks_in_loop_.IsBitSet(index_)) {
7039        break;
7040      }
7041    }
7042  }
7043
7044 private:
7045  const BitVector& blocks_in_loop_;
7046  const ArenaVector<HBasicBlock*>& blocks_;
7047  size_t index_;
7048
7049  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
7050};
7051
// Iterator over the blocks that are part of the loop; this includes blocks
// that are part of an inner loop. The blocks are iterated in reverse
// post order.
7055class HBlocksInLoopReversePostOrderIterator : public ValueObject {
7056 public:
7057  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
7058      : blocks_in_loop_(info.GetBlocks()),
7059        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
7060        index_(0) {
7061    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
7062      Advance();
7063    }
7064  }
7065
7066  bool Done() const { return index_ == blocks_.size(); }
7067  HBasicBlock* Current() const { return blocks_[index_]; }
7068  void Advance() {
7069    ++index_;
7070    for (size_t e = blocks_.size(); index_ < e; ++index_) {
7071      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
7072        break;
7073      }
7074    }
7075  }
7076
7077 private:
7078  const BitVector& blocks_in_loop_;
7079  const ArenaVector<HBasicBlock*>& blocks_;
7080  size_t index_;
7081
7082  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
7083};
7084
7085// Returns int64_t value of a properly typed constant.
7086inline int64_t Int64FromConstant(HConstant* constant) {
7087  if (constant->IsIntConstant()) {
7088    return constant->AsIntConstant()->GetValue();
7089  } else if (constant->IsLongConstant()) {
7090    return constant->AsLongConstant()->GetValue();
7091  } else {
7092    DCHECK(constant->IsNullConstant()) << constant->DebugName();
7093    return 0;
7094  }
7095}
7096
7097// Returns true iff instruction is an integral constant (and sets value on success).
7098inline bool IsInt64AndGet(HInstruction* instruction, /*out*/ int64_t* value) {
7099  if (instruction->IsIntConstant()) {
7100    *value = instruction->AsIntConstant()->GetValue();
7101    return true;
7102  } else if (instruction->IsLongConstant()) {
7103    *value = instruction->AsLongConstant()->GetValue();
7104    return true;
7105  } else if (instruction->IsNullConstant()) {
7106    *value = 0;
7107    return true;
7108  }
7109  return false;
7110}
7111
7112// Returns true iff instruction is the given integral constant.
7113inline bool IsInt64Value(HInstruction* instruction, int64_t value) {
7114  int64_t val = 0;
7115  return IsInt64AndGet(instruction, &val) && val == value;
7116}
7117
7118// Returns true iff instruction is a zero bit pattern.
7119inline bool IsZeroBitPattern(HInstruction* instruction) {
7120  return instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern();
7121}
7122
7123#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
7124  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
7125  inline const H##type* HInstruction::As##type() const {                       \
7126    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
7127  }                                                                            \
7128  inline H##type* HInstruction::As##type() {                                   \
7129    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
7130  }
7131
7132  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
7133#undef INSTRUCTION_TYPE_CHECK
7134
7135// Create space in `blocks` for adding `number_of_new_blocks` entries
7136// starting at location `at`. Blocks after `at` are moved accordingly.
7137inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
7138                        size_t number_of_new_blocks,
7139                        size_t after) {
7140  DCHECK_LT(after, blocks->size());
7141  size_t old_size = blocks->size();
7142  size_t new_size = old_size + number_of_new_blocks;
7143  blocks->resize(new_size);
7144  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
7145}
7146
7147/*
7148 * Hunt "under the hood" of array lengths (leading to array references),
7149 * null checks (also leading to array references), and new arrays
7150 * (leading to the actual length). This makes it more likely related
7151 * instructions become actually comparable.
7152 */
7153inline HInstruction* HuntForDeclaration(HInstruction* instruction) {
7154  while (instruction->IsArrayLength() ||
7155         instruction->IsNullCheck() ||
7156         instruction->IsNewArray()) {
7157    instruction = instruction->IsNewArray()
7158        ? instruction->AsNewArray()->GetLength()
7159        : instruction->InputAt(0);
7160  }
7161  return instruction;
7162}
7163
7164void RemoveEnvironmentUses(HInstruction* instruction);
7165bool HasEnvironmentUsedByOthers(HInstruction* instruction);
7166void ResetEnvironmentInputRecords(HInstruction* instruction);
7167
7168}  // namespace art
7169
7170#endif  // ART_COMPILER_OPTIMIZING_NODES_H_
7171