// nodes.h, revision ec32f6402382303608544fdac5a88067781bdec5
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
#define ART_COMPILER_OPTIMIZING_NODES_H_

#include <algorithm>
#include <array>
#include <type_traits>

#include "base/arena_bit_vector.h"
#include "base/arena_containers.h"
#include "base/arena_object.h"
#include "base/array_ref.h"
#include "base/iteration_range.h"
#include "base/stl_util.h"
#include "base/transform_array_ref.h"
#include "dex_file.h"
#include "dex_file_types.h"
#include "deoptimization_kind.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "handle.h"
#include "handle_scope.h"
#include "invoke_type.h"
#include "intrinsics_enum.h"
#include "locations.h"
#include "method_reference.h"
#include "mirror/class.h"
#include "offsets.h"
#include "primitive.h"
#include "utils/intrusive_forward_list.h"

namespace art {

class GraphChecker;
class HBasicBlock;
class HConstructorFence;
class HCurrentMethod;
class HDoubleConstant;
class HEnvironment;
class HFloatConstant;
class HGraphBuilder;
class HGraphVisitor;
class HInstruction;
class HIntConstant;
class HInvoke;
class HLongConstant;
class HNullConstant;
class HParameterValue;
class HPhi;
class HSuspendCheck;
class HTryBoundary;
class LiveInterval;
class LocationSummary;
class SlowPathCode;
class SsaBuilder;

namespace mirror {
class DexCache;
}  // namespace mirror

static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;

static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

static constexpr uint32_t kNoDexPc = -1;

94inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
95  // For the purposes of the compiler, the dex files must actually be the same object
96  // if we want to safely treat them as the same. This is especially important for JIT
97  // as custom class loaders can open the same underlying file (or memory) multiple
98  // times and provide different class resolution but no two class loaders should ever
99  // use the same DexFile object - doing so is an unsupported hack that can lead to
100  // all sorts of weird failures.
101  return &lhs == &rhs;
102}
103
// Conditions usable by an HIf / comparison instruction.
enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
  // First and last aliases.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};

// Status returned by graph analyses such as BuildDominatorTree() and AnalyzeLoops().
enum GraphAnalysisResult {
  kAnalysisSkipped,
  kAnalysisInvalidBytecode,
  kAnalysisFailThrowCatchLoop,
  kAnalysisFailAmbiguousArrayOp,
  kAnalysisSuccess,
};

// Casts `x` to the corresponding unsigned type of T (e.g. int -> unsigned int),
// preserving the bit pattern.
template <typename T>
static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
  return static_cast<typename std::make_unsigned<T>::type>(x);
}

136class HInstructionList : public ValueObject {
137 public:
138  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}
139
140  void AddInstruction(HInstruction* instruction);
141  void RemoveInstruction(HInstruction* instruction);
142
143  // Insert `instruction` before/after an existing instruction `cursor`.
144  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
145  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
146
147  // Return true if this list contains `instruction`.
148  bool Contains(HInstruction* instruction) const;
149
150  // Return true if `instruction1` is found before `instruction2` in
151  // this instruction list and false otherwise.  Abort if none
152  // of these instructions is found.
153  bool FoundBefore(const HInstruction* instruction1,
154                   const HInstruction* instruction2) const;
155
156  bool IsEmpty() const { return first_instruction_ == nullptr; }
157  void Clear() { first_instruction_ = last_instruction_ = nullptr; }
158
159  // Update the block of all instructions to be `block`.
160  void SetBlockOfInstructions(HBasicBlock* block) const;
161
162  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
163  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
164  void Add(const HInstructionList& instruction_list);
165
166  // Return the number of instructions in the list. This is an expensive operation.
167  size_t CountSize() const;
168
169 private:
170  HInstruction* first_instruction_;
171  HInstruction* last_instruction_;
172
173  friend class HBasicBlock;
174  friend class HGraph;
175  friend class HInstruction;
176  friend class HInstructionIterator;
177  friend class HInstructionIteratorHandleChanges;
178  friend class HBackwardInstructionIterator;
179
180  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
181};
182
183class ReferenceTypeInfo : ValueObject {
184 public:
185  typedef Handle<mirror::Class> TypeHandle;
186
187  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);
188
189  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
190    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
191  }
192
193  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
194    return ReferenceTypeInfo(type_handle, is_exact);
195  }
196
197  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }
198
199  static bool IsValidHandle(TypeHandle handle) {
200    return handle.GetReference() != nullptr;
201  }
202
203  bool IsValid() const {
204    return IsValidHandle(type_handle_);
205  }
206
207  bool IsExact() const { return is_exact_; }
208
209  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
210    DCHECK(IsValid());
211    return GetTypeHandle()->IsObjectClass();
212  }
213
214  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
215    DCHECK(IsValid());
216    return GetTypeHandle()->IsStringClass();
217  }
218
219  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
220    DCHECK(IsValid());
221    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
222  }
223
224  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
225    DCHECK(IsValid());
226    return GetTypeHandle()->IsInterface();
227  }
228
229  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
230    DCHECK(IsValid());
231    return GetTypeHandle()->IsArrayClass();
232  }
233
234  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
235    DCHECK(IsValid());
236    return GetTypeHandle()->IsPrimitiveArray();
237  }
238
239  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
240    DCHECK(IsValid());
241    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
242  }
243
244  bool CanArrayHold(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
245    DCHECK(IsValid());
246    if (!IsExact()) return false;
247    if (!IsArrayClass()) return false;
248    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
249  }
250
251  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
252    DCHECK(IsValid());
253    if (!IsExact()) return false;
254    if (!IsArrayClass()) return false;
255    if (!rti.IsArrayClass()) return false;
256    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
257        rti.GetTypeHandle()->GetComponentType());
258  }
259
260  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }
261
262  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
263    DCHECK(IsValid());
264    DCHECK(rti.IsValid());
265    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
266  }
267
268  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
269    DCHECK(IsValid());
270    DCHECK(rti.IsValid());
271    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
272        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
273  }
274
275  // Returns true if the type information provide the same amount of details.
276  // Note that it does not mean that the instructions have the same actual type
277  // (because the type can be the result of a merge).
278  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
279    if (!IsValid() && !rti.IsValid()) {
280      // Invalid types are equal.
281      return true;
282    }
283    if (!IsValid() || !rti.IsValid()) {
284      // One is valid, the other not.
285      return false;
286    }
287    return IsExact() == rti.IsExact()
288        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
289  }
290
291 private:
292  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
293  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
294      : type_handle_(type_handle), is_exact_(is_exact) { }
295
296  // The class of the object.
297  TypeHandle type_handle_;
298  // Whether or not the type is exact or a superclass of the actual type.
299  // Whether or not we have any information about this type.
300  bool is_exact_;
301};
302
std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);

305// Control-flow graph of a method. Contains a list of basic blocks.
306class HGraph : public ArenaObject<kArenaAllocGraph> {
307 public:
308  HGraph(ArenaAllocator* arena,
309         const DexFile& dex_file,
310         uint32_t method_idx,
311         InstructionSet instruction_set,
312         InvokeType invoke_type = kInvalidInvokeType,
313         bool debuggable = false,
314         bool osr = false,
315         int start_instruction_id = 0)
316      : arena_(arena),
317        blocks_(arena->Adapter(kArenaAllocBlockList)),
318        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
319        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
320        entry_block_(nullptr),
321        exit_block_(nullptr),
322        maximum_number_of_out_vregs_(0),
323        number_of_vregs_(0),
324        number_of_in_vregs_(0),
325        temporaries_vreg_slots_(0),
326        has_bounds_checks_(false),
327        has_try_catch_(false),
328        has_simd_(false),
329        has_loops_(false),
330        has_irreducible_loops_(false),
331        debuggable_(debuggable),
332        current_instruction_id_(start_instruction_id),
333        dex_file_(dex_file),
334        method_idx_(method_idx),
335        invoke_type_(invoke_type),
336        in_ssa_form_(false),
337        number_of_cha_guards_(0),
338        instruction_set_(instruction_set),
339        cached_null_constant_(nullptr),
340        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
341        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
342        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
343        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
344        cached_current_method_(nullptr),
345        art_method_(nullptr),
346        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
347        osr_(osr),
348        cha_single_implementation_list_(arena->Adapter(kArenaAllocCHA)) {
349    blocks_.reserve(kDefaultNumberOfBlocks);
350  }
351
352  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
353  void InitializeInexactObjectRTI(VariableSizedHandleScope* handles);
354
355  ArenaAllocator* GetArena() const { return arena_; }
356  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }
357
358  bool IsInSsaForm() const { return in_ssa_form_; }
359  void SetInSsaForm() { in_ssa_form_ = true; }
360
361  HBasicBlock* GetEntryBlock() const { return entry_block_; }
362  HBasicBlock* GetExitBlock() const { return exit_block_; }
363  bool HasExitBlock() const { return exit_block_ != nullptr; }
364
365  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
366  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }
367
368  void AddBlock(HBasicBlock* block);
369
370  void ComputeDominanceInformation();
371  void ClearDominanceInformation();
372  void ClearLoopInformation();
373  void FindBackEdges(ArenaBitVector* visited);
374  GraphAnalysisResult BuildDominatorTree();
375  void SimplifyCFG();
376  void SimplifyCatchBlocks();
377
378  // Analyze all natural loops in this graph. Returns a code specifying that it
379  // was successful or the reason for failure. The method will fail if a loop
380  // is a throw-catch loop, i.e. the header is a catch block.
381  GraphAnalysisResult AnalyzeLoops() const;
382
383  // Iterate over blocks to compute try block membership. Needs reverse post
384  // order and loop information.
385  void ComputeTryBlockInformation();
386
387  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
388  // Returns the instruction to replace the invoke expression or null if the
389  // invoke is for a void method. Note that the caller is responsible for replacing
390  // and removing the invoke instruction.
391  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);
392
393  // Update the loop and try membership of `block`, which was spawned from `reference`.
394  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
395  // should be the new back edge.
396  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
397                                             HBasicBlock* reference,
398                                             bool replace_if_back_edge);
399
400  // Need to add a couple of blocks to test if the loop body is entered and
401  // put deoptimization instructions, etc.
402  void TransformLoopHeaderForBCE(HBasicBlock* header);
403
404  // Adds a new loop directly after the loop with the given header and exit.
405  // Returns the new preheader.
406  HBasicBlock* TransformLoopForVectorization(HBasicBlock* header,
407                                             HBasicBlock* body,
408                                             HBasicBlock* exit);
409
410  // Removes `block` from the graph. Assumes `block` has been disconnected from
411  // other blocks and has no instructions or phis.
412  void DeleteDeadEmptyBlock(HBasicBlock* block);
413
414  // Splits the edge between `block` and `successor` while preserving the
415  // indices in the predecessor/successor lists. If there are multiple edges
416  // between the blocks, the lowest indices are used.
417  // Returns the new block which is empty and has the same dex pc as `successor`.
418  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);
419
420  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
421  void SimplifyLoop(HBasicBlock* header);
422
423  int32_t GetNextInstructionId() {
424    DCHECK_NE(current_instruction_id_, INT32_MAX);
425    return current_instruction_id_++;
426  }
427
428  int32_t GetCurrentInstructionId() const {
429    return current_instruction_id_;
430  }
431
432  void SetCurrentInstructionId(int32_t id) {
433    DCHECK_GE(id, current_instruction_id_);
434    current_instruction_id_ = id;
435  }
436
437  uint16_t GetMaximumNumberOfOutVRegs() const {
438    return maximum_number_of_out_vregs_;
439  }
440
441  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
442    maximum_number_of_out_vregs_ = new_value;
443  }
444
445  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
446    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
447  }
448
449  void UpdateTemporariesVRegSlots(size_t slots) {
450    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
451  }
452
453  size_t GetTemporariesVRegSlots() const {
454    DCHECK(!in_ssa_form_);
455    return temporaries_vreg_slots_;
456  }
457
458  void SetNumberOfVRegs(uint16_t number_of_vregs) {
459    number_of_vregs_ = number_of_vregs;
460  }
461
462  uint16_t GetNumberOfVRegs() const {
463    return number_of_vregs_;
464  }
465
466  void SetNumberOfInVRegs(uint16_t value) {
467    number_of_in_vregs_ = value;
468  }
469
470  uint16_t GetNumberOfInVRegs() const {
471    return number_of_in_vregs_;
472  }
473
474  uint16_t GetNumberOfLocalVRegs() const {
475    DCHECK(!in_ssa_form_);
476    return number_of_vregs_ - number_of_in_vregs_;
477  }
478
479  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
480    return reverse_post_order_;
481  }
482
483  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() {
484    DCHECK(GetReversePostOrder()[0] == entry_block_);
485    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
486  }
487
488  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
489    return ReverseRange(GetReversePostOrder());
490  }
491
492  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
493    return linear_order_;
494  }
495
496  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
497    return ReverseRange(GetLinearOrder());
498  }
499
500  bool HasBoundsChecks() const {
501    return has_bounds_checks_;
502  }
503
504  void SetHasBoundsChecks(bool value) {
505    has_bounds_checks_ = value;
506  }
507
508  bool IsDebuggable() const { return debuggable_; }
509
510  // Returns a constant of the given type and value. If it does not exist
511  // already, it is created and inserted into the graph. This method is only for
512  // integral types.
513  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);
514
515  // TODO: This is problematic for the consistency of reference type propagation
516  // because it can be created anytime after the pass and thus it will be left
517  // with an invalid type.
518  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);
519
520  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
521    return CreateConstant(value, &cached_int_constants_, dex_pc);
522  }
523  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
524    return CreateConstant(value, &cached_long_constants_, dex_pc);
525  }
526  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
527    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
528  }
529  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
530    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
531  }
532
533  HCurrentMethod* GetCurrentMethod();
534
535  const DexFile& GetDexFile() const {
536    return dex_file_;
537  }
538
539  uint32_t GetMethodIdx() const {
540    return method_idx_;
541  }
542
543  // Get the method name (without the signature), e.g. "<init>"
544  const char* GetMethodName() const;
545
546  // Get the pretty method name (class + name + optionally signature).
547  std::string PrettyMethod(bool with_signature = true) const;
548
549  InvokeType GetInvokeType() const {
550    return invoke_type_;
551  }
552
553  InstructionSet GetInstructionSet() const {
554    return instruction_set_;
555  }
556
557  bool IsCompilingOsr() const { return osr_; }
558
559  ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
560    return cha_single_implementation_list_;
561  }
562
563  void AddCHASingleImplementationDependency(ArtMethod* method) {
564    cha_single_implementation_list_.insert(method);
565  }
566
567  bool HasShouldDeoptimizeFlag() const {
568    return number_of_cha_guards_ != 0;
569  }
570
571  bool HasTryCatch() const { return has_try_catch_; }
572  void SetHasTryCatch(bool value) { has_try_catch_ = value; }
573
574  bool HasSIMD() const { return has_simd_; }
575  void SetHasSIMD(bool value) { has_simd_ = value; }
576
577  bool HasLoops() const { return has_loops_; }
578  void SetHasLoops(bool value) { has_loops_ = value; }
579
580  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
581  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }
582
583  ArtMethod* GetArtMethod() const { return art_method_; }
584  void SetArtMethod(ArtMethod* method) { art_method_ = method; }
585
586  // Returns an instruction with the opposite Boolean value from 'cond'.
587  // The instruction has been inserted into the graph, either as a constant, or
588  // before cursor.
589  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);
590
591  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }
592
593  uint32_t GetNumberOfCHAGuards() { return number_of_cha_guards_; }
594  void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
595  void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }
596
597 private:
598  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
599  void RemoveDeadBlocks(const ArenaBitVector& visited);
600
601  template <class InstructionType, typename ValueType>
602  InstructionType* CreateConstant(ValueType value,
603                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
604                                  uint32_t dex_pc = kNoDexPc) {
605    // Try to find an existing constant of the given value.
606    InstructionType* constant = nullptr;
607    auto cached_constant = cache->find(value);
608    if (cached_constant != cache->end()) {
609      constant = cached_constant->second;
610    }
611
612    // If not found or previously deleted, create and cache a new instruction.
613    // Don't bother reviving a previously deleted instruction, for simplicity.
614    if (constant == nullptr || constant->GetBlock() == nullptr) {
615      constant = new (arena_) InstructionType(value, dex_pc);
616      cache->Overwrite(value, constant);
617      InsertConstant(constant);
618    }
619    return constant;
620  }
621
622  void InsertConstant(HConstant* instruction);
623
624  // Cache a float constant into the graph. This method should only be
625  // called by the SsaBuilder when creating "equivalent" instructions.
626  void CacheFloatConstant(HFloatConstant* constant);
627
628  // See CacheFloatConstant comment.
629  void CacheDoubleConstant(HDoubleConstant* constant);
630
631  ArenaAllocator* const arena_;
632
633  // List of blocks in insertion order.
634  ArenaVector<HBasicBlock*> blocks_;
635
636  // List of blocks to perform a reverse post order tree traversal.
637  ArenaVector<HBasicBlock*> reverse_post_order_;
638
639  // List of blocks to perform a linear order tree traversal. Unlike the reverse
640  // post order, this order is not incrementally kept up-to-date.
641  ArenaVector<HBasicBlock*> linear_order_;
642
643  HBasicBlock* entry_block_;
644  HBasicBlock* exit_block_;
645
646  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
647  uint16_t maximum_number_of_out_vregs_;
648
649  // The number of virtual registers in this method. Contains the parameters.
650  uint16_t number_of_vregs_;
651
652  // The number of virtual registers used by parameters of this method.
653  uint16_t number_of_in_vregs_;
654
655  // Number of vreg size slots that the temporaries use (used in baseline compiler).
656  size_t temporaries_vreg_slots_;
657
658  // Flag whether there are bounds checks in the graph. We can skip
659  // BCE if it's false. It's only best effort to keep it up to date in
660  // the presence of code elimination so there might be false positives.
661  bool has_bounds_checks_;
662
663  // Flag whether there are try/catch blocks in the graph. We will skip
664  // try/catch-related passes if it's false. It's only best effort to keep
665  // it up to date in the presence of code elimination so there might be
666  // false positives.
667  bool has_try_catch_;
668
669  // Flag whether SIMD instructions appear in the graph. If true, the
670  // code generators may have to be more careful spilling the wider
671  // contents of SIMD registers.
672  bool has_simd_;
673
674  // Flag whether there are any loops in the graph. We can skip loop
675  // optimization if it's false. It's only best effort to keep it up
676  // to date in the presence of code elimination so there might be false
677  // positives.
678  bool has_loops_;
679
680  // Flag whether there are any irreducible loops in the graph. It's only
681  // best effort to keep it up to date in the presence of code elimination
682  // so there might be false positives.
683  bool has_irreducible_loops_;
684
685  // Indicates whether the graph should be compiled in a way that
686  // ensures full debuggability. If false, we can apply more
687  // aggressive optimizations that may limit the level of debugging.
688  const bool debuggable_;
689
690  // The current id to assign to a newly added instruction. See HInstruction.id_.
691  int32_t current_instruction_id_;
692
693  // The dex file from which the method is from.
694  const DexFile& dex_file_;
695
696  // The method index in the dex file.
697  const uint32_t method_idx_;
698
699  // If inlined, this encodes how the callee is being invoked.
700  const InvokeType invoke_type_;
701
702  // Whether the graph has been transformed to SSA form. Only used
703  // in debug mode to ensure we are not using properties only valid
704  // for non-SSA form (like the number of temporaries).
705  bool in_ssa_form_;
706
707  // Number of CHA guards in the graph. Used to short-circuit the
708  // CHA guard optimization pass when there is no CHA guard left.
709  uint32_t number_of_cha_guards_;
710
711  const InstructionSet instruction_set_;
712
713  // Cached constants.
714  HNullConstant* cached_null_constant_;
715  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
716  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
717  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
718  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;
719
720  HCurrentMethod* cached_current_method_;
721
722  // The ArtMethod this graph is for. Note that for AOT, it may be null,
723  // for example for methods whose declaring class could not be resolved
724  // (such as when the superclass could not be found).
725  ArtMethod* art_method_;
726
727  // Keep the RTI of inexact Object to avoid having to pass stack handle
728  // collection pointer to passes which may create NullConstant.
729  ReferenceTypeInfo inexact_object_rti_;
730
731  // Whether we are compiling this graph for on stack replacement: this will
732  // make all loops seen as irreducible and emit special stack maps to mark
733  // compiled code entries which the interpreter can directly jump to.
734  const bool osr_;
735
736  // List of methods that are assumed to have single implementation.
737  ArenaSet<ArtMethod*> cha_single_implementation_list_;
738
739  friend class SsaBuilder;           // For caching constants.
740  friend class SsaLivenessAnalysis;  // For the linear order.
741  friend class HInliner;             // For the reverse post order.
742  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
743  DISALLOW_COPY_AND_ASSIGN(HGraph);
744};
745
// Loop metadata attached to a loop-header HBasicBlock: the header block, the
// set of back edges, an optional suspend check, and — once Populate() has
// run — the bit set of all blocks belonging to the loop.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        contains_irreducible_loop_(false),
        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        // NOTE(review): the bit vector reuses the back-edges arena alloc kind;
        // presumably intentional stat-bucket sharing — confirm.
        blocks_(graph->GetArena(), graph->GetBlocks().size(), true, kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  // Irreducibility flags; presumably set while the loop is populated — see
  // Populate() / PopulateIrreducibleRecursive() below.
  bool IsIrreducible() const { return irreducible_; }
  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  // The HSuspendCheck associated with this loop, if one has been set.
  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  // Returns the loop pre-header block. Defined out of line (nodes.cc).
  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  // Bit set of block ids belonging to this loop (valid once populated).
  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  // A populated loop has at least one bit set in `blocks_`.
  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

  bool HasExitEdge() const;

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  bool contains_irreducible_loop_;
  ArenaVector<HBasicBlock*> back_edges_;
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};
853
854// Stores try/catch information for basic blocks.
855// Note that HGraph is constructed so that catch blocks cannot simultaneously
856// be try blocks.
857class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
858 public:
859  // Try block information constructor.
860  explicit TryCatchInformation(const HTryBoundary& try_entry)
861      : try_entry_(&try_entry),
862        catch_dex_file_(nullptr),
863        catch_type_index_(DexFile::kDexNoIndex16) {
864    DCHECK(try_entry_ != nullptr);
865  }
866
867  // Catch block information constructor.
868  TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
869      : try_entry_(nullptr),
870        catch_dex_file_(&dex_file),
871        catch_type_index_(catch_type_index) {}
872
873  bool IsTryBlock() const { return try_entry_ != nullptr; }
874
875  const HTryBoundary& GetTryEntry() const {
876    DCHECK(IsTryBlock());
877    return *try_entry_;
878  }
879
880  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }
881
882  bool IsCatchAllTypeIndex() const {
883    DCHECK(IsCatchBlock());
884    return !catch_type_index_.IsValid();
885  }
886
887  dex::TypeIndex GetCatchTypeIndex() const {
888    DCHECK(IsCatchBlock());
889    return catch_type_index_;
890  }
891
892  const DexFile& GetCatchDexFile() const {
893    DCHECK(IsCatchBlock());
894    return *catch_dex_file_;
895  }
896
897 private:
898  // One of possibly several TryBoundary instructions entering the block's try.
899  // Only set for try blocks.
900  const HTryBoundary* try_entry_;
901
902  // Exception type information. Only set for catch blocks.
903  const DexFile* catch_dex_file_;
904  const dex::TypeIndex catch_type_index_;
905};
906
// Sentinel lifetime position meaning "no lifetime assigned yet".
// Explicit cast for consistency with kInvalidBlockId below and to avoid an
// implicit signed-to-unsigned conversion.
static constexpr size_t kNoLifetime = static_cast<size_t>(-1);
// Sentinel id for blocks that have not yet been assigned an id by the graph.
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
909
// A block in a method. Contains the list of instructions represented
// as a doubly-linked list. Each block knows its predecessors and
// successors.
913
class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  // Views over `successors_` split by edge kind; defined out of line.
  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  // Whether `block` appears in `successors_` at or after index `start_from`.
  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  // Registers `back_edge` as a back edge targeting this block, lazily
  // creating the loop information with `this` as header on first use.
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  // NOTE(review): takes `int` while `block_id_` is uint32_t; implicit
  // conversion — presumably historical, confirm before tightening.
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  // NOTE(review): returns `int` although the underlying count is size_t.
  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  // Appends an edge this->block, updating both endpoints.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  // Redirects the edge this->existing to this->new_block, keeping the
  // successor index stable on this side.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  // Redirects the edge existing->this to new_block->this, keeping the
  // predecessor index stable on this side.
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  // Note: updates only this side of the edge; `block` keeps its successor.
  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  // Note: updates only this side of the edge; `block` keeps its predecessor.
  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  // Appends an edge block->this, updating both endpoints.
  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merges the instructions of `other` at the end of `this`.
  void MergeInstructionsWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  // Null when the block is not part of any loop.
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  // Lifetime positions assigned by liveness analysis; kNoLifetime until then.
  size_t lifetime_start_;
  size_t lifetime_end_;
  // Null when the block is neither a try block nor a catch block.
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};
1265
1266// Iterates over the LoopInformation of all loops which contain 'block'
1267// from the innermost to the outermost.
1268class HLoopInformationOutwardIterator : public ValueObject {
1269 public:
1270  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1271      : current_(block.GetLoopInformation()) {}
1272
1273  bool Done() const { return current_ == nullptr; }
1274
1275  void Advance() {
1276    DCHECK(!Done());
1277    current_ = current_->GetPreHeader()->GetLoopInformation();
1278  }
1279
1280  HLoopInformation* Current() const {
1281    DCHECK(!Done());
1282    return current_;
1283  }
1284
1285 private:
1286  HLoopInformation* current_;
1287
1288  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1289};
1290
1291#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
1292  M(Above, Condition)                                                   \
1293  M(AboveOrEqual, Condition)                                            \
1294  M(Add, BinaryOperation)                                               \
1295  M(And, BinaryOperation)                                               \
1296  M(ArrayGet, Instruction)                                              \
1297  M(ArrayLength, Instruction)                                           \
1298  M(ArraySet, Instruction)                                              \
1299  M(Below, Condition)                                                   \
1300  M(BelowOrEqual, Condition)                                            \
1301  M(BooleanNot, UnaryOperation)                                         \
1302  M(BoundsCheck, Instruction)                                           \
1303  M(BoundType, Instruction)                                             \
1304  M(CheckCast, Instruction)                                             \
1305  M(ClassTableGet, Instruction)                                         \
1306  M(ClearException, Instruction)                                        \
1307  M(ClinitCheck, Instruction)                                           \
1308  M(Compare, BinaryOperation)                                           \
1309  M(ConstructorFence, Instruction)                                      \
1310  M(CurrentMethod, Instruction)                                         \
1311  M(ShouldDeoptimizeFlag, Instruction)                                  \
1312  M(Deoptimize, Instruction)                                            \
1313  M(Div, BinaryOperation)                                               \
1314  M(DivZeroCheck, Instruction)                                          \
1315  M(DoubleConstant, Constant)                                           \
1316  M(Equal, Condition)                                                   \
1317  M(Exit, Instruction)                                                  \
1318  M(FloatConstant, Constant)                                            \
1319  M(Goto, Instruction)                                                  \
1320  M(GreaterThan, Condition)                                             \
1321  M(GreaterThanOrEqual, Condition)                                      \
1322  M(If, Instruction)                                                    \
1323  M(InstanceFieldGet, Instruction)                                      \
1324  M(InstanceFieldSet, Instruction)                                      \
1325  M(InstanceOf, Instruction)                                            \
1326  M(IntConstant, Constant)                                              \
1327  M(InvokeUnresolved, Invoke)                                           \
1328  M(InvokeInterface, Invoke)                                            \
1329  M(InvokeStaticOrDirect, Invoke)                                       \
1330  M(InvokeVirtual, Invoke)                                              \
1331  M(InvokePolymorphic, Invoke)                                          \
1332  M(LessThan, Condition)                                                \
1333  M(LessThanOrEqual, Condition)                                         \
1334  M(LoadClass, Instruction)                                             \
1335  M(LoadException, Instruction)                                         \
1336  M(LoadString, Instruction)                                            \
1337  M(LongConstant, Constant)                                             \
1338  M(MemoryBarrier, Instruction)                                         \
1339  M(MonitorOperation, Instruction)                                      \
1340  M(Mul, BinaryOperation)                                               \
1341  M(NativeDebugInfo, Instruction)                                       \
1342  M(Neg, UnaryOperation)                                                \
1343  M(NewArray, Instruction)                                              \
1344  M(NewInstance, Instruction)                                           \
1345  M(Not, UnaryOperation)                                                \
1346  M(NotEqual, Condition)                                                \
1347  M(NullConstant, Instruction)                                          \
1348  M(NullCheck, Instruction)                                             \
1349  M(Or, BinaryOperation)                                                \
1350  M(PackedSwitch, Instruction)                                          \
1351  M(ParallelMove, Instruction)                                          \
1352  M(ParameterValue, Instruction)                                        \
1353  M(Phi, Instruction)                                                   \
1354  M(Rem, BinaryOperation)                                               \
1355  M(Return, Instruction)                                                \
1356  M(ReturnVoid, Instruction)                                            \
1357  M(Ror, BinaryOperation)                                               \
1358  M(Shl, BinaryOperation)                                               \
1359  M(Shr, BinaryOperation)                                               \
1360  M(StaticFieldGet, Instruction)                                        \
1361  M(StaticFieldSet, Instruction)                                        \
1362  M(UnresolvedInstanceFieldGet, Instruction)                            \
1363  M(UnresolvedInstanceFieldSet, Instruction)                            \
1364  M(UnresolvedStaticFieldGet, Instruction)                              \
1365  M(UnresolvedStaticFieldSet, Instruction)                              \
1366  M(Select, Instruction)                                                \
1367  M(Sub, BinaryOperation)                                               \
1368  M(SuspendCheck, Instruction)                                          \
1369  M(Throw, Instruction)                                                 \
1370  M(TryBoundary, Instruction)                                           \
1371  M(TypeConversion, Instruction)                                        \
1372  M(UShr, BinaryOperation)                                              \
1373  M(Xor, BinaryOperation)                                               \
1374  M(VecReplicateScalar, VecUnaryOperation)                              \
1375  M(VecSumReduce, VecUnaryOperation)                                    \
1376  M(VecCnv, VecUnaryOperation)                                          \
1377  M(VecNeg, VecUnaryOperation)                                          \
1378  M(VecAbs, VecUnaryOperation)                                          \
1379  M(VecNot, VecUnaryOperation)                                          \
1380  M(VecAdd, VecBinaryOperation)                                         \
1381  M(VecHalvingAdd, VecBinaryOperation)                                  \
1382  M(VecSub, VecBinaryOperation)                                         \
1383  M(VecMul, VecBinaryOperation)                                         \
1384  M(VecDiv, VecBinaryOperation)                                         \
1385  M(VecMin, VecBinaryOperation)                                         \
1386  M(VecMax, VecBinaryOperation)                                         \
1387  M(VecAnd, VecBinaryOperation)                                         \
1388  M(VecAndNot, VecBinaryOperation)                                      \
1389  M(VecOr, VecBinaryOperation)                                          \
1390  M(VecXor, VecBinaryOperation)                                         \
1391  M(VecShl, VecBinaryOperation)                                         \
1392  M(VecShr, VecBinaryOperation)                                         \
1393  M(VecUShr, VecBinaryOperation)                                        \
1394  M(VecSetScalars, VecOperation)                                        \
1395  M(VecMultiplyAccumulate, VecOperation)                                \
1396  M(VecLoad, VecMemoryOperation)                                        \
1397  M(VecStore, VecMemoryOperation)                                       \
1398
1399/*
1400 * Instructions, shared across several (not all) architectures.
1401 */
1402#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
1403#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
1404#else
1405#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
1406  M(BitwiseNegatedRight, Instruction)                                   \
1407  M(DataProcWithShifterOp, Instruction)                                 \
1408  M(MultiplyAccumulate, Instruction)                                    \
1409  M(IntermediateAddress, Instruction)                                   \
1410  M(IntermediateAddressIndex, Instruction)
1411#endif
1412
// Backend-specific instruction lists: each FOR_EACH_CONCRETE_INSTRUCTION_<arch>
// macro expands to its entries only when the corresponding code generator is
// compiled in (ART_ENABLE_CODEGEN_<arch>); otherwise it expands to nothing.
#ifndef ART_ENABLE_CODEGEN_arm
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                            \
  M(ArmDexCacheArraysBase, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)

#ifndef ART_ENABLE_CODEGEN_mips
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                           \
  M(MipsComputeBaseMethodAddress, Instruction)                          \
  M(MipsDexCacheArraysBase, Instruction)                                \
  M(MipsPackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86FPNeg, Instruction)                                              \
  M(X86PackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// All concrete (instantiable) instruction kinds: the common ones plus the
// shared and per-backend ones enabled above.
#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// Abstract instruction base classes; each entry is M(type, super).
#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
  M(Condition, BinaryOperation)                                         \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)                                                \
  M(VecOperation, Instruction)                                          \
  M(VecUnaryOperation, VecOperation)                                    \
  M(VecBinaryOperation, VecOperation)                                   \
  M(VecMemoryOperation, VecOperation)

// Every instruction kind, concrete and abstract.
#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

// Forward-declare class H<type> for every instruction kind above.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION
1473
// Boilerplate members for a concrete instruction class: its kind, its debug
// name, a kind-equality check against another instruction, and the visitor
// dispatch hook.
#define DECLARE_INSTRUCTION(type)                                         \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }    \
  const char* DebugName() const OVERRIDE { return #type; }                \
  bool InstructionTypeEquals(const HInstruction* other) const OVERRIDE {  \
    return other->Is##type();                                             \
  }                                                                       \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// Boilerplate for an abstract instruction class: Is##type()/As##type()
// trivially succeed since `this` already has the queried (base) type.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }
1486
// One node in an instruction's use list, recording the user (an HInstruction*
// or an HEnvironment*, depending on T) and which of the user's input slots
// this use occupies. Nodes are arena-allocated and linked intrusively.
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  // Get the instruction which has this use as one of the inputs.
  T GetUser() const { return user_; }
  // Get the position of the input record that this use corresponds to.
  size_t GetIndex() const { return index_; }
  // Set the position of the input record that this use corresponds to.
  void SetIndex(size_t index) { index_ = index; }

  // Hook for the IntrusiveForwardList<>.
  // TODO: Hide this better.
  IntrusiveForwardListHook hook;

 private:
  // Private: only the friend HInstruction creates use list nodes.
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  // The user is fixed at construction; only the input index may be updated.
  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};
1512
// An intrusive singly-linked list of use nodes.
template <typename T>
using HUseList = IntrusiveForwardList<HUseListNode<T>>;

// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), before_use_node_() {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}

  // Re-binds an existing record to a new before-use iterator.
  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
      : HUserRecord(old_record.instruction_, before_use_node) {}
  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
      : instruction_(instruction), before_use_node_(before_use_node) {
    DCHECK(instruction_ != nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  // The predecessor iterator is stored (rather than the node itself) because
  // the use list is a forward list and erase/insert operate "after" a node.
  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
  typename HUseList<T>::iterator before_use_node_;
};
1543
// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
// This is used for HInstruction::GetInputs() to return a container wrapper providing
// HInstruction* values even though the underlying container has HUserRecord<>s.
struct HInputExtractor {
  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
};

// Views over an instruction's input records yielding (const) HInstruction*.
using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
1558
1559/**
1560 * Side-effects representation.
1561 *
1562 * For write/read dependences on fields/arrays, the dependence analysis uses
1563 * type disambiguation (e.g. a float field write cannot modify the value of an
1564 * integer field read) and the access type (e.g.  a reference array write cannot
1565 * modify the value of a reference field read [although it may modify the
1566 * reference fetch prior to reading the field, which is represented by its own
1567 * write/read dependence]). The analysis makes conservative points-to
1568 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1569 * the same, and any reference read depends on any reference read without
1570 * further regard of its type).
1571 *
1572 * The internal representation uses 38-bit and is described in the table below.
1573 * The first line indicates the side effect, and for field/array accesses the
1574 * second line indicates the type of the access (in the order of the
1575 * Primitive::Type enum).
1576 * The two numbered lines below indicate the bit position in the bitfield (read
1577 * vertically).
1578 *
1579 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1580 *   +-------------+---------+---------+--------------+---------+---------+
1581 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1582 *   |      3      |333333322|222222221|       1      |111111110|000000000|
1583 *   |      7      |654321098|765432109|       8      |765432109|876543210|
1584 *
1585 * Note that, to ease the implementation, 'changes' bits are least significant
1586 * bits, while 'dependency' bits are most significant bits.
1587 */
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  // No side effects and no dependencies.
  static SideEffects None() {
    return SideEffects(0);
  }

  // All 'change' and all 'depend on' bits set.
  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  // All writes/reads plus "can trigger GC"; only the "depends on GC" bit is
  // left out.
  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // A volatile access acts as a full read/write barrier, hence the widening
  // to AllWritesAndReads() below.
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayWriteOffset));
  }

  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Returns this set with the other's bits removed.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  // In-place union.
  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Returns true if every bit of `other` is also set in this.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  // Any 'change' bit set?
  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  // Any 'depend on' bit set?
  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  bool MayDependOn(SideEffects other) const {
    // Align this set's 'depend on' bits with the 'change' bits of `other`.
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    // Walk the bits from most significant (kLastBit) down to 0, matching the
    // layout documented above the class.
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // Number of type bits per access kind (one per primitive type, see the
  // DFJISCBZL columns in the layout comment above the class).
  static constexpr int kFieldArrayAnalysisBits = 9;

  // 'Change' half: field writes, array writes, then the can-trigger-GC bit.
  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  // 'Depend on' half: field reads, array reads, then the depends-on-GC bit.
  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Translates type to bit flag.
  static uint64_t TypeFlag(Primitive::Type type, int offset) {
    CHECK_NE(type, Primitive::kPrimVoid);
    const uint64_t one = 1;
    const int shift = type;  // 0-based consecutive enum
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return one << (type + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};
1783
1784// A HEnvironment object contains the values of virtual registers at a given location.
1785class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
1786 public:
1787  ALWAYS_INLINE HEnvironment(ArenaAllocator* arena,
1788                             size_t number_of_vregs,
1789                             ArtMethod* method,
1790                             uint32_t dex_pc,
1791                             HInstruction* holder)
1792     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
1793       locations_(arena->Adapter(kArenaAllocEnvironmentLocations)),
1794       parent_(nullptr),
1795       method_(method),
1796       dex_pc_(dex_pc),
1797       holder_(holder) {
1798  }
1799
1800  ALWAYS_INLINE HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
1801      : HEnvironment(arena,
1802                     to_copy.Size(),
1803                     to_copy.GetMethod(),
1804                     to_copy.GetDexPc(),
1805                     holder) {}
1806
1807  void AllocateLocations() {
1808    DCHECK(locations_.empty());
1809    locations_.resize(vregs_.size());
1810  }
1811
1812  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
1813    if (parent_ != nullptr) {
1814      parent_->SetAndCopyParentChain(allocator, parent);
1815    } else {
1816      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
1817      parent_->CopyFrom(parent);
1818      if (parent->GetParent() != nullptr) {
1819        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
1820      }
1821    }
1822  }
1823
1824  void CopyFrom(const ArenaVector<HInstruction*>& locals);
1825  void CopyFrom(HEnvironment* environment);
1826
1827  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
1828  // input to the loop phi instead. This is for inserting instructions that
1829  // require an environment (like HDeoptimization) in the loop pre-header.
1830  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);
1831
1832  void SetRawEnvAt(size_t index, HInstruction* instruction) {
1833    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
1834  }
1835
1836  HInstruction* GetInstructionAt(size_t index) const {
1837    return vregs_[index].GetInstruction();
1838  }
1839
1840  void RemoveAsUserOfInput(size_t index) const;
1841
1842  size_t Size() const { return vregs_.size(); }
1843
1844  HEnvironment* GetParent() const { return parent_; }
1845
1846  void SetLocationAt(size_t index, Location location) {
1847    locations_[index] = location;
1848  }
1849
1850  Location GetLocationAt(size_t index) const {
1851    return locations_[index];
1852  }
1853
1854  uint32_t GetDexPc() const {
1855    return dex_pc_;
1856  }
1857
1858  ArtMethod* GetMethod() const {
1859    return method_;
1860  }
1861
1862  HInstruction* GetHolder() const {
1863    return holder_;
1864  }
1865
1866
1867  bool IsFromInlinedInvoke() const {
1868    return GetParent() != nullptr;
1869  }
1870
1871 private:
1872  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
1873  ArenaVector<Location> locations_;
1874  HEnvironment* parent_;
1875  ArtMethod* method_;
1876  const uint32_t dex_pc_;
1877
1878  // The instruction that holds this environment.
1879  HInstruction* const holder_;
1880
1881  friend class HInstruction;
1882
1883  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
1884};
1885
1886class HInstruction : public ArenaObject<kArenaAllocInstruction> {
1887 public:
  // Creates an instruction not yet linked into any block. `id_` and
  // `ssa_index_` start at -1 ("unset"); the reference type info starts as the
  // invalid RTI, with its is-exact bit mirrored into the packed flags.
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(0u),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
  }
1904
1905  virtual ~HInstruction() {}
1906
1907#define DECLARE_KIND(type, super) k##type,
1908  enum InstructionKind {
1909    FOR_EACH_INSTRUCTION(DECLARE_KIND)
1910  };
1911#undef DECLARE_KIND
1912
1913  HInstruction* GetNext() const { return next_; }
1914  HInstruction* GetPrevious() const { return previous_; }
1915
1916  HInstruction* GetNextDisregardingMoves() const;
1917  HInstruction* GetPreviousDisregardingMoves() const;
1918
1919  HBasicBlock* GetBlock() const { return block_; }
1920  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
1921  void SetBlock(HBasicBlock* block) { block_ = block; }
1922  bool IsInBlock() const { return block_ != nullptr; }
1923  bool IsInLoop() const { return block_->IsInLoop(); }
1924  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
1925  bool IsIrreducibleLoopHeaderPhi() const {
1926    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
1927  }
1928
1929  virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;
1930
1931  ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
1932    // One virtual method is enough, just const_cast<> and then re-add the const.
1933    return ArrayRef<const HUserRecord<HInstruction*>>(
1934        const_cast<HInstruction*>(this)->GetInputRecords());
1935  }
1936
1937  HInputsRef GetInputs() {
1938    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
1939  }
1940
1941  HConstInputsRef GetInputs() const {
1942    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
1943  }
1944
1945  size_t InputCount() const { return GetInputRecords().size(); }
1946  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }
1947
1948  bool HasInput(HInstruction* input) const {
1949    for (const HInstruction* i : GetInputs()) {
1950      if (i == input) {
1951        return true;
1952      }
1953    }
1954    return false;
1955  }
1956
1957  void SetRawInputAt(size_t index, HInstruction* input) {
1958    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
1959  }
1960
1961  virtual void Accept(HGraphVisitor* visitor) = 0;
1962  virtual const char* DebugName() const = 0;
1963
1964  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }
1965
1966  virtual bool NeedsEnvironment() const { return false; }
1967
1968  uint32_t GetDexPc() const { return dex_pc_; }
1969
1970  virtual bool IsControlFlow() const { return false; }
1971
1972  // Can the instruction throw?
1973  // TODO: We should rename to CanVisiblyThrow, as some instructions (like HNewInstance),
1974  // could throw OOME, but it is still OK to remove them if they are unused.
1975  virtual bool CanThrow() const { return false; }
1976  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }
1977
1978  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
1979  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }
1980
1981  // Does not apply for all instructions, but having this at top level greatly
1982  // simplifies the null check elimination.
1983  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
1984  virtual bool CanBeNull() const {
1985    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
1986    return true;
1987  }
1988
1989  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
1990    return false;
1991  }
1992
1993  virtual bool IsActualObject() const {
1994    return GetType() == Primitive::kPrimNot;
1995  }
1996
1997  void SetReferenceTypeInfo(ReferenceTypeInfo rti);
1998
1999  ReferenceTypeInfo GetReferenceTypeInfo() const {
2000    DCHECK_EQ(GetType(), Primitive::kPrimNot);
2001    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
2002                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
2003  }
2004
  // Records that `user` uses this instruction as its `index`-th input. The
  // use node is arena-allocated and pushed at the front of the use list; the
  // fix-up call then updates the user records affected by the insertion.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    HUseListNode<HInstruction*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }

  // Same as AddUseAt(), but for a use held by an environment.
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }
2024
  // Removes this instruction from the use list of its `input`-th input.
  // The use list is a forward list, so removal goes through the stored
  // predecessor ("before use") iterator.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }

  // Removes this instruction from the use lists of all of its inputs.
  void RemoveAsUserOfAllInputs() {
    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
      input_use.GetInstruction()->uses_.erase_after(before_use_node);
      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
    }
  }
2039
2040  const HUseList<HInstruction*>& GetUses() const { return uses_; }
2041  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }
2042
2043  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
2044  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
2045  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
2046  bool HasOnlyOneNonEnvironmentUse() const {
2047    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
2048  }
2049
  // Whether this instruction may be removed from the graph: it must not write
  // memory, throw or be control flow, and must not be one of the kinds that
  // is kept unconditionally (suspend checks, native debug info, parameters,
  // explicit barriers/fences).
  bool IsRemovable() const {
    return
        !DoesAnyWrite() &&
        !CanThrow() &&
        !IsSuspendCheck() &&
        !IsControlFlow() &&
        !IsNativeDebugInfo() &&
        !IsParameterValue() &&
        // If we added an explicit barrier then we should keep it.
        !IsMemoryBarrier() &&
        !IsConstructorFence();
  }

  // Removable and currently unused (no instruction or environment uses).
  bool IsDeadAndRemovable() const {
    return IsRemovable() && !HasUses();
  }
2066
2067  // Does this instruction strictly dominate `other_instruction`?
2068  // Returns false if this instruction and `other_instruction` are the same.
2069  // Aborts if this instruction and `other_instruction` are both phis.
2070  bool StrictlyDominates(HInstruction* other_instruction) const;
2071
2072  int GetId() const { return id_; }
2073  void SetId(int id) { id_ = id; }
2074
2075  int GetSsaIndex() const { return ssa_index_; }
2076  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
2077  bool HasSsaIndex() const { return ssa_index_ != -1; }
2078
2079  bool HasEnvironment() const { return environment_ != nullptr; }
2080  HEnvironment* GetEnvironment() const { return environment_; }
2081  // Set the `environment_` field. Raw because this method does not
2082  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Prepends `environment` to the existing environment chain. Raw: the uses
  // lists are not updated.
  void InsertRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ != nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    DCHECK(environment->GetParent() == nullptr);
    environment->parent_ = environment_;
    environment_ = environment;
  }
2096
2097  void RemoveEnvironment();
2098
2099  // Set the environment of this instruction, copying it from `environment`. While
2100  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    // Also clone the parent chain, present when `environment` belongs to an
    // inlined invoke.
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Same as CopyEnvironmentFrom(), except loop phis of `block` are replaced
  // by their first input during the copy (see CopyFromWithLoopPhiAdjustment).
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }
2121
2122  // Returns the number of entries in the environment. Typically, that is the
2123  // number of dex registers in a method. It could be more in case of inlining.
2124  size_t EnvironmentSize() const;
2125
2126  LocationSummary* GetLocations() const { return locations_; }
2127  void SetLocations(LocationSummary* locations) { locations_ = locations; }
2128
2129  void ReplaceWith(HInstruction* instruction);
2130  void ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
2131  void ReplaceInput(HInstruction* replacement, size_t index);
2132
2133  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
2134  // uses of this instruction by `other` are *not* updated.
2135  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
2136    ReplaceWith(other);
2137    other->ReplaceInput(this, use_index);
2138  }
2139
2140  // Move `this` instruction before `cursor`
2141  void MoveBefore(HInstruction* cursor, bool do_checks = true);
2142
2143  // Move `this` before its first user and out of any loops. If there is no
2144  // out-of-loop user that dominates all other users, move the instruction
2145  // to the end of the out-of-loop common dominator of the user's blocks.
2146  //
2147  // This can be used only on non-throwing instructions with no side effects that
2148  // have at least one use but no environment uses.
2149  void MoveBeforeFirstUserAndOutOfLoops();
2150
2151#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2152  bool Is##type() const;                                                       \
2153  const H##type* As##type() const;                                             \
2154  H##type* As##type();
2155
2156  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2157#undef INSTRUCTION_TYPE_CHECK
2158
2159#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2160  bool Is##type() const { return (As##type() != nullptr); }                    \
2161  virtual const H##type* As##type() const { return nullptr; }                  \
2162  virtual H##type* As##type() { return nullptr; }
2163  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2164#undef INSTRUCTION_TYPE_CHECK
2165
2166  // Returns whether the instruction can be moved within the graph.
2167  // TODO: this method is used by LICM and GVN with possibly different
2168  //       meanings? split and rename?
2169  virtual bool CanBeMoved() const { return false; }
2170
2171  // Returns whether the two instructions are of the same kind.
2172  virtual bool InstructionTypeEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
2173    return false;
2174  }
2175
2176  // Returns whether any data encoded in the two instructions is equal.
2177  // This method does not look at the inputs. Both instructions must be
2178  // of the same type, otherwise the method has undefined behavior.
2179  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
2180    return false;
2181  }
2182
2183  // Returns whether two instructions are equal, that is:
2184  // 1) They have the same type and contain the same data (InstructionDataEquals).
2185  // 2) Their inputs are identical.
2186  bool Equals(const HInstruction* other) const;
2187
2188  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
2189  // is adopted and implemented by our C++ compiler(s). Fow now, we need to hide
2190  // the virtual function because the __attribute__((__pure__)) doesn't really
2191  // apply the strong requirement for virtual functions, preventing optimizations.
2192  InstructionKind GetKind() const PURE;
2193  virtual InstructionKind GetKindInternal() const = 0;
2194
2195  virtual size_t ComputeHashCode() const {
2196    size_t result = GetKind();
2197    for (const HInstruction* input : GetInputs()) {
2198      result = (result * 31) + input->GetId();
2199    }
2200    return result;
2201  }
2202
  // Side effects of this instruction (memory reads/writes, GC triggers, ...).
  SideEffects GetSideEffects() const { return side_effects_; }
  void SetSideEffects(SideEffects other) { side_effects_ = other; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  // Liveness information, set by the liveness analysis passes.
  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  // Whether this is the suspend check inserted in the entry block.
  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
  //      to access the dex cache.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsCurrentMethod();
  }

  // Returns whether the code generation of the instruction will require to have access
  // to the dex cache of the current method's declaring class via the current method.
  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

  // Emitted-at-use-site flag, see kFlagEmittedAtUseSite below.
  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }
2239
 protected:
  // If set, the machine code for this instruction is assumed to be generated by
  // its users. Used by liveness analysis to compute use positions accordingly.
  static constexpr size_t kFlagEmittedAtUseSite = 0u;
  // Whether the reference type info (stored in reference_type_handle_) is exact.
  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
  // First bit index available for subclasses' own packed flags/fields.
  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
  // Packed fields are stored in a uint32_t, so at most 32 bits are available.
  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;
2247
  // Returns a copy of the input record at index `i`.
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
    return GetInputRecords()[i];
  }

  // Overwrites the input record at `index`. "Raw" because it only assigns the
  // record; it does not touch the input's use list.
  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
    input_records[index] = input;
  }

  // Raw access to the packed bits, used by subclasses' BitField decoding.
  uint32_t GetPackedFields() const {
    return packed_fields_;
  }
2260
2261  template <size_t flag>
2262  bool GetPackedFlag() const {
2263    return (packed_fields_ & (1u << flag)) != 0u;
2264  }
2265
2266  template <size_t flag>
2267  void SetPackedFlag(bool value = true) {
2268    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
2269  }
2270
  // Decodes the multi-bit field described by `BitFieldType` from the packed bits.
  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }

  // Encodes `value` into the multi-bit field described by `BitFieldType`.
  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }
2281
 private:
  // After inserting use nodes at the head of `uses_`, refresh the affected
  // users' input records: each record caches an iterator to the node *before*
  // its use node, and those cached iterators are refreshed up to `fixup_end`.
  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }
2292
  // After removing the use node that followed `before_use_node`, the next
  // remaining use's cached "before" iterator must be repointed to
  // `before_use_node` so the singly-linked use list stays consistent.
  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }
2302
  // Environment-use counterpart of FixUpUserRecordsAfterUseInsertion: refreshes
  // the cached "before" iterators stored in the environments' vreg records,
  // for all nodes from the head of `env_uses_` up to `env_fixup_end`.
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }
2312
  // Environment-use counterpart of FixUpUserRecordsAfterUseRemoval: repoints
  // the next environment use's cached "before" iterator after a removal.
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }
2322
  // Intrusive doubly-linked list links within the owning block's
  // instruction (or phi) list.
  HInstruction* previous_;
  HInstruction* next_;
  // The basic block this instruction belongs to; set when added to a block.
  HBasicBlock* block_;
  // Dex program counter this instruction was built from.
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // Packed fields.
  uint32_t packed_fields_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
};
2374std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2375
2376// Iterates over the instructions, while preserving the next instruction
2377// in case the current instruction gets removed from the list by the user
2378// of this iterator.
2379class HInstructionIterator : public ValueObject {
2380 public:
2381  explicit HInstructionIterator(const HInstructionList& instructions)
2382      : instruction_(instructions.first_instruction_) {
2383    next_ = Done() ? nullptr : instruction_->GetNext();
2384  }
2385
2386  bool Done() const { return instruction_ == nullptr; }
2387  HInstruction* Current() const { return instruction_; }
2388  void Advance() {
2389    instruction_ = next_;
2390    next_ = Done() ? nullptr : instruction_->GetNext();
2391  }
2392
2393 private:
2394  HInstruction* instruction_;
2395  HInstruction* next_;
2396
2397  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2398};
2399
2400// Iterates over the instructions without saving the next instruction,
2401// therefore handling changes in the graph potentially made by the user
2402// of this iterator.
2403class HInstructionIteratorHandleChanges : public ValueObject {
2404 public:
2405  explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
2406      : instruction_(instructions.first_instruction_) {
2407  }
2408
2409  bool Done() const { return instruction_ == nullptr; }
2410  HInstruction* Current() const { return instruction_; }
2411  void Advance() {
2412    instruction_ = instruction_->GetNext();
2413  }
2414
2415 private:
2416  HInstruction* instruction_;
2417
2418  DISALLOW_COPY_AND_ASSIGN(HInstructionIteratorHandleChanges);
2419};
2420
2421
2422class HBackwardInstructionIterator : public ValueObject {
2423 public:
2424  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2425      : instruction_(instructions.last_instruction_) {
2426    next_ = Done() ? nullptr : instruction_->GetPrevious();
2427  }
2428
2429  bool Done() const { return instruction_ == nullptr; }
2430  HInstruction* Current() const { return instruction_; }
2431  void Advance() {
2432    instruction_ = next_;
2433    next_ = Done() ? nullptr : instruction_->GetPrevious();
2434  }
2435
2436 private:
2437  HInstruction* instruction_;
2438  HInstruction* next_;
2439
2440  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2441};
2442
// Base class for instructions whose number of inputs is determined at run time;
// the input records live in an arena-allocated vector.
class HVariableInputSizeInstruction : public HInstruction {
 public:
  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  // Appends `input` as the last input.
  void AddInput(HInstruction* input);
  // Inserts `input` at position `index`.
  void InsertInputAt(size_t index, HInstruction* input);
  // Removes the input at position `index`.
  void RemoveInputAt(size_t index);

  // Removes all the inputs.
  // Also removes this instructions from each input's use list
  // (for non-environment uses only).
  void RemoveAllInputs();

 protected:
  // `kind` selects the arena allocation bucket for the inputs vector.
  HVariableInputSizeInstruction(SideEffects side_effects,
                                uint32_t dex_pc,
                                ArenaAllocator* arena,
                                size_t number_of_inputs,
                                ArenaAllocKind kind)
      : HInstruction(side_effects, dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kind)) {}

  // Input records, allocated in the arena.
  ArenaVector<HUserRecord<HInstruction*>> inputs_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HVariableInputSizeInstruction);
};
2473
// Instruction with a compile-time fixed number of inputs (N), stored inline
// in the instruction itself (no arena allocation for the inputs).
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

 private:
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};
2491
// HTemplateInstruction specialization for N=0, omitting the (otherwise empty)
// inputs array; GetInputRecords() returns an empty ArrayRef.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>();
  }

 private:
  friend class SsaBuilder;
};
2509
// Instruction with a fixed number of inputs and a result type, stored in the
// packed fields right after the generic HInstruction bits.
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc) {
    this->template SetPackedField<TypeField>(type);
  }
  virtual ~HExpression() {}

  Primitive::Type GetType() const OVERRIDE {
    return TypeField::Decode(this->GetPackedFields());
  }

 protected:
  // NOTE(review): template parameter is `intptr_t` here while
  // HTemplateInstruction uses `size_t`; harmless for the small N in use,
  // but consider unifying the two.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
};
2532
// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block.
class HReturnVoid FINAL : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};
2547
// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block.
class HReturn FINAL : public HTemplateInstruction<1> {
 public:
  // `value` is the single input: the value being returned.
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};
2564
// SSA phi: merges, for dex register `reg_number_`, the values flowing in from
// the predecessors of its block (one input per predecessor).
class HPhi FINAL : public HVariableInputSizeInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HVariableInputSizeInstruction(
            SideEffects::None(),
            dex_pc,
            arena,
            number_of_inputs,
            kArenaAllocPhiInputs),
        reg_number_(reg_number) {
    SetPackedField<TypeField>(ToPhiType(type));
    DCHECK_NE(GetType(), Primitive::kPrimVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    return Primitive::PrimitiveKind(type);
  }

  // A phi in a catch block merges values along exceptional edges.
  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
  void SetType(Primitive::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    //  (1) int  -> float/ref (primitive type propagation),
    //  (2) long -> double (primitive type propagation).
    DCHECK(GetType() == new_type ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
           (GetType() == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
    SetPackedField<TypeField>(new_type);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  // The dex register this phi merges.
  uint32_t GetRegNumber() const { return reg_number_; }

  // Live/dead state, see the constructor comment above.
  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
  bool IsDead() const { return !IsLive(); }
  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }

  // Whether `other` is a phi for the same dex register in the same block.
  bool IsVRegEquivalentOf(const HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 private:
  // Packed-field layout: type, then the live and can-be-null flags.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;

  const uint32_t reg_number_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};
2654
// The exit instruction is the only instruction of the exit block.
// Instructions aborting the method (HThrow and HReturn) must branch to the
// exit block.
class HExit FINAL : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};
2669
// Jumps from one block to another.
class HGoto FINAL : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // A block ending with a goto has exactly one successor.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};
2686
// Base class for all constant instructions (null, int, long, float, double).
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  // Constants have no side effects and can always be moved.
  bool CanBeMoved() const OVERRIDE { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern?
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  // Returns the raw value (bit pattern), widened to 64 bits.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};
2710
2711class HNullConstant FINAL : public HConstant {
2712 public:
2713  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2714    return true;
2715  }
2716
2717  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2718
2719  size_t ComputeHashCode() const OVERRIDE { return 0; }
2720
2721  // The null constant representation is a 0-bit pattern.
2722  virtual bool IsZeroBitPattern() const { return true; }
2723
2724  DECLARE_INSTRUCTION(NullConstant);
2725
2726 private:
2727  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}
2728
2729  friend class HGraph;
2730  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
2731};
2732
// Constants of the type int. Those can be from Dex instructions, or
// synthesized (for example with the if-eqz instruction).
class HIntConstant FINAL : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  // Zero-extends the 32-bit value (no sign extension).
  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant()) << other->DebugName();
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  // Integer constants are used to encode Boolean values as well,
  // where 1 means true and 0 means false.
  bool IsTrue() const { return GetValue() == 1; }
  bool IsFalse() const { return GetValue() == 0; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  // Only HGraph (and the listed tests) create int constants.
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};
2775
// Constants of the type long.
class HLongConstant FINAL : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant()) << other->DebugName();
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  // Only HGraph creates long constants (see friend declaration below).
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};
2805
2806class HFloatConstant FINAL : public HConstant {
2807 public:
2808  float GetValue() const { return value_; }
2809
2810  uint64_t GetValueAsUint64() const OVERRIDE {
2811    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
2812  }
2813
2814  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2815    DCHECK(other->IsFloatConstant()) << other->DebugName();
2816    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
2817  }
2818
2819  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2820
2821  bool IsMinusOne() const OVERRIDE {
2822    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
2823  }
2824  bool IsArithmeticZero() const OVERRIDE {
2825    return std::fpclassify(value_) == FP_ZERO;
2826  }
2827  bool IsArithmeticPositiveZero() const {
2828    return IsArithmeticZero() && !std::signbit(value_);
2829  }
2830  bool IsArithmeticNegativeZero() const {
2831    return IsArithmeticZero() && std::signbit(value_);
2832  }
2833  bool IsZeroBitPattern() const OVERRIDE {
2834    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
2835  }
2836  bool IsOne() const OVERRIDE {
2837    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
2838  }
2839  bool IsNaN() const {
2840    return std::isnan(value_);
2841  }
2842
2843  DECLARE_INSTRUCTION(FloatConstant);
2844
2845 private:
2846  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
2847      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
2848  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
2849      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}
2850
2851  const float value_;
2852
2853  // Only the SsaBuilder and HGraph can create floating-point constants.
2854  friend class SsaBuilder;
2855  friend class HGraph;
2856  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
2857};
2858
2859class HDoubleConstant FINAL : public HConstant {
2860 public:
2861  double GetValue() const { return value_; }
2862
2863  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }
2864
2865  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2866    DCHECK(other->IsDoubleConstant()) << other->DebugName();
2867    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
2868  }
2869
2870  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2871
2872  bool IsMinusOne() const OVERRIDE {
2873    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
2874  }
2875  bool IsArithmeticZero() const OVERRIDE {
2876    return std::fpclassify(value_) == FP_ZERO;
2877  }
2878  bool IsArithmeticPositiveZero() const {
2879    return IsArithmeticZero() && !std::signbit(value_);
2880  }
2881  bool IsArithmeticNegativeZero() const {
2882    return IsArithmeticZero() && std::signbit(value_);
2883  }
2884  bool IsZeroBitPattern() const OVERRIDE {
2885    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
2886  }
2887  bool IsOne() const OVERRIDE {
2888    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
2889  }
2890  bool IsNaN() const {
2891    return std::isnan(value_);
2892  }
2893
2894  DECLARE_INSTRUCTION(DoubleConstant);
2895
2896 private:
2897  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
2898      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
2899  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
2900      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}
2901
2902  const double value_;
2903
2904  // Only the SsaBuilder and HGraph can create floating-point constants.
2905  friend class SsaBuilder;
2906  friend class HGraph;
2907  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
2908};
2909
// Conditional branch. A block ending with an HIf instruction must have
// two successors.
class HIf FINAL : public HTemplateInstruction<1> {
 public:
  // `input` is the condition value being tested.
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Successor taken when the condition is true (index 0).
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // Successor taken when the condition is false (index 1).
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};
2934
2935
// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary FINAL : public HTemplateInstruction<0> {
 public:
  // Whether this boundary enters or exits the try block.
  enum class BoundaryKind {
    kEntry,
    kExit,
    kLast = kExit
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // View over the exception-handler successors (all successors but the first).
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  // Packed-field layout: the boundary kind follows the generic bits.
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};
2997
2998// Deoptimize to interpreter, upon checking a condition.
2999class HDeoptimize FINAL : public HVariableInputSizeInstruction {
3000 public:
3001  // Use this constructor when the `HDeoptimize` acts as a barrier, where no code can move
3002  // across.
3003  HDeoptimize(ArenaAllocator* arena, HInstruction* cond, DeoptimizationKind kind, uint32_t dex_pc)
3004      : HVariableInputSizeInstruction(
3005            SideEffects::All(),
3006            dex_pc,
3007            arena,
3008            /* number_of_inputs */ 1,
3009            kArenaAllocMisc) {
3010    SetPackedFlag<kFieldCanBeMoved>(false);
3011    SetPackedField<DeoptimizeKindField>(kind);
3012    SetRawInputAt(0, cond);
3013  }
3014
3015  // Use this constructor when the `HDeoptimize` guards an instruction, and any user
3016  // that relies on the deoptimization to pass should have its input be the `HDeoptimize`
3017  // instead of `guard`.
3018  // We set CanTriggerGC to prevent any intermediate address to be live
3019  // at the point of the `HDeoptimize`.
3020  HDeoptimize(ArenaAllocator* arena,
3021              HInstruction* cond,
3022              HInstruction* guard,
3023              DeoptimizationKind kind,
3024              uint32_t dex_pc)
3025      : HVariableInputSizeInstruction(
3026            SideEffects::CanTriggerGC(),
3027            dex_pc,
3028            arena,
3029            /* number_of_inputs */ 2,
3030            kArenaAllocMisc) {
3031    SetPackedFlag<kFieldCanBeMoved>(true);
3032    SetPackedField<DeoptimizeKindField>(kind);
3033    SetRawInputAt(0, cond);
3034    SetRawInputAt(1, guard);
3035  }
3036
3037  bool CanBeMoved() const OVERRIDE { return GetPackedFlag<kFieldCanBeMoved>(); }
3038
3039  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3040    return (other->CanBeMoved() == CanBeMoved()) && (other->AsDeoptimize()->GetKind() == GetKind());
3041  }
3042
3043  bool NeedsEnvironment() const OVERRIDE { return true; }
3044
3045  bool CanThrow() const OVERRIDE { return true; }
3046
3047  DeoptimizationKind GetDeoptimizationKind() const { return GetPackedField<DeoptimizeKindField>(); }
3048
3049  Primitive::Type GetType() const OVERRIDE {
3050    return GuardsAnInput() ? GuardedInput()->GetType() : Primitive::kPrimVoid;
3051  }
3052
3053  bool GuardsAnInput() const {
3054    return InputCount() == 2;
3055  }
3056
3057  HInstruction* GuardedInput() const {
3058    DCHECK(GuardsAnInput());
3059    return InputAt(1);
3060  }
3061
3062  void RemoveGuard() {
3063    RemoveInputAt(1);
3064  }
3065
3066  DECLARE_INSTRUCTION(Deoptimize);
3067
3068 private:
3069  static constexpr size_t kFieldCanBeMoved = kNumberOfGenericPackedBits;
3070  static constexpr size_t kFieldDeoptimizeKind = kNumberOfGenericPackedBits + 1;
3071  static constexpr size_t kFieldDeoptimizeKindSize =
3072      MinimumBitsToStore(static_cast<size_t>(DeoptimizationKind::kLast));
3073  static constexpr size_t kNumberOfDeoptimizePackedBits =
3074      kFieldDeoptimizeKind + kFieldDeoptimizeKindSize;
3075  static_assert(kNumberOfDeoptimizePackedBits <= kMaxNumberOfPackedBits,
3076                "Too many packed fields.");
3077  using DeoptimizeKindField =
3078      BitField<DeoptimizationKind, kFieldDeoptimizeKind, kFieldDeoptimizeKindSize>;
3079
3080  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
3081};
3082
// Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
// The compiled code checks this flag value in a guard before devirtualized call and
// if it's true, starts to do deoptimization.
// It has a 4-byte slot on stack.
// TODO: allocate a register for this flag.
class HShouldDeoptimizeFlag FINAL : public HVariableInputSizeInstruction {
 public:
  // CHA guards are only optimized in a separate pass and it has no side effects
  // with regard to other passes.
  HShouldDeoptimizeFlag(ArenaAllocator* arena, uint32_t dex_pc)
      : HVariableInputSizeInstruction(SideEffects::None(), dex_pc, arena, 0, kArenaAllocCHA) {
  }

  // The flag is read as a 32-bit integer.
  Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimInt; }

  // We do all CHA guard elimination/motion in a single pass, after which there is no
  // further guard elimination/motion since a guard might have been used for justification
  // of the elimination of another guard. Therefore, we pretend this guard cannot be moved
  // to avoid other optimizations trying to move it.
  bool CanBeMoved() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShouldDeoptimizeFlag);
};
3109
// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod FINAL : public HExpression<0> {
 public:
  // Takes no inputs and has no side effects; `type` is chosen by the caller
  // (presumably the pointer-sized representation of ArtMethod* — confirm at use sites).
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};
3123
3124// Fetches an ArtMethod from the virtual table or the interface method table
3125// of a class.
3126class HClassTableGet FINAL : public HExpression<1> {
3127 public:
3128  enum class TableKind {
3129    kVTable,
3130    kIMTable,
3131    kLast = kIMTable
3132  };
3133  HClassTableGet(HInstruction* cls,
3134                 Primitive::Type type,
3135                 TableKind kind,
3136                 size_t index,
3137                 uint32_t dex_pc)
3138      : HExpression(type, SideEffects::None(), dex_pc),
3139        index_(index) {
3140    SetPackedField<TableKindField>(kind);
3141    SetRawInputAt(0, cls);
3142  }
3143
3144  bool CanBeMoved() const OVERRIDE { return true; }
3145  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3146    return other->AsClassTableGet()->GetIndex() == index_ &&
3147        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
3148  }
3149
3150  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
3151  size_t GetIndex() const { return index_; }
3152
3153  DECLARE_INSTRUCTION(ClassTableGet);
3154
3155 private:
3156  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
3157  static constexpr size_t kFieldTableKindSize =
3158      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
3159  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
3160  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
3161                "Too many packed fields.");
3162  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
3163
3164  // The index of the ArtMethod in the table.
3165  const size_t index_;
3166
3167  DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
3168};
3169
3170// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
3171// have one successor for each entry in the switch table, and the final successor
3172// will be the block containing the next Dex opcode.
3173class HPackedSwitch FINAL : public HTemplateInstruction<1> {
3174 public:
3175  HPackedSwitch(int32_t start_value,
3176                uint32_t num_entries,
3177                HInstruction* input,
3178                uint32_t dex_pc = kNoDexPc)
3179    : HTemplateInstruction(SideEffects::None(), dex_pc),
3180      start_value_(start_value),
3181      num_entries_(num_entries) {
3182    SetRawInputAt(0, input);
3183  }
3184
3185  bool IsControlFlow() const OVERRIDE { return true; }
3186
3187  int32_t GetStartValue() const { return start_value_; }
3188
3189  uint32_t GetNumEntries() const { return num_entries_; }
3190
3191  HBasicBlock* GetDefaultBlock() const {
3192    // Last entry is the default block.
3193    return GetBlock()->GetSuccessors()[num_entries_];
3194  }
3195  DECLARE_INSTRUCTION(PackedSwitch);
3196
3197 private:
3198  const int32_t start_value_;
3199  const uint32_t num_entries_;
3200
3201  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
3202};
3203
// Base class for instructions that take a single input and produce a value.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Unary operations have no side effects, so they can be moved and
  // de-duplicated; two instances with the same input are interchangeable.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`. One overload per constant input type.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};
3235
3236class HBinaryOperation : public HExpression<2> {
3237 public:
3238  HBinaryOperation(Primitive::Type result_type,
3239                   HInstruction* left,
3240                   HInstruction* right,
3241                   SideEffects side_effects = SideEffects::None(),
3242                   uint32_t dex_pc = kNoDexPc)
3243      : HExpression(result_type, side_effects, dex_pc) {
3244    SetRawInputAt(0, left);
3245    SetRawInputAt(1, right);
3246  }
3247
3248  HInstruction* GetLeft() const { return InputAt(0); }
3249  HInstruction* GetRight() const { return InputAt(1); }
3250  Primitive::Type GetResultType() const { return GetType(); }
3251
3252  virtual bool IsCommutative() const { return false; }
3253
3254  // Put constant on the right.
3255  // Returns whether order is changed.
3256  bool OrderInputsWithConstantOnTheRight() {
3257    HInstruction* left = InputAt(0);
3258    HInstruction* right = InputAt(1);
3259    if (left->IsConstant() && !right->IsConstant()) {
3260      ReplaceInput(right, 0);
3261      ReplaceInput(left, 1);
3262      return true;
3263    }
3264    return false;
3265  }
3266
3267  // Order inputs by instruction id, but favor constant on the right side.
3268  // This helps GVN for commutative ops.
3269  void OrderInputs() {
3270    DCHECK(IsCommutative());
3271    HInstruction* left = InputAt(0);
3272    HInstruction* right = InputAt(1);
3273    if (left == right || (!left->IsConstant() && right->IsConstant())) {
3274      return;
3275    }
3276    if (OrderInputsWithConstantOnTheRight()) {
3277      return;
3278    }
3279    // Order according to instruction id.
3280    if (left->GetId() > right->GetId()) {
3281      ReplaceInput(right, 0);
3282      ReplaceInput(left, 1);
3283    }
3284  }
3285
3286  bool CanBeMoved() const OVERRIDE { return true; }
3287  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
3288    return true;
3289  }
3290
3291  // Try to statically evaluate `this` and return a HConstant
3292  // containing the result of this evaluation.  If `this` cannot
3293  // be evaluated as a constant, return null.
3294  HConstant* TryStaticEvaluation() const;
3295
3296  // Apply this operation to `x` and `y`.
3297  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3298                              HNullConstant* y ATTRIBUTE_UNUSED) const {
3299    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
3300    UNREACHABLE();
3301  }
3302  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
3303  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
3304  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
3305                              HIntConstant* y ATTRIBUTE_UNUSED) const {
3306    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
3307    UNREACHABLE();
3308  }
3309  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
3310  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;
3311
3312  // Returns an input that can legally be used as the right input and is
3313  // constant, or null.
3314  HConstant* GetConstantRight() const;
3315
3316  // If `GetConstantRight()` returns one of the input, this returns the other
3317  // one. Otherwise it returns null.
3318  HInstruction* GetLeastConstantLeft() const;
3319
3320  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);
3321
3322 private:
3323  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
3324};
3325
// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias
};

// Stream output for debug printing; used with HCondition/HCompare bias fields.
std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
3336
// Base class for boolean-producing comparisons. Carries a comparison bias
// packed field, needed when a HCompare is merged into the condition.
class HCondition : public HBinaryOperation {
 public:
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc) {
    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
  }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_ABSTRACT_INSTRUCTION(Condition);

  // The condition tested by this instruction.
  virtual IfCondition GetCondition() const = 0;

  // The negation of GetCondition().
  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }

  // Two conditions are equal when all packed fields (including the bias) match.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCondition()->GetPackedFields();
  }

  // Whether this floating-point condition evaluates to true when an input is
  // NaN: NE is always true, EQ always false, and GT/GE only under gt bias.
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondNE) {
      return true;
    } else if (if_cond == kCondEQ) {
      return false;
    }
    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
  }

  // Whether this floating-point condition evaluates to false when an input is
  // NaN: EQ is always false, NE always true, and LT/LE are false under gt bias.
  bool IsFPConditionFalseIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondEQ) {
      return true;
    } else if (if_cond == kCondNE) {
      return false;
    }
    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
  }

 protected:
  // Needed if we merge a HCompare into a HCondition.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfConditionPackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Three-way comparison: 1 if x > y, -1 if x < y, 0 if equal.
  template <typename T>
  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way floating-point comparison; unordered (NaN) operands yield 1
  // under gt bias and -1 under lt bias.
  template <typename T>
  int32_t CompareFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
  }

  // Return an integer constant containing the result of a condition evaluated at compile time.
  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCondition);
};
3416
3417// Instruction to check if two inputs are equal to each other.
3418class HEqual FINAL : public HCondition {
3419 public:
3420  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3421      : HCondition(first, second, dex_pc) {}
3422
3423  bool IsCommutative() const OVERRIDE { return true; }
3424
3425  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3426                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3427    return MakeConstantCondition(true, GetDexPc());
3428  }
3429  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3430    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3431  }
3432  // In the following Evaluate methods, a HCompare instruction has
3433  // been merged into this HEqual instruction; evaluate it as
3434  // `Compare(x, y) == 0`.
3435  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3436    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3437                                 GetDexPc());
3438  }
3439  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3440    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3441  }
3442  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3443    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3444  }
3445
3446  DECLARE_INSTRUCTION(Equal);
3447
3448  IfCondition GetCondition() const OVERRIDE {
3449    return kCondEQ;
3450  }
3451
3452  IfCondition GetOppositeCondition() const OVERRIDE {
3453    return kCondNE;
3454  }
3455
3456 private:
3457  template <typename T> static bool Compute(T x, T y) { return x == y; }
3458
3459  DISALLOW_COPY_AND_ASSIGN(HEqual);
3460};
3461
3462class HNotEqual FINAL : public HCondition {
3463 public:
3464  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3465      : HCondition(first, second, dex_pc) {}
3466
3467  bool IsCommutative() const OVERRIDE { return true; }
3468
3469  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3470                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3471    return MakeConstantCondition(false, GetDexPc());
3472  }
3473  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3474    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3475  }
3476  // In the following Evaluate methods, a HCompare instruction has
3477  // been merged into this HNotEqual instruction; evaluate it as
3478  // `Compare(x, y) != 0`.
3479  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3480    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3481  }
3482  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3483    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3484  }
3485  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3486    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3487  }
3488
3489  DECLARE_INSTRUCTION(NotEqual);
3490
3491  IfCondition GetCondition() const OVERRIDE {
3492    return kCondNE;
3493  }
3494
3495  IfCondition GetOppositeCondition() const OVERRIDE {
3496    return kCondEQ;
3497  }
3498
3499 private:
3500  template <typename T> static bool Compute(T x, T y) { return x != y; }
3501
3502  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
3503};
3504
3505class HLessThan FINAL : public HCondition {
3506 public:
3507  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3508      : HCondition(first, second, dex_pc) {}
3509
3510  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3511    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3512  }
3513  // In the following Evaluate methods, a HCompare instruction has
3514  // been merged into this HLessThan instruction; evaluate it as
3515  // `Compare(x, y) < 0`.
3516  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3517    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3518  }
3519  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3520    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3521  }
3522  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3523    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3524  }
3525
3526  DECLARE_INSTRUCTION(LessThan);
3527
3528  IfCondition GetCondition() const OVERRIDE {
3529    return kCondLT;
3530  }
3531
3532  IfCondition GetOppositeCondition() const OVERRIDE {
3533    return kCondGE;
3534  }
3535
3536 private:
3537  template <typename T> static bool Compute(T x, T y) { return x < y; }
3538
3539  DISALLOW_COPY_AND_ASSIGN(HLessThan);
3540};
3541
3542class HLessThanOrEqual FINAL : public HCondition {
3543 public:
3544  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3545      : HCondition(first, second, dex_pc) {}
3546
3547  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3548    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3549  }
3550  // In the following Evaluate methods, a HCompare instruction has
3551  // been merged into this HLessThanOrEqual instruction; evaluate it as
3552  // `Compare(x, y) <= 0`.
3553  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3554    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3555  }
3556  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3557    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3558  }
3559  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3560    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3561  }
3562
3563  DECLARE_INSTRUCTION(LessThanOrEqual);
3564
3565  IfCondition GetCondition() const OVERRIDE {
3566    return kCondLE;
3567  }
3568
3569  IfCondition GetOppositeCondition() const OVERRIDE {
3570    return kCondGT;
3571  }
3572
3573 private:
3574  template <typename T> static bool Compute(T x, T y) { return x <= y; }
3575
3576  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
3577};
3578
3579class HGreaterThan FINAL : public HCondition {
3580 public:
3581  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3582      : HCondition(first, second, dex_pc) {}
3583
3584  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3585    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3586  }
3587  // In the following Evaluate methods, a HCompare instruction has
3588  // been merged into this HGreaterThan instruction; evaluate it as
3589  // `Compare(x, y) > 0`.
3590  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3591    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3592  }
3593  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3594    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3595  }
3596  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3597    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3598  }
3599
3600  DECLARE_INSTRUCTION(GreaterThan);
3601
3602  IfCondition GetCondition() const OVERRIDE {
3603    return kCondGT;
3604  }
3605
3606  IfCondition GetOppositeCondition() const OVERRIDE {
3607    return kCondLE;
3608  }
3609
3610 private:
3611  template <typename T> static bool Compute(T x, T y) { return x > y; }
3612
3613  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
3614};
3615
3616class HGreaterThanOrEqual FINAL : public HCondition {
3617 public:
3618  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3619      : HCondition(first, second, dex_pc) {}
3620
3621  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3622    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3623  }
3624  // In the following Evaluate methods, a HCompare instruction has
3625  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3626  // `Compare(x, y) >= 0`.
3627  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3628    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3629  }
3630  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3631    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3632  }
3633  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3634    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3635  }
3636
3637  DECLARE_INSTRUCTION(GreaterThanOrEqual);
3638
3639  IfCondition GetCondition() const OVERRIDE {
3640    return kCondGE;
3641  }
3642
3643  IfCondition GetOppositeCondition() const OVERRIDE {
3644    return kCondLT;
3645  }
3646
3647 private:
3648  template <typename T> static bool Compute(T x, T y) { return x >= y; }
3649
3650  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
3651};
3652
3653class HBelow FINAL : public HCondition {
3654 public:
3655  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3656      : HCondition(first, second, dex_pc) {}
3657
3658  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3659    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3660  }
3661  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3662    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3663  }
3664  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3665                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3666    LOG(FATAL) << DebugName() << " is not defined for float values";
3667    UNREACHABLE();
3668  }
3669  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3670                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3671    LOG(FATAL) << DebugName() << " is not defined for double values";
3672    UNREACHABLE();
3673  }
3674
3675  DECLARE_INSTRUCTION(Below);
3676
3677  IfCondition GetCondition() const OVERRIDE {
3678    return kCondB;
3679  }
3680
3681  IfCondition GetOppositeCondition() const OVERRIDE {
3682    return kCondAE;
3683  }
3684
3685 private:
3686  template <typename T> static bool Compute(T x, T y) {
3687    return MakeUnsigned(x) < MakeUnsigned(y);
3688  }
3689
3690  DISALLOW_COPY_AND_ASSIGN(HBelow);
3691};
3692
3693class HBelowOrEqual FINAL : public HCondition {
3694 public:
3695  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3696      : HCondition(first, second, dex_pc) {}
3697
3698  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3699    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3700  }
3701  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3702    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3703  }
3704  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3705                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3706    LOG(FATAL) << DebugName() << " is not defined for float values";
3707    UNREACHABLE();
3708  }
3709  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3710                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3711    LOG(FATAL) << DebugName() << " is not defined for double values";
3712    UNREACHABLE();
3713  }
3714
3715  DECLARE_INSTRUCTION(BelowOrEqual);
3716
3717  IfCondition GetCondition() const OVERRIDE {
3718    return kCondBE;
3719  }
3720
3721  IfCondition GetOppositeCondition() const OVERRIDE {
3722    return kCondA;
3723  }
3724
3725 private:
3726  template <typename T> static bool Compute(T x, T y) {
3727    return MakeUnsigned(x) <= MakeUnsigned(y);
3728  }
3729
3730  DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
3731};
3732
3733class HAbove FINAL : public HCondition {
3734 public:
3735  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3736      : HCondition(first, second, dex_pc) {}
3737
3738  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3739    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3740  }
3741  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3742    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3743  }
3744  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3745                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3746    LOG(FATAL) << DebugName() << " is not defined for float values";
3747    UNREACHABLE();
3748  }
3749  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3750                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3751    LOG(FATAL) << DebugName() << " is not defined for double values";
3752    UNREACHABLE();
3753  }
3754
3755  DECLARE_INSTRUCTION(Above);
3756
3757  IfCondition GetCondition() const OVERRIDE {
3758    return kCondA;
3759  }
3760
3761  IfCondition GetOppositeCondition() const OVERRIDE {
3762    return kCondBE;
3763  }
3764
3765 private:
3766  template <typename T> static bool Compute(T x, T y) {
3767    return MakeUnsigned(x) > MakeUnsigned(y);
3768  }
3769
3770  DISALLOW_COPY_AND_ASSIGN(HAbove);
3771};
3772
3773class HAboveOrEqual FINAL : public HCondition {
3774 public:
3775  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3776      : HCondition(first, second, dex_pc) {}
3777
3778  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3779    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3780  }
3781  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3782    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3783  }
3784  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3785                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3786    LOG(FATAL) << DebugName() << " is not defined for float values";
3787    UNREACHABLE();
3788  }
3789  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3790                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3791    LOG(FATAL) << DebugName() << " is not defined for double values";
3792    UNREACHABLE();
3793  }
3794
3795  DECLARE_INSTRUCTION(AboveOrEqual);
3796
3797  IfCondition GetCondition() const OVERRIDE {
3798    return kCondAE;
3799  }
3800
3801  IfCondition GetOppositeCondition() const OVERRIDE {
3802    return kCondB;
3803  }
3804
3805 private:
3806  template <typename T> static bool Compute(T x, T y) {
3807    return MakeUnsigned(x) >= MakeUnsigned(y);
3808  }
3809
3810  DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
3811};
3812
// Instruction to check how two inputs compare to each other.
// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
class HCompare FINAL : public HBinaryOperation {
 public:
  // Note that `comparison_type` is the type of comparison performed
  // between the comparison's inputs, not the type of the instantiated
  // HCompare instruction (which is always Primitive::kPrimInt).
  HCompare(Primitive::Type comparison_type,
           HInstruction* first,
           HInstruction* second,
           ComparisonBias bias,
           uint32_t dex_pc)
      : HBinaryOperation(Primitive::kPrimInt,
                         first,
                         second,
                         SideEffectsForArchRuntimeCalls(comparison_type),
                         dex_pc) {
    SetPackedField<ComparisonBiasField>(bias);
    // Both inputs' primitive kind must match the requested comparison type.
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(first->GetType()));
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(second->GetType()));
  }

  // Three-way comparison of two totally ordered (integral) values.
  template <typename T>
  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way comparison of two floating-point values. An unordered result
  // (at least one operand is NaN) is resolved using the comparison bias.
  template <typename T>
  int32_t ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    // Note that there is no "cmp-int" Dex instruction so we shouldn't
    // reach this code path when processing a freshly built HIR
    // graph. However HCompare integer instructions can be synthesized
    // by the instruction simplifier to implement IntegerCompare and
    // IntegerSignum intrinsics, so we have to handle this case.
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  // Two compares are interchangeable only if all packed fields agree,
  // in particular the comparison bias.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCompare()->GetPackedFields();
  }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }

  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
  // Only meaningful for floating-point comparisons.
  bool IsGtBias() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    return GetBias() == ComparisonBias::kGtBias;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type ATTRIBUTE_UNUSED) {
    // Comparisons do not require a runtime call in any back end.
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(Compare);

 protected:
  // Packed-field layout: the comparison bias is stored right after the
  // bits used by HExpression.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfComparePackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Return an integer constant containing the result of a comparison evaluated at compile time.
  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
    DCHECK(value == -1 || value == 0 || value == 1) << value;
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCompare);
};
3903
// Allocates a new (non-array) object instance of the class given by the
// HLoadClass input (possibly behind an HClinitCheck).
class HNewInstance FINAL : public HExpression<1> {
 public:
  HNewInstance(HInstruction* cls,
               uint32_t dex_pc,
               dex::TypeIndex type_index,
               const DexFile& dex_file,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetPackedFlag<kFlagFinalizable>(finalizable);
    SetRawInputAt(0, cls);
  }

  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // Can throw errors when out-of-memory or if it's not instantiable/accessible.
  bool CanThrow() const OVERRIDE { return true; }

  // Whether the selected entrypoint is the checking allocation path.
  bool NeedsChecks() const {
    return entrypoint_ == kQuickAllocObjectWithChecks;
  }

  // Whether the allocated class is finalizable (stored as a packed flag).
  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }

  // A successful allocation never yields null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  // Replace the runtime entrypoint used for this allocation.
  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  // Returns the HLoadClass input, looking through an intervening
  // HClinitCheck if one was inserted.
  HLoadClass* GetLoadClass() const {
    HInstruction* input = InputAt(0);
    if (input->IsClinitCheck()) {
      input = input->InputAt(0);
    }
    DCHECK(input->IsLoadClass());
    return input->AsLoadClass();
  }

  bool IsStringAlloc() const;

  DECLARE_INSTRUCTION(NewInstance);

 private:
  // Packed-flag layout: one "finalizable" bit after the HExpression bits.
  static constexpr size_t kFlagFinalizable = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const dex::TypeIndex type_index_;
  const DexFile& dex_file_;
  QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
};
3968
// Whether an intrinsic still requires an environment or a dex cache;
// passed to HInvoke::SetIntrinsic() when an invoke is recognized.
enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};
3973
// Coarse classification of an intrinsic's heap side effects;
// passed to HInvoke::SetIntrinsic() when an invoke is recognized.
enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};
3980
// Whether an intrinsic may throw; passed to HInvoke::SetIntrinsic()
// when an invoke is recognized.
enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
3985
// Abstract base class for all method-invocation instructions. The inputs
// are the call arguments, optionally followed by instruction-specific
// extra inputs (see GetNumberOfArguments()).
class HInvoke : public HVariableInputSizeInstruction {
 public:
  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  // The instruction's type is the invoked method's return type,
  // stored as a packed field.
  Primitive::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  InvokeType GetInvokeType() const {
    return GetPackedField<InvokeTypeField>();
  }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  // Mark this invoke as a recognized intrinsic, updating its environment,
  // side-effect and exception information accordingly.
  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }

  // Only intrinsics without write side effects may be moved.
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic() && !DoesAnyWrite(); }

  // Two invokes compare equal only when both are the same known intrinsic;
  // generic (non-intrinsic) invokes never compare equal.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  ArtMethod* GetResolvedMethod() const { return resolved_method_; }
  void SetResolvedMethod(ArtMethod* method) { resolved_method_ = method; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed-field layout: invoke type, then return type, then the
  // can-throw flag, following the generic instruction bits.
  static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldReturnType =
      kFieldInvokeType + kFieldInvokeTypeSize;
  static constexpr size_t kFieldReturnTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
  using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>;

  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          Primitive::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          ArtMethod* resolved_method,
          InvokeType invoke_type)
    : HVariableInputSizeInstruction(
          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
          dex_pc,
          arena,
          number_of_arguments + number_of_other_inputs,
          kArenaAllocInvokeInputs),
      number_of_arguments_(number_of_arguments),
      resolved_method_(resolved_method),
      dex_method_index_(dex_method_index),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
    SetPackedField<ReturnTypeField>(return_type);
    SetPackedField<InvokeTypeField>(invoke_type);
    // Conservatively assume the call can throw; SetCanThrow() can clear this.
    SetPackedFlag<kFlagCanThrow>(true);
  }

  uint32_t number_of_arguments_;
  ArtMethod* resolved_method_;
  const uint32_t dex_method_index_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};
4095
// Invoke of a method with no resolved ArtMethod* at compile time
// (the HInvoke base is constructed with a null resolved method);
// resolution is presumably deferred to runtime.
class HInvokeUnresolved FINAL : public HInvoke {
 public:
  HInvokeUnresolved(ArenaAllocator* arena,
                    uint32_t number_of_arguments,
                    Primitive::Type return_type,
                    uint32_t dex_pc,
                    uint32_t dex_method_index,
                    InvokeType invoke_type)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                nullptr,  // No resolved method.
                invoke_type) {
  }

  DECLARE_INSTRUCTION(InvokeUnresolved);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
};
4119
// Invoke of a signature-polymorphic method (invoke-polymorphic),
// modelled with the kVirtual invoke type and no resolved method.
class HInvokePolymorphic FINAL : public HInvoke {
 public:
  HInvokePolymorphic(ArenaAllocator* arena,
                     uint32_t number_of_arguments,
                     Primitive::Type return_type,
                     uint32_t dex_pc,
                     uint32_t dex_method_index)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                nullptr,  // No resolved method.
                kVirtual) {}

  DECLARE_INSTRUCTION(InvokePolymorphic);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokePolymorphic);
};
4141
4142class HInvokeStaticOrDirect FINAL : public HInvoke {
4143 public:
4144  // Requirements of this method call regarding the class
4145  // initialization (clinit) check of its declaring class.
4146  enum class ClinitCheckRequirement {
4147    kNone,      // Class already initialized.
4148    kExplicit,  // Static call having explicit clinit check as last input.
4149    kImplicit,  // Static call implicitly requiring a clinit check.
4150    kLast = kImplicit
4151  };
4152
4153  // Determines how to load the target ArtMethod*.
4154  enum class MethodLoadKind {
4155    // Use a String init ArtMethod* loaded from Thread entrypoints.
4156    kStringInit,
4157
4158    // Use the method's own ArtMethod* loaded by the register allocator.
4159    kRecursive,
4160
4161    // Use PC-relative boot image ArtMethod* address that will be known at link time.
4162    // Used for boot image methods referenced by boot image code.
4163    kBootImageLinkTimePcRelative,
4164
4165    // Use ArtMethod* at a known address, embed the direct address in the code.
4166    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
4167    kDirectAddress,
4168
4169    // Load from resolved methods array in the dex cache using a PC-relative load.
4170    // Used when we need to use the dex cache, for example for invoke-static that
4171    // may cause class initialization (the entry may point to a resolution method),
4172    // and we know that we can access the dex cache arrays using a PC-relative load.
4173    kDexCachePcRelative,
4174
4175    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
4176    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
4177    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
4178    // or unimplemented or impractical (i.e. slow) on a particular architecture.
4179    kDexCacheViaMethod,
4180  };
4181
4182  // Determines the location of the code pointer.
4183  enum class CodePtrLocation {
4184    // Recursive call, use local PC-relative call instruction.
4185    kCallSelf,
4186
4187    // Use code pointer from the ArtMethod*.
4188    // Used when we don't know the target code. This is also the last-resort-kind used when
4189    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
4190    kCallArtMethod,
4191  };
4192
4193  struct DispatchInfo {
4194    MethodLoadKind method_load_kind;
4195    CodePtrLocation code_ptr_location;
4196    // The method load data holds
4197    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
4198    //     Note that there are multiple string init methods, each having its own offset.
4199    //   - the method address for kDirectAddress
4200    //   - the dex cache arrays offset for kDexCachePcRel.
4201    uint64_t method_load_data;
4202  };
4203
4204  HInvokeStaticOrDirect(ArenaAllocator* arena,
4205                        uint32_t number_of_arguments,
4206                        Primitive::Type return_type,
4207                        uint32_t dex_pc,
4208                        uint32_t method_index,
4209                        ArtMethod* resolved_method,
4210                        DispatchInfo dispatch_info,
4211                        InvokeType invoke_type,
4212                        MethodReference target_method,
4213                        ClinitCheckRequirement clinit_check_requirement)
4214      : HInvoke(arena,
4215                number_of_arguments,
4216                // There is potentially one extra argument for the HCurrentMethod node, and
4217                // potentially one other if the clinit check is explicit, and potentially
4218                // one other if the method is a string factory.
4219                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
4220                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
4221                return_type,
4222                dex_pc,
4223                method_index,
4224                resolved_method,
4225                invoke_type),
4226        target_method_(target_method),
4227        dispatch_info_(dispatch_info) {
4228    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
4229  }
4230
4231  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
4232    bool had_current_method_input = HasCurrentMethodInput();
4233    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);
4234
4235    // Using the current method is the default and once we find a better
4236    // method load kind, we should not go back to using the current method.
4237    DCHECK(had_current_method_input || !needs_current_method_input);
4238
4239    if (had_current_method_input && !needs_current_method_input) {
4240      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
4241      RemoveInputAt(GetSpecialInputIndex());
4242    }
4243    dispatch_info_ = dispatch_info;
4244  }
4245
4246  DispatchInfo GetDispatchInfo() const {
4247    return dispatch_info_;
4248  }
4249
4250  void AddSpecialInput(HInstruction* input) {
4251    // We allow only one special input.
4252    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
4253    DCHECK(InputCount() == GetSpecialInputIndex() ||
4254           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
4255    InsertInputAt(GetSpecialInputIndex(), input);
4256  }
4257
4258  using HInstruction::GetInputRecords;  // Keep the const version visible.
4259  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
4260    ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
4261    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
4262      DCHECK(!input_records.empty());
4263      DCHECK_GT(input_records.size(), GetNumberOfArguments());
4264      HInstruction* last_input = input_records.back().GetInstruction();
4265      // Note: `last_input` may be null during arguments setup.
4266      if (last_input != nullptr) {
4267        // `last_input` is the last input of a static invoke marked as having
4268        // an explicit clinit check. It must either be:
4269        // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
4270        // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
4271        DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
4272      }
4273    }
4274    return input_records;
4275  }
4276
4277  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
4278    // We access the method via the dex cache so we can't do an implicit null check.
4279    // TODO: for intrinsics we can generate implicit null checks.
4280    return false;
4281  }
4282
4283  bool CanBeNull() const OVERRIDE {
4284    return GetPackedField<ReturnTypeField>() == Primitive::kPrimNot && !IsStringInit();
4285  }
4286
4287  // Get the index of the special input, if any.
4288  //
4289  // If the invoke HasCurrentMethodInput(), the "special input" is the current
4290  // method pointer; otherwise there may be one platform-specific special input,
4291  // such as PC-relative addressing base.
4292  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
4293  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }
4294
4295  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
4296  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
4297  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
4298  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
4299  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
4300  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
4301  bool HasPcRelativeDexCache() const {
4302    return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
4303  }
4304  bool HasPcRelativeMethodLoadKind() const {
4305    return GetMethodLoadKind() == MethodLoadKind::kBootImageLinkTimePcRelative ||
4306           GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
4307  }
4308  bool HasCurrentMethodInput() const {
4309    // This function can be called only after the invoke has been fully initialized by the builder.
4310    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
4311      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
4312      return true;
4313    } else {
4314      DCHECK(InputCount() == GetSpecialInputIndex() ||
4315             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
4316      return false;
4317    }
4318  }
4319
4320  QuickEntrypointEnum GetStringInitEntryPoint() const {
4321    DCHECK(IsStringInit());
4322    return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
4323  }
4324
4325  uint64_t GetMethodAddress() const {
4326    DCHECK(HasMethodAddress());
4327    return dispatch_info_.method_load_data;
4328  }
4329
4330  uint32_t GetDexCacheArrayOffset() const {
4331    DCHECK(HasPcRelativeDexCache());
4332    return dispatch_info_.method_load_data;
4333  }
4334
4335  const DexFile& GetDexFileForPcRelativeDexCache() const;
4336
4337  ClinitCheckRequirement GetClinitCheckRequirement() const {
4338    return GetPackedField<ClinitCheckRequirementField>();
4339  }
4340
4341  // Is this instruction a call to a static method?
4342  bool IsStatic() const {
4343    return GetInvokeType() == kStatic;
4344  }
4345
4346  MethodReference GetTargetMethod() const {
4347    return target_method_;
4348  }
4349
4350  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
4351  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
4352  // instruction; only relevant for static calls with explicit clinit check.
4353  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
4354    DCHECK(IsStaticWithExplicitClinitCheck());
4355    size_t last_input_index = inputs_.size() - 1u;
4356    HInstruction* last_input = inputs_.back().GetInstruction();
4357    DCHECK(last_input != nullptr);
4358    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
4359    RemoveAsUserOfInput(last_input_index);
4360    inputs_.pop_back();
4361    SetPackedField<ClinitCheckRequirementField>(new_requirement);
4362    DCHECK(!IsStaticWithExplicitClinitCheck());
4363  }
4364
4365  // Is this a call to a static method whose declaring class has an
4366  // explicit initialization check in the graph?
4367  bool IsStaticWithExplicitClinitCheck() const {
4368    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
4369  }
4370
4371  // Is this a call to a static method whose declaring class has an
4372  // implicit intialization check requirement?
4373  bool IsStaticWithImplicitClinitCheck() const {
4374    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
4375  }
4376
4377  // Does this method load kind need the current method as an input?
4378  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
4379    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
4380  }
4381
4382  DECLARE_INSTRUCTION(InvokeStaticOrDirect);
4383
4384 private:
4385  static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
4386  static constexpr size_t kFieldClinitCheckRequirementSize =
4387      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
4388  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
4389      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
4390  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
4391                "Too many packed fields.");
4392  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
4393                                               kFieldClinitCheckRequirement,
4394                                               kFieldClinitCheckRequirementSize>;
4395
4396  // Cached values of the resolved method, to avoid needing the mutator lock.
4397  MethodReference target_method_;
4398  DispatchInfo dispatch_info_;
4399
4400  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
4401};
4402std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
4403std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4404
// Invoke of a virtual method, dispatched via the resolved method's
// vtable index.
class HInvokeVirtual FINAL : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* arena,
                 uint32_t number_of_arguments,
                 Primitive::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 ArtMethod* resolved_method,
                 uint32_t vtable_index)
      : HInvoke(arena,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kVirtual),
        vtable_index_(vtable_index) {}

  // A few intrinsics are known to never return null; everything else
  // falls back to the generic HInvoke answer.
  bool CanBeNull() const OVERRIDE {
    switch (GetIntrinsic()) {
      case Intrinsics::kThreadCurrentThread:
      case Intrinsics::kStringBufferAppend:
      case Intrinsics::kStringBufferToString:
      case Intrinsics::kStringBuilderAppend:
      case Intrinsics::kStringBuilderToString:
        return false;
      default:
        return HInvoke::CanBeNull();
    }
  }

  // An implicit null check is possible only on the receiver (input 0) and
  // only when the invoke has not been lowered to an intrinsic.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 private:
  // Cached vtable index of the resolved method, to avoid needing the mutator lock.
  const uint32_t vtable_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
};
4452
// Invoke of an interface method, dispatched via the resolved method's
// IMT (interface method table) index.
class HInvokeInterface FINAL : public HInvoke {
 public:
  HInvokeInterface(ArenaAllocator* arena,
                   uint32_t number_of_arguments,
                   Primitive::Type return_type,
                   uint32_t dex_pc,
                   uint32_t dex_method_index,
                   ArtMethod* resolved_method,
                   uint32_t imt_index)
      : HInvoke(arena,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kInterface),
        imt_index_(imt_index) {}

  // An implicit null check is possible only on the receiver (input 0) and
  // only when the invoke has not been lowered to an intrinsic.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    // The assembly stub currently needs it.
    return true;
  }

  uint32_t GetImtIndex() const { return imt_index_; }
  // Returns the same value as HInvoke::GetDexMethodIndex().
  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  DECLARE_INSTRUCTION(InvokeInterface);

 private:
  // Cached IMT index of the resolved method, to avoid needing the mutator lock.
  const uint32_t imt_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
};
4493
4494class HNeg FINAL : public HUnaryOperation {
4495 public:
4496  HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4497      : HUnaryOperation(result_type, input, dex_pc) {
4498    DCHECK_EQ(result_type, Primitive::PrimitiveKind(input->GetType()));
4499  }
4500
4501  template <typename T> static T Compute(T x) { return -x; }
4502
4503  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4504    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4505  }
4506  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4507    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4508  }
4509  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4510    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4511  }
4512  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4513    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4514  }
4515
4516  DECLARE_INSTRUCTION(Neg);
4517
4518 private:
4519  DISALLOW_COPY_AND_ASSIGN(HNeg);
4520};
4521
4522class HNewArray FINAL : public HExpression<2> {
4523 public:
4524  HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc)
4525      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc) {
4526    SetRawInputAt(0, cls);
4527    SetRawInputAt(1, length);
4528  }
4529
4530  // Calls runtime so needs an environment.
4531  bool NeedsEnvironment() const OVERRIDE { return true; }
4532
4533  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
4534  bool CanThrow() const OVERRIDE { return true; }
4535
4536  bool CanBeNull() const OVERRIDE { return false; }
4537
4538  HLoadClass* GetLoadClass() const {
4539    DCHECK(InputAt(0)->IsLoadClass());
4540    return InputAt(0)->AsLoadClass();
4541  }
4542
4543  HInstruction* GetLength() const {
4544    return InputAt(1);
4545  }
4546
4547  DECLARE_INSTRUCTION(NewArray);
4548
4549 private:
4550  DISALLOW_COPY_AND_ASSIGN(HNewArray);
4551};
4552
4553class HAdd FINAL : public HBinaryOperation {
4554 public:
4555  HAdd(Primitive::Type result_type,
4556       HInstruction* left,
4557       HInstruction* right,
4558       uint32_t dex_pc = kNoDexPc)
4559      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4560
4561  bool IsCommutative() const OVERRIDE { return true; }
4562
4563  template <typename T> static T Compute(T x, T y) { return x + y; }
4564
4565  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4566    return GetBlock()->GetGraph()->GetIntConstant(
4567        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4568  }
4569  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4570    return GetBlock()->GetGraph()->GetLongConstant(
4571        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4572  }
4573  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4574    return GetBlock()->GetGraph()->GetFloatConstant(
4575        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4576  }
4577  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4578    return GetBlock()->GetGraph()->GetDoubleConstant(
4579        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4580  }
4581
4582  DECLARE_INSTRUCTION(Add);
4583
4584 private:
4585  DISALLOW_COPY_AND_ASSIGN(HAdd);
4586};
4587
4588class HSub FINAL : public HBinaryOperation {
4589 public:
4590  HSub(Primitive::Type result_type,
4591       HInstruction* left,
4592       HInstruction* right,
4593       uint32_t dex_pc = kNoDexPc)
4594      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4595
4596  template <typename T> static T Compute(T x, T y) { return x - y; }
4597
4598  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4599    return GetBlock()->GetGraph()->GetIntConstant(
4600        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4601  }
4602  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4603    return GetBlock()->GetGraph()->GetLongConstant(
4604        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4605  }
4606  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4607    return GetBlock()->GetGraph()->GetFloatConstant(
4608        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4609  }
4610  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4611    return GetBlock()->GetGraph()->GetDoubleConstant(
4612        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4613  }
4614
4615  DECLARE_INSTRUCTION(Sub);
4616
4617 private:
4618  DISALLOW_COPY_AND_ASSIGN(HSub);
4619};
4620
4621class HMul FINAL : public HBinaryOperation {
4622 public:
4623  HMul(Primitive::Type result_type,
4624       HInstruction* left,
4625       HInstruction* right,
4626       uint32_t dex_pc = kNoDexPc)
4627      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4628
4629  bool IsCommutative() const OVERRIDE { return true; }
4630
4631  template <typename T> static T Compute(T x, T y) { return x * y; }
4632
4633  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4634    return GetBlock()->GetGraph()->GetIntConstant(
4635        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4636  }
4637  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4638    return GetBlock()->GetGraph()->GetLongConstant(
4639        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4640  }
4641  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4642    return GetBlock()->GetGraph()->GetFloatConstant(
4643        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4644  }
4645  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4646    return GetBlock()->GetGraph()->GetDoubleConstant(
4647        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4648  }
4649
4650  DECLARE_INSTRUCTION(Mul);
4651
4652 private:
4653  DISALLOW_COPY_AND_ASSIGN(HMul);
4654};
4655
4656class HDiv FINAL : public HBinaryOperation {
4657 public:
4658  HDiv(Primitive::Type result_type,
4659       HInstruction* left,
4660       HInstruction* right,
4661       uint32_t dex_pc)
4662      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4663
4664  template <typename T>
4665  T ComputeIntegral(T x, T y) const {
4666    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
4667    // Our graph structure ensures we never have 0 for `y` during
4668    // constant folding.
4669    DCHECK_NE(y, 0);
4670    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4671    return (y == -1) ? -x : x / y;
4672  }
4673
4674  template <typename T>
4675  T ComputeFP(T x, T y) const {
4676    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
4677    return x / y;
4678  }
4679
4680  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4681    return GetBlock()->GetGraph()->GetIntConstant(
4682        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4683  }
4684  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4685    return GetBlock()->GetGraph()->GetLongConstant(
4686        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4687  }
4688  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4689    return GetBlock()->GetGraph()->GetFloatConstant(
4690        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4691  }
4692  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4693    return GetBlock()->GetGraph()->GetDoubleConstant(
4694        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4695  }
4696
4697  DECLARE_INSTRUCTION(Div);
4698
4699 private:
4700  DISALLOW_COPY_AND_ASSIGN(HDiv);
4701};
4702
4703class HRem FINAL : public HBinaryOperation {
4704 public:
4705  HRem(Primitive::Type result_type,
4706       HInstruction* left,
4707       HInstruction* right,
4708       uint32_t dex_pc)
4709      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4710
4711  template <typename T>
4712  T ComputeIntegral(T x, T y) const {
4713    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
4714    // Our graph structure ensures we never have 0 for `y` during
4715    // constant folding.
4716    DCHECK_NE(y, 0);
4717    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4718    return (y == -1) ? 0 : x % y;
4719  }
4720
4721  template <typename T>
4722  T ComputeFP(T x, T y) const {
4723    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
4724    return std::fmod(x, y);
4725  }
4726
4727  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4728    return GetBlock()->GetGraph()->GetIntConstant(
4729        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4730  }
4731  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4732    return GetBlock()->GetGraph()->GetLongConstant(
4733        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4734  }
4735  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4736    return GetBlock()->GetGraph()->GetFloatConstant(
4737        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4738  }
4739  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4740    return GetBlock()->GetGraph()->GetDoubleConstant(
4741        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4742  }
4743
4744  DECLARE_INSTRUCTION(Rem);
4745
4746 private:
4747  DISALLOW_COPY_AND_ASSIGN(HRem);
4748};
4749
// Guard emitted before a Div/Rem whose divisor may be zero; throws
// ArithmeticException at runtime when the input is 0.
class HDivZeroCheck FINAL : public HExpression<1> {
 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
  // constructor.
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // The check is transparent: it reports the type of the value it guards.
  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Two checks are interchangeable whenever their inputs are; there is no
  // per-instruction state to compare.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Needs an environment and can throw because it may raise the exception
  // mentioned above.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};
4775
4776class HShl FINAL : public HBinaryOperation {
4777 public:
4778  HShl(Primitive::Type result_type,
4779       HInstruction* value,
4780       HInstruction* distance,
4781       uint32_t dex_pc = kNoDexPc)
4782      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4783    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4784    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4785  }
4786
4787  template <typename T>
4788  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4789    return value << (distance & max_shift_distance);
4790  }
4791
4792  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4793    return GetBlock()->GetGraph()->GetIntConstant(
4794        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4795  }
4796  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4797    return GetBlock()->GetGraph()->GetLongConstant(
4798        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4799  }
4800  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4801                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4802    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4803    UNREACHABLE();
4804  }
4805  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4806                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4807    LOG(FATAL) << DebugName() << " is not defined for float values";
4808    UNREACHABLE();
4809  }
4810  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4811                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4812    LOG(FATAL) << DebugName() << " is not defined for double values";
4813    UNREACHABLE();
4814  }
4815
4816  DECLARE_INSTRUCTION(Shl);
4817
4818 private:
4819  DISALLOW_COPY_AND_ASSIGN(HShl);
4820};
4821
class HShr FINAL : public HBinaryOperation {
 public:
  HShr(Primitive::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    // The shifted value and the result share a primitive kind; the shift
    // distance is always int-kind.
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  // Signed right shift; only the low bits of `distance` (masked with
  // `max_shift_distance`) are used.
  // NOTE(review): `>>` on a negative signed value is implementation-defined
  // in C++ pre-C++20; this relies on the host compiler performing an
  // arithmetic (sign-extending) shift — confirm for new toolchains.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value >> (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // The remaining overloads are required by the base class but must never be
  // reached: the shift distance is always an int constant.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};
4867
4868class HUShr FINAL : public HBinaryOperation {
4869 public:
4870  HUShr(Primitive::Type result_type,
4871        HInstruction* value,
4872        HInstruction* distance,
4873        uint32_t dex_pc = kNoDexPc)
4874      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4875    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4876    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4877  }
4878
4879  template <typename T>
4880  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4881    typedef typename std::make_unsigned<T>::type V;
4882    V ux = static_cast<V>(value);
4883    return static_cast<T>(ux >> (distance & max_shift_distance));
4884  }
4885
4886  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4887    return GetBlock()->GetGraph()->GetIntConstant(
4888        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4889  }
4890  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4891    return GetBlock()->GetGraph()->GetLongConstant(
4892        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4893  }
4894  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4895                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4896    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4897    UNREACHABLE();
4898  }
4899  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4900                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4901    LOG(FATAL) << DebugName() << " is not defined for float values";
4902    UNREACHABLE();
4903  }
4904  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4905                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4906    LOG(FATAL) << DebugName() << " is not defined for double values";
4907    UNREACHABLE();
4908  }
4909
4910  DECLARE_INSTRUCTION(UShr);
4911
4912 private:
4913  DISALLOW_COPY_AND_ASSIGN(HUShr);
4914};
4915
4916class HAnd FINAL : public HBinaryOperation {
4917 public:
4918  HAnd(Primitive::Type result_type,
4919       HInstruction* left,
4920       HInstruction* right,
4921       uint32_t dex_pc = kNoDexPc)
4922      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4923
4924  bool IsCommutative() const OVERRIDE { return true; }
4925
4926  template <typename T> static T Compute(T x, T y) { return x & y; }
4927
4928  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4929    return GetBlock()->GetGraph()->GetIntConstant(
4930        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4931  }
4932  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4933    return GetBlock()->GetGraph()->GetLongConstant(
4934        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4935  }
4936  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4937                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4938    LOG(FATAL) << DebugName() << " is not defined for float values";
4939    UNREACHABLE();
4940  }
4941  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4942                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4943    LOG(FATAL) << DebugName() << " is not defined for double values";
4944    UNREACHABLE();
4945  }
4946
4947  DECLARE_INSTRUCTION(And);
4948
4949 private:
4950  DISALLOW_COPY_AND_ASSIGN(HAnd);
4951};
4952
4953class HOr FINAL : public HBinaryOperation {
4954 public:
4955  HOr(Primitive::Type result_type,
4956      HInstruction* left,
4957      HInstruction* right,
4958      uint32_t dex_pc = kNoDexPc)
4959      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4960
4961  bool IsCommutative() const OVERRIDE { return true; }
4962
4963  template <typename T> static T Compute(T x, T y) { return x | y; }
4964
4965  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4966    return GetBlock()->GetGraph()->GetIntConstant(
4967        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4968  }
4969  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4970    return GetBlock()->GetGraph()->GetLongConstant(
4971        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4972  }
4973  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4974                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4975    LOG(FATAL) << DebugName() << " is not defined for float values";
4976    UNREACHABLE();
4977  }
4978  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4979                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4980    LOG(FATAL) << DebugName() << " is not defined for double values";
4981    UNREACHABLE();
4982  }
4983
4984  DECLARE_INSTRUCTION(Or);
4985
4986 private:
4987  DISALLOW_COPY_AND_ASSIGN(HOr);
4988};
4989
4990class HXor FINAL : public HBinaryOperation {
4991 public:
4992  HXor(Primitive::Type result_type,
4993       HInstruction* left,
4994       HInstruction* right,
4995       uint32_t dex_pc = kNoDexPc)
4996      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4997
4998  bool IsCommutative() const OVERRIDE { return true; }
4999
5000  template <typename T> static T Compute(T x, T y) { return x ^ y; }
5001
5002  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
5003    return GetBlock()->GetGraph()->GetIntConstant(
5004        Compute(x->GetValue(), y->GetValue()), GetDexPc());
5005  }
5006  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
5007    return GetBlock()->GetGraph()->GetLongConstant(
5008        Compute(x->GetValue(), y->GetValue()), GetDexPc());
5009  }
5010  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5011                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
5012    LOG(FATAL) << DebugName() << " is not defined for float values";
5013    UNREACHABLE();
5014  }
5015  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5016                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
5017    LOG(FATAL) << DebugName() << " is not defined for double values";
5018    UNREACHABLE();
5019  }
5020
5021  DECLARE_INSTRUCTION(Xor);
5022
5023 private:
5024  DISALLOW_COPY_AND_ASSIGN(HXor);
5025};
5026
5027class HRor FINAL : public HBinaryOperation {
5028 public:
5029  HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance)
5030    : HBinaryOperation(result_type, value, distance) {
5031    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
5032    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
5033  }
5034
5035  template <typename T>
5036  static T Compute(T value, int32_t distance, int32_t max_shift_value) {
5037    typedef typename std::make_unsigned<T>::type V;
5038    V ux = static_cast<V>(value);
5039    if ((distance & max_shift_value) == 0) {
5040      return static_cast<T>(ux);
5041    } else {
5042      const V reg_bits = sizeof(T) * 8;
5043      return static_cast<T>(ux >> (distance & max_shift_value)) |
5044                           (value << (reg_bits - (distance & max_shift_value)));
5045    }
5046  }
5047
5048  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
5049    return GetBlock()->GetGraph()->GetIntConstant(
5050        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5051  }
5052  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
5053    return GetBlock()->GetGraph()->GetLongConstant(
5054        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5055  }
5056  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5057                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5058    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5059    UNREACHABLE();
5060  }
5061  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5062                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5063    LOG(FATAL) << DebugName() << " is not defined for float values";
5064    UNREACHABLE();
5065  }
5066  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5067                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5068    LOG(FATAL) << DebugName() << " is not defined for double values";
5069    UNREACHABLE();
5070  }
5071
5072  DECLARE_INSTRUCTION(Ror);
5073
5074 private:
5075  DISALLOW_COPY_AND_ASSIGN(HRor);
5076};
5077
5078// The value of a parameter in this method. Its location depends on
5079// the calling convention.
class HParameterValue FINAL : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  dex::TypeIndex type_index,
                  uint8_t index,
                  Primitive::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    SetPackedFlag<kFlagIsThis>(is_this);
    // The implicit 'this' argument is never null; any other parameter is
    // conservatively considered nullable until an analysis refines it.
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  // Lets later passes tighten (or relax) the nullability recorded above.
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const dex::TypeIndex type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};
5121
5122class HNot FINAL : public HUnaryOperation {
5123 public:
5124  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5125      : HUnaryOperation(result_type, input, dex_pc) {}
5126
5127  bool CanBeMoved() const OVERRIDE { return true; }
5128  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5129    return true;
5130  }
5131
5132  template <typename T> static T Compute(T x) { return ~x; }
5133
5134  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
5135    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5136  }
5137  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
5138    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
5139  }
5140  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5141    LOG(FATAL) << DebugName() << " is not defined for float values";
5142    UNREACHABLE();
5143  }
5144  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5145    LOG(FATAL) << DebugName() << " is not defined for double values";
5146    UNREACHABLE();
5147  }
5148
5149  DECLARE_INSTRUCTION(Not);
5150
5151 private:
5152  DISALLOW_COPY_AND_ASSIGN(HNot);
5153};
5154
5155class HBooleanNot FINAL : public HUnaryOperation {
5156 public:
5157  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
5158      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}
5159
5160  bool CanBeMoved() const OVERRIDE { return true; }
5161  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5162    return true;
5163  }
5164
5165  template <typename T> static bool Compute(T x) {
5166    DCHECK(IsUint<1>(x)) << x;
5167    return !x;
5168  }
5169
5170  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
5171    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5172  }
5173  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5174    LOG(FATAL) << DebugName() << " is not defined for long values";
5175    UNREACHABLE();
5176  }
5177  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5178    LOG(FATAL) << DebugName() << " is not defined for float values";
5179    UNREACHABLE();
5180  }
5181  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5182    LOG(FATAL) << DebugName() << " is not defined for double values";
5183    UNREACHABLE();
5184  }
5185
5186  DECLARE_INSTRUCTION(BooleanNot);
5187
5188 private:
5189  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
5190};
5191
class HTypeConversion FINAL : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
    // Invariant: We should never generate a conversion to a Boolean value.
    DCHECK_NE(Primitive::kPrimBoolean, result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
  Primitive::Type GetResultType() const { return GetType(); }

  // A conversion has no side effects, so it is freely movable, and two
  // conversions with the same input and result type are interchangeable.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result.  If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};
5220
// Sentinel for "no register assigned"; -1 deliberately converts to the
// maximum uint32_t value.
static constexpr uint32_t kNoRegNumber = -1;
5222
// Guard emitted before a dereference whose receiver may be null; throws
// NullPointerException at runtime when the input is null.
class HNullCheck FINAL : public HExpression<1> {
 public:
  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
  // constructor.
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // No extra per-instruction state: equality reduces to input equality, so
  // redundant checks on the same value can be eliminated.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Needs an environment and can throw because it may raise the exception
  // mentioned above.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  // Past the check the value is known non-null.
  bool CanBeNull() const OVERRIDE { return false; }


  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};
5249
5250// Embeds an ArtField and all the information required by the compiler. We cache
5251// that information to avoid requiring the mutator lock every time we need it.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(ArtField* field,
            MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file)
      : field_(field),
        field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file) {}

  // Plain accessors over the cached, immutable field description.
  ArtField* GetField() const { return field_; }
  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }

 private:
  // All members are const: a FieldInfo is a snapshot taken at construction.
  ArtField* const field_;
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
};
5286
// Read of an instance field: input 0 is the object reference.
class HInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    ArtField* field,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, value);
  }

  // Volatile reads must stay in place; only non-volatile gets are movable.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Two gets are equivalent when they read the same offset (inputs are
  // compared separately by the caller).
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  // The read can serve as an implicit null check when it dereferences the
  // object at a small enough offset (within the unmapped first page).
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  // Mix the field offset into the hash so gets of different fields land in
  // different GVN buckets.
  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};
5336
// Write of an instance field: input 0 is the object, input 1 the value.
class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    ArtField* field,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // The stored value is conservatively nullable until an analysis clears
    // the flag (see ClearValueCanBeNull below).
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  // The write can serve as an implicit null check when it dereferences the
  // object at a small enough offset (within the unmapped first page).
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};
5386
// Reads an element from an array. Also used for String.charAt(), which the
// instruction simplifier rewrites into an HArrayGet (see kFlagIsStringCharAt).
// Inputs: the array reference (input 0) and the index (input 1).
class HArrayGet FINAL : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc,
            bool is_string_char_at = false)
      : HExpression(type, SideEffects::ArrayReadOfType(type), dex_pc) {
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Any two array gets with equal inputs read the same value.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, unless the array is the result of NewArray, the array access is always
    // preceded by some form of null NullCheck necessary for the bounds check, usually
    // implicit null check on the ArrayLength input to BoundsCheck or Deoptimize for
    // dynamic BCE. There are cases when these could be removed to produce better code.
    // If we ever add optimizations to do so we should allow an implicit check here
    // (as long as the address falls in the first page).
    //
    // As an example of such fancy optimization, we could eliminate BoundsCheck for
    //     a = cond ? new int[1] : null;
    //     a[0];  // The Phi does not need bounds check for either input.
    return false;
  }

  // Returns whether `other` is the "equivalent" of this access created for the
  // other primitive kind: the same array element at the same dex_pc, where one
  // of the two reads an int/long and the other a float/double. The DCHECKs
  // verify exactly that shape in debug builds.
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      if (Primitive::IsIntOrLongType(GetType())) {
        DCHECK(Primitive::IsFloatingPointType(other->GetType())) << other->GetType();
      } else {
        DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
        DCHECK(Primitive::IsIntOrLongType(other->GetType())) << other->GetType();
      }
    }
    return result;
  }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  // We treat a String as an array, creating the HArrayGet from String.charAt()
  // intrinsic in the instruction simplifier. We can always determine whether
  // a particular HArrayGet is actually a String.charAt() by looking at the type
  // of the input but that requires holding the mutator lock, so we prefer to use
  // a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};
5455
// Writes a value into an array element. Inputs: array (0), index (1),
// value (2). Storing a reference may require a type check, in which case the
// store can throw ArrayStoreException via a runtime call.
class HArraySet FINAL : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
    // Only reference stores can fail the array type check.
    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == Primitive::kPrimNot);
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
    // Make a best guess now, may be refined during SSA building.
    ComputeSideEffects();
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We call a runtime method to throw ArrayStoreException.
    return NeedsTypeCheck();
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  // Drops the type-check requirement; after this the store no longer needs an
  // environment and can no longer throw.
  void ClearNeedsTypeCheck() {
    SetPackedFlag<kFlagNeedsTypeCheck>(false);
  }

  void ClearValueCanBeNull() {
    SetPackedFlag<kFlagValueCanBeNull>(false);
  }

  // Records that the array's static (declared) type is Object[].
  void SetStaticTypeOfArrayIsObjectArray() {
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
  }

  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
  bool StaticTypeOfArrayIsObjectArray() const {
    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  Primitive::Type GetComponentType() const {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type_` is set during building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    Primitive::Type value_type = GetValue()->GetType();
    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
        ? value_type
        : GetRawExpectedComponentType();
  }

  // Returns the component type recorded at construction, without the
  // floating-point correction applied by GetComponentType().
  Primitive::Type GetRawExpectedComponentType() const {
    return GetPackedField<ExpectedComponentTypeField>();
  }

  // Recomputes the side effects from the (possibly refined) component type.
  void ComputeSideEffects() {
    Primitive::Type type = GetComponentType();
    SetSideEffects(SideEffects::ArrayWriteOfType(type).Union(
        SideEffectsForArchRuntimeCalls(type)));
  }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldExpectedComponentTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagNeedsTypeCheck =
      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
  static constexpr size_t kNumberOfArraySetPackedBits =
      kFlagStaticTypeOfArrayIsObjectArray + 1;
  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ExpectedComponentTypeField =
      BitField<Primitive::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};
5555
5556class HArrayLength FINAL : public HExpression<1> {
5557 public:
5558  HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
5559      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
5560    SetPackedFlag<kFlagIsStringLength>(is_string_length);
5561    // Note that arrays do not change length, so the instruction does not
5562    // depend on any write.
5563    SetRawInputAt(0, array);
5564  }
5565
5566  bool CanBeMoved() const OVERRIDE { return true; }
5567  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5568    return true;
5569  }
5570  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
5571    return obj == InputAt(0);
5572  }
5573
5574  bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }
5575
5576  DECLARE_INSTRUCTION(ArrayLength);
5577
5578 private:
5579  // We treat a String as an array, creating the HArrayLength from String.length()
5580  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
5581  // determine whether a particular HArrayLength is actually a String.length() by
5582  // looking at the type of the input but that requires holding the mutator lock, so
5583  // we prefer to use a flag, so that code generators don't need to do the locking.
5584  static constexpr size_t kFlagIsStringLength = kNumberOfExpressionPackedBits;
5585  static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
5586  static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
5587                "Too many packed fields.");
5588
5589  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
5590};
5591
5592class HBoundsCheck FINAL : public HExpression<2> {
5593 public:
5594  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
5595  // constructor.
5596  HBoundsCheck(HInstruction* index,
5597               HInstruction* length,
5598               uint32_t dex_pc,
5599               bool string_char_at = false)
5600      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
5601    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(index->GetType()));
5602    SetPackedFlag<kFlagIsStringCharAt>(string_char_at);
5603    SetRawInputAt(0, index);
5604    SetRawInputAt(1, length);
5605  }
5606
5607  bool CanBeMoved() const OVERRIDE { return true; }
5608  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5609    return true;
5610  }
5611
5612  bool NeedsEnvironment() const OVERRIDE { return true; }
5613
5614  bool CanThrow() const OVERRIDE { return true; }
5615
5616  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
5617
5618  HInstruction* GetIndex() const { return InputAt(0); }
5619
5620  DECLARE_INSTRUCTION(BoundsCheck);
5621
5622 private:
5623  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
5624
5625  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
5626};
5627
5628class HSuspendCheck FINAL : public HTemplateInstruction<0> {
5629 public:
5630  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
5631      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}
5632
5633  bool NeedsEnvironment() const OVERRIDE {
5634    return true;
5635  }
5636
5637  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
5638  SlowPathCode* GetSlowPath() const { return slow_path_; }
5639
5640  DECLARE_INSTRUCTION(SuspendCheck);
5641
5642 private:
5643  // Only used for code generation, in order to share the same slow path between back edges
5644  // of a same loop.
5645  SlowPathCode* slow_path_;
5646
5647  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
5648};
5649
5650// Pseudo-instruction which provides the native debugger with mapping information.
5651// It ensures that we can generate line number and local variables at this point.
5652class HNativeDebugInfo : public HTemplateInstruction<0> {
5653 public:
5654  explicit HNativeDebugInfo(uint32_t dex_pc)
5655      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}
5656
5657  bool NeedsEnvironment() const OVERRIDE {
5658    return true;
5659  }
5660
5661  DECLARE_INSTRUCTION(NativeDebugInfo);
5662
5663 private:
5664  DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
5665};
5666
5667/**
5668 * Instruction to load a Class object.
5669 */
5670class HLoadClass FINAL : public HInstruction {
5671 public:
5672  // Determines how to load the Class.
5673  enum class LoadKind {
5674    // We cannot load this class. See HSharpening::SharpenLoadClass.
5675    kInvalid = -1,
5676
5677    // Use the Class* from the method's own ArtMethod*.
5678    kReferrersClass,
5679
5680    // Use PC-relative boot image Class* address that will be known at link time.
5681    // Used for boot image classes referenced by boot image code.
5682    kBootImageLinkTimePcRelative,
5683
5684    // Use a known boot image Class* address, embedded in the code by the codegen.
5685    // Used for boot image classes referenced by apps in AOT- and JIT-compiled code.
5686    kBootImageAddress,
5687
5688    // Load from an entry in the .bss section using a PC-relative load.
5689    // Used for classes outside boot image when .bss is accessible with a PC-relative load.
5690    kBssEntry,
5691
5692    // Load from the root table associated with the JIT compiled method.
5693    kJitTableAddress,
5694
5695    // Load from resolved types array accessed through the class loaded from
5696    // the compiled method's own ArtMethod*. This is the default access type when
5697    // all other types are unavailable.
5698    kDexCacheViaMethod,
5699
5700    kLast = kDexCacheViaMethod
5701  };
5702
5703  HLoadClass(HCurrentMethod* current_method,
5704             dex::TypeIndex type_index,
5705             const DexFile& dex_file,
5706             Handle<mirror::Class> klass,
5707             bool is_referrers_class,
5708             uint32_t dex_pc,
5709             bool needs_access_check)
5710      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
5711        special_input_(HUserRecord<HInstruction*>(current_method)),
5712        type_index_(type_index),
5713        dex_file_(dex_file),
5714        klass_(klass),
5715        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
5716    // Referrers class should not need access check. We never inline unverified
5717    // methods so we can't possibly end up in this situation.
5718    DCHECK(!is_referrers_class || !needs_access_check);
5719
5720    SetPackedField<LoadKindField>(
5721        is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kDexCacheViaMethod);
5722    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
5723    SetPackedFlag<kFlagIsInBootImage>(false);
5724    SetPackedFlag<kFlagGenerateClInitCheck>(false);
5725  }
5726
5727  void SetLoadKind(LoadKind load_kind);
5728
5729  LoadKind GetLoadKind() const {
5730    return GetPackedField<LoadKindField>();
5731  }
5732
5733  bool CanBeMoved() const OVERRIDE { return true; }
5734
5735  bool InstructionDataEquals(const HInstruction* other) const;
5736
5737  size_t ComputeHashCode() const OVERRIDE { return type_index_.index_; }
5738
5739  bool CanBeNull() const OVERRIDE { return false; }
5740
5741  bool NeedsEnvironment() const OVERRIDE {
5742    return CanCallRuntime();
5743  }
5744
5745  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
5746    // The entrypoint the code generator is going to call does not do
5747    // clinit of the class.
5748    DCHECK(!NeedsAccessCheck());
5749    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
5750  }
5751
5752  bool CanCallRuntime() const {
5753    return NeedsAccessCheck() ||
5754           MustGenerateClinitCheck() ||
5755           GetLoadKind() == LoadKind::kDexCacheViaMethod ||
5756           GetLoadKind() == LoadKind::kBssEntry;
5757  }
5758
5759  bool CanThrow() const OVERRIDE {
5760    return NeedsAccessCheck() ||
5761           MustGenerateClinitCheck() ||
5762           // If the class is in the boot image, the lookup in the runtime call cannot throw.
5763           // This keeps CanThrow() consistent between non-PIC (using kBootImageAddress) and
5764           // PIC and subsequently avoids a DCE behavior dependency on the PIC option.
5765           ((GetLoadKind() == LoadKind::kDexCacheViaMethod ||
5766             GetLoadKind() == LoadKind::kBssEntry) &&
5767            !IsInBootImage());
5768  }
5769
5770  ReferenceTypeInfo GetLoadedClassRTI() {
5771    return loaded_class_rti_;
5772  }
5773
5774  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
5775    // Make sure we only set exact types (the loaded class should never be merged).
5776    DCHECK(rti.IsExact());
5777    loaded_class_rti_ = rti;
5778  }
5779
5780  dex::TypeIndex GetTypeIndex() const { return type_index_; }
5781  const DexFile& GetDexFile() const { return dex_file_; }
5782
5783  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
5784    return GetLoadKind() == LoadKind::kDexCacheViaMethod;
5785  }
5786
5787  static SideEffects SideEffectsForArchRuntimeCalls() {
5788    return SideEffects::CanTriggerGC();
5789  }
5790
5791  bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
5792  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
5793  bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
5794  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
5795
5796  void MarkInBootImage() {
5797    SetPackedFlag<kFlagIsInBootImage>(true);
5798  }
5799
5800  void AddSpecialInput(HInstruction* special_input);
5801
5802  using HInstruction::GetInputRecords;  // Keep the const version visible.
5803  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
5804    return ArrayRef<HUserRecord<HInstruction*>>(
5805        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
5806  }
5807
5808  Primitive::Type GetType() const OVERRIDE {
5809    return Primitive::kPrimNot;
5810  }
5811
5812  Handle<mirror::Class> GetClass() const {
5813    return klass_;
5814  }
5815
5816  DECLARE_INSTRUCTION(LoadClass);
5817
5818 private:
5819  static constexpr size_t kFlagNeedsAccessCheck    = kNumberOfGenericPackedBits;
5820  static constexpr size_t kFlagIsInBootImage       = kFlagNeedsAccessCheck + 1;
5821  // Whether this instruction must generate the initialization check.
5822  // Used for code generation.
5823  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
5824  static constexpr size_t kFieldLoadKind           = kFlagGenerateClInitCheck + 1;
5825  static constexpr size_t kFieldLoadKindSize =
5826      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
5827  static constexpr size_t kNumberOfLoadClassPackedBits = kFieldLoadKind + kFieldLoadKindSize;
5828  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
5829  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
5830
5831  static bool HasTypeReference(LoadKind load_kind) {
5832    return load_kind == LoadKind::kReferrersClass ||
5833        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5834        load_kind == LoadKind::kBssEntry ||
5835        load_kind == LoadKind::kDexCacheViaMethod;
5836  }
5837
5838  void SetLoadKindInternal(LoadKind load_kind);
5839
5840  // The special input is the HCurrentMethod for kDexCacheViaMethod or kReferrersClass.
5841  // For other load kinds it's empty or possibly some architecture-specific instruction
5842  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
5843  HUserRecord<HInstruction*> special_input_;
5844
5845  // A type index and dex file where the class can be accessed. The dex file can be:
5846  // - The compiling method's dex file if the class is defined there too.
5847  // - The compiling method's dex file if the class is referenced there.
5848  // - The dex file where the class is defined. When the load kind can only be
5849  //   kBssEntry or kDexCacheViaMethod, we cannot emit code for this `HLoadClass`.
5850  const dex::TypeIndex type_index_;
5851  const DexFile& dex_file_;
5852
5853  Handle<mirror::Class> klass_;
5854
5855  ReferenceTypeInfo loaded_class_rti_;
5856
5857  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
5858};
5859std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
5860
5861// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
5862inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
5863  // The special input is used for PC-relative loads on some architectures,
5864  // including literal pool loads, which are PC-relative too.
5865  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5866         GetLoadKind() == LoadKind::kBootImageAddress ||
5867         GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
5868  DCHECK(special_input_.GetInstruction() == nullptr);
5869  special_input_ = HUserRecord<HInstruction*>(special_input);
5870  special_input->AddUseAt(this, 0);
5871}
5872
// Instruction to load a String object.
class HLoadString FINAL : public HInstruction {
 public:
  // Determines how to load the String.
  enum class LoadKind {
    // Use PC-relative boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code.
    kBootImageLinkTimePcRelative,

    // Use a known boot image String* address, embedded in the code by the codegen.
    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
    kBootImageAddress,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for strings outside boot image when .bss is accessible with a PC-relative load.
    kBssEntry,

    // Load from the root table associated with the JIT compiled method.
    kJitTableAddress,

    // Load from resolved strings array accessed through the class loaded from
    // the compiled method's own ArtMethod*. This is the default access type when
    // all other types are unavailable.
    kDexCacheViaMethod,

    kLast = kDexCacheViaMethod,
  };

  HLoadString(HCurrentMethod* current_method,
              dex::StringIndex string_index,
              const DexFile& dex_file,
              uint32_t dex_pc)
      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        string_index_(string_index),
        dex_file_(dex_file) {
    // Start with the most conservative kind; sharpening may upgrade it later.
    SetPackedField<LoadKindField>(LoadKind::kDexCacheViaMethod);
  }

  // Out-of-line: changes how the string is loaded (see SetLoadKindInternal).
  void SetLoadKind(LoadKind load_kind);

  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  dex::StringIndex GetStringIndex() const {
    return string_index_;
  }

  Handle<mirror::String> GetString() const {
    return string_;
  }

  // Records the string when it is known at compile time.
  void SetString(Handle<mirror::String> str) {
    string_ = str;
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE;

  size_t ComputeHashCode() const OVERRIDE { return string_index_.index_; }

  // Will call the runtime if we need to load the string through
  // the dex cache and the string is not guaranteed to be there yet.
  bool NeedsEnvironment() const OVERRIDE {
    LoadKind load_kind = GetLoadKind();
    if (load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kBootImageAddress ||
        load_kind == LoadKind::kJitTableAddress) {
      return false;
    }
    return true;
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    return GetLoadKind() == LoadKind::kDexCacheViaMethod;
  }

  bool CanBeNull() const OVERRIDE { return false; }
  // Throwing is possible exactly when a runtime call may be needed.
  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  void AddSpecialInput(HInstruction* special_input);

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // Exposes zero or one input: the special input, when present.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  Primitive::Type GetType() const OVERRIDE {
    return Primitive::kPrimNot;
  }

  DECLARE_INSTRUCTION(LoadString);

 private:
  static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;

  void SetLoadKindInternal(LoadKind load_kind);

  // The special input is the HCurrentMethod for kDexCacheViaMethod.
  // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
  HUserRecord<HInstruction*> special_input_;

  dex::StringIndex string_index_;
  const DexFile& dex_file_;

  // The string, when known at compile time (set via SetString()).
  Handle<mirror::String> string_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};
5998std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
5999
6000// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
6001inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
6002  // The special input is used for PC-relative loads on some architectures,
6003  // including literal pool loads, which are PC-relative too.
6004  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6005         GetLoadKind() == LoadKind::kBssEntry ||
6006         GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
6007  // HLoadString::GetInputRecords() returns an empty array at this point,
6008  // so use the GetInputRecords() from the base class to set the input record.
6009  DCHECK(special_input_.GetInstruction() == nullptr);
6010  special_input_ = HUserRecord<HInstruction*>(special_input);
6011  special_input->AddUseAt(this, 0);
6012}
6013
6014/**
6015 * Performs an initialization check on its Class object input.
6016 */
6017class HClinitCheck FINAL : public HExpression<1> {
6018 public:
6019  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
6020      : HExpression(
6021            Primitive::kPrimNot,
6022            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
6023            dex_pc) {
6024    SetRawInputAt(0, constant);
6025  }
6026
6027  bool CanBeMoved() const OVERRIDE { return true; }
6028  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
6029    return true;
6030  }
6031
6032  bool NeedsEnvironment() const OVERRIDE {
6033    // May call runtime to initialize the class.
6034    return true;
6035  }
6036
6037  bool CanThrow() const OVERRIDE { return true; }
6038
6039  HLoadClass* GetLoadClass() const {
6040    DCHECK(InputAt(0)->IsLoadClass());
6041    return InputAt(0)->AsLoadClass();
6042  }
6043
6044  DECLARE_INSTRUCTION(ClinitCheck);
6045
6046 private:
6047  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
6048};
6049
// Reads a static field. Input 0 is the class holding the field.
class HStaticFieldGet FINAL : public HExpression<1> {
 public:
  HStaticFieldGet(HInstruction* cls,
                  ArtField* field,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, cls);
  }


  // Volatile reads must not be reordered, so only non-volatile gets can be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Two static field gets are considered equal when they read the same offset.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HStaticFieldGet* other_get = other->AsStaticFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  // Mixes the field offset into the generic hash code.
  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(StaticFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
};
6096
// Writes a static field. Inputs: the class holding the field (0) and the
// value to store (1).
class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  ArtField* field,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Conservatively assume the stored value may be null until proven otherwise.
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(StaticFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};
6143
6144class HUnresolvedInstanceFieldGet FINAL : public HExpression<1> {
6145 public:
6146  HUnresolvedInstanceFieldGet(HInstruction* obj,
6147                              Primitive::Type field_type,
6148                              uint32_t field_index,
6149                              uint32_t dex_pc)
6150      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
6151        field_index_(field_index) {
6152    SetRawInputAt(0, obj);
6153  }
6154
6155  bool NeedsEnvironment() const OVERRIDE { return true; }
6156  bool CanThrow() const OVERRIDE { return true; }
6157
6158  Primitive::Type GetFieldType() const { return GetType(); }
6159  uint32_t GetFieldIndex() const { return field_index_; }
6160
6161  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);
6162
6163 private:
6164  const uint32_t field_index_;
6165
6166  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
6167};
6168
6169class HUnresolvedInstanceFieldSet FINAL : public HTemplateInstruction<2> {
6170 public:
6171  HUnresolvedInstanceFieldSet(HInstruction* obj,
6172                              HInstruction* value,
6173                              Primitive::Type field_type,
6174                              uint32_t field_index,
6175                              uint32_t dex_pc)
6176      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6177        field_index_(field_index) {
6178    SetPackedField<FieldTypeField>(field_type);
6179    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
6180    SetRawInputAt(0, obj);
6181    SetRawInputAt(1, value);
6182  }
6183
6184  bool NeedsEnvironment() const OVERRIDE { return true; }
6185  bool CanThrow() const OVERRIDE { return true; }
6186
6187  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6188  uint32_t GetFieldIndex() const { return field_index_; }
6189
6190  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
6191
6192 private:
6193  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6194  static constexpr size_t kFieldFieldTypeSize =
6195      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
6196  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
6197      kFieldFieldType + kFieldFieldTypeSize;
6198  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6199                "Too many packed fields.");
6200  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
6201
6202  const uint32_t field_index_;
6203
6204  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
6205};
6206
6207class HUnresolvedStaticFieldGet FINAL : public HExpression<0> {
6208 public:
6209  HUnresolvedStaticFieldGet(Primitive::Type field_type,
6210                            uint32_t field_index,
6211                            uint32_t dex_pc)
6212      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
6213        field_index_(field_index) {
6214  }
6215
6216  bool NeedsEnvironment() const OVERRIDE { return true; }
6217  bool CanThrow() const OVERRIDE { return true; }
6218
6219  Primitive::Type GetFieldType() const { return GetType(); }
6220  uint32_t GetFieldIndex() const { return field_index_; }
6221
6222  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);
6223
6224 private:
6225  const uint32_t field_index_;
6226
6227  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
6228};
6229
6230class HUnresolvedStaticFieldSet FINAL : public HTemplateInstruction<1> {
6231 public:
6232  HUnresolvedStaticFieldSet(HInstruction* value,
6233                            Primitive::Type field_type,
6234                            uint32_t field_index,
6235                            uint32_t dex_pc)
6236      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6237        field_index_(field_index) {
6238    SetPackedField<FieldTypeField>(field_type);
6239    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
6240    SetRawInputAt(0, value);
6241  }
6242
6243  bool NeedsEnvironment() const OVERRIDE { return true; }
6244  bool CanThrow() const OVERRIDE { return true; }
6245
6246  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6247  uint32_t GetFieldIndex() const { return field_index_; }
6248
6249  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);
6250
6251 private:
6252  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6253  static constexpr size_t kFieldFieldTypeSize =
6254      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
6255  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
6256      kFieldFieldType + kFieldFieldTypeSize;
6257  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6258                "Too many packed fields.");
6259  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
6260
6261  const uint32_t field_index_;
6262
6263  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
6264};
6265
// Implement the move-exception DEX instruction: loads the pending exception
// reference into a register.
class HLoadException FINAL : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}

  // The pending exception being moved is never null at this point.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadException);
};
6279
// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
class HClearException FINAL : public HTemplateInstruction<0> {
 public:
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}  // Conservatively treated as writing memory.

  DECLARE_INSTRUCTION(ClearException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClearException);
};
6292
// Implements the DEX `throw` instruction; input 0 is the exception object.
class HThrow FINAL : public HTemplateInstruction<1> {
 public:
  HThrow(HInstruction* exception, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, exception);
  }

  // Throwing terminates the basic block.
  bool IsControlFlow() const OVERRIDE { return true; }

  // Throwing transfers control to the runtime, so we need an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }


  DECLARE_INSTRUCTION(Throw);

 private:
  DISALLOW_COPY_AND_ASSIGN(HThrow);
};
6312
6313/**
6314 * Implementation strategies for the code generator of a HInstanceOf
6315 * or `HCheckCast`.
6316 */
6317enum class TypeCheckKind {
6318  kUnresolvedCheck,       // Check against an unresolved type.
6319  kExactCheck,            // Can do a single class compare.
6320  kClassHierarchyCheck,   // Can just walk the super class chain.
6321  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
6322  kInterfaceCheck,        // No optimization yet when checking against an interface.
6323  kArrayObjectCheck,      // Can just check if the array is not primitive.
6324  kArrayCheck,            // No optimization yet when checking against a generic array.
6325  kLast = kArrayCheck
6326};
6327
6328std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
6329
// Implements the `instance-of` check: computes a boolean telling whether
// `object` (input 0) is an instance of the class loaded by input 1.
class HInstanceOf FINAL : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(Primitive::kPrimBoolean,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Needs an environment whenever the check may call into the runtime.
  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime(GetTypeCheckKind());
  }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  // Only exact checks are guaranteed to stay out of the runtime.
  static bool CanCallRuntime(TypeCheckKind check_kind) {
    // Mips currently does runtime calls for any other checks.
    return check_kind != TypeCheckKind::kExactCheck;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  // Packed layout: [TypeCheckKind field][MustDoNullCheck flag].
  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};
6383
// Narrows the reference type information of its input. Inserted by reference
// type propagation, e.g. inside the true branch of `if (x instanceof ClassX)`.
class HBoundType FINAL : public HExpression<1> {
 public:
  explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
    SetPackedFlag<kFlagUpperCanBeNull>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
    SetRawInputAt(0, input);
  }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  void SetCanBeNull(bool can_be_null) {
    // Cannot claim nullability beyond what the upper bound allows.
    DCHECK(GetUpperCanBeNull() || !can_be_null);
    SetPackedFlag<kFlagCanBeNull>(can_be_null);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then CanBeNull() cannot be true).
  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  ReferenceTypeInfo upper_bound_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};
6427
// Checks that `object` (input 0) can be cast to the class loaded by input 1;
// throws a CheckCastError otherwise.
class HCheckCast FINAL : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  // Packed layout: [TypeCheckKind field][MustDoNullCheck flag].
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};
6472
6473/**
6474 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
6475 * @details We define the combined barrier types that are actually required
6476 * by the Java Memory Model, rather than using exactly the terminology from
6477 * the JSR-133 cookbook.  These should, in many cases, be replaced by acquire/release
6478 * primitives.  Note that the JSR-133 cookbook generally does not deal with
6479 * store atomicity issues, and the recipes there are not always entirely sufficient.
6480 * The current recipe is as follows:
6481 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
6482 * -# Use AnyAny barrier after volatile store.  (StoreLoad is as expensive.)
6483 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
6484 * -# Use StoreStore barrier after all stores but before return from any constructor whose
6485 *    class has final fields.
6486 * -# Use NTStoreStore to order non-temporal stores with respect to all later
6487 *    store-to-memory instructions.  Only generated together with non-temporal stores.
6488 */
enum MemBarrierKind {
  kAnyStore,      // ~= (LoadStore | StoreStore); release barrier before a volatile store.
  kLoadAny,       // ~= (LoadLoad | LoadStore); acquire barrier after a volatile load.
  kStoreStore,    // Before constructor return when the class has final fields.
  kAnyAny,        // Full barrier after a volatile store.
  kNTStoreStore,  // Orders non-temporal stores w.r.t. later store-to-memory instructions.
  kLastBarrierKind = kNTStoreStore
};
std::ostream& operator<<(std::ostream& os, const MemBarrierKind& kind);
6498
6499class HMemoryBarrier FINAL : public HTemplateInstruction<0> {
6500 public:
6501  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
6502      : HTemplateInstruction(
6503            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
6504    SetPackedField<BarrierKindField>(barrier_kind);
6505  }
6506
6507  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }
6508
6509  DECLARE_INSTRUCTION(MemoryBarrier);
6510
6511 private:
6512  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
6513  static constexpr size_t kFieldBarrierKindSize =
6514      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
6515  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
6516      kFieldBarrierKind + kFieldBarrierKindSize;
6517  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
6518                "Too many packed fields.");
6519  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;
6520
6521  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
6522};
6523
6524// A constructor fence orders all prior stores to fields that could be accessed via a final field of
6525// the specified object(s), with respect to any subsequent store that might "publish"
6526// (i.e. make visible) the specified object to another thread.
6527//
6528// JLS 17.5.1 "Semantics of final fields" states that a freeze action happens
6529// for all final fields (that were set) at the end of the invoked constructor.
6530//
6531// The constructor fence models the freeze actions for the final fields of an object
6532// being constructed (semantically at the end of the constructor). Constructor fences
6533// have a per-object affinity; two separate objects being constructed get two separate
6534// constructor fences.
6535//
6536// (Note: that if calling a super-constructor or forwarding to another constructor,
6537// the freezes would happen at the end of *that* constructor being invoked).
6538//
6539// The memory model guarantees that when the object being constructed is "published" after
6540// constructor completion (i.e. escapes the current thread via a store), then any final field
6541// writes must be observable on other threads (once they observe that publication).
6542//
6543// Further, anything written before the freeze, and read by dereferencing through the final field,
6544// must also be visible (so final object field could itself have an object with non-final fields;
6545// yet the freeze must also extend to them).
6546//
6547// Constructor example:
6548//
6549//     class HasFinal {
6550//        final int field;                              Optimizing IR for <init>()V:
6551//        HasFinal() {
6552//          field = 123;                                HInstanceFieldSet(this, HasFinal.field, 123)
6553//          // freeze(this.field);                      HConstructorFence(this)
6554//        }                                             HReturn
6555//     }
6556//
6557// HConstructorFence can serve double duty as a fence for new-instance/new-array allocations of
6558// already-initialized classes; in that case the allocation must act as a "default-initializer"
6559// of the object which effectively writes the class pointer "final field".
6560//
6561// For example, we can model default-initialiation as roughly the equivalent of the following:
6562//
6563//     class Object {
6564//       private final Class header;
6565//     }
6566//
6567//  Java code:                                           Optimizing IR:
6568//
6569//     T new_instance<T>() {
6570//       Object obj = allocate_memory(T.class.size);     obj = HInvoke(art_quick_alloc_object, T)
6571//       obj.header = T.class;                           // header write is done by above call.
6572//       // freeze(obj.header)                           HConstructorFence(obj)
6573//       return (T)obj;
6574//     }
6575//
6576// See also:
6577// * CompilerDriver::RequiresConstructorBarrier
6578// * QuasiAtomic::ThreadFenceForConstructor
6579//
class HConstructorFence FINAL : public HVariableInputSizeInstruction {
                                  // A fence has variable inputs because the inputs can be removed
                                  // after prepare_for_register_allocation phase.
                                  // (TODO: In the future a fence could freeze multiple objects
                                  //        after merging two fences together.)
 public:
  // `fence_object` is the reference that needs to be protected for correct publication.
  //
  // It makes sense in the following situations:
  // * <init> constructors, it's the "this" parameter (i.e. HParameterValue, s.t. IsThis() == true).
  // * new-instance-like instructions, it's the return value (i.e. HNewInstance).
  //
  // After construction the `fence_object` becomes the 0th input.
  // This is not an input in a real sense, but just a convenient place to stash the information
  // about the associated object.
  HConstructorFence(HInstruction* fence_object,
                    uint32_t dex_pc,
                    ArenaAllocator* arena)
    // We strongly suspect there is not a more accurate way to describe the fine-grained reordering
    // constraints described in the class header. We claim that these SideEffects constraints
    // enforce a superset of the real constraints.
    //
    // The ordering described above is conservatively modeled with SideEffects as follows:
    //
    // * To prevent reordering of the publication stores:
    // ----> "Reads of objects" is the initial SideEffect.
    // * For every primitive final field store in the constructor:
    // ----> Union that field's type as a read (e.g. "Read of T") into the SideEffect.
    // * If there are any stores to reference final fields in the constructor:
    // ----> Use a more conservative "AllReads" SideEffect because any stores to any references
    //       that are reachable from `fence_object` also need to be prevented for reordering
    //       (and we do not want to do alias analysis to figure out what those stores are).
    //
    // In the implementation, this initially starts out as an "all reads" side effect; this is an
    // even more conservative approach than the one described above, and prevents all of the
    // above reordering without analyzing any of the instructions in the constructor.
    //
    // If in a later phase we discover that there are no writes to reference final fields,
    // we can refine the side effect to a smaller set of type reads (see above constraints).
      : HVariableInputSizeInstruction(SideEffects::AllReads(),
                                      dex_pc,
                                      arena,
                                      /* number_of_inputs */ 1,
                                      kArenaAllocConstructorFenceInputs) {
    // A fence is always associated with exactly one object at construction time.
    DCHECK(fence_object != nullptr);
    SetRawInputAt(0, fence_object);
  }

  // The object associated with this constructor fence.
  //
  // (Note: This will be null after the prepare_for_register_allocation phase,
  // as all constructor fence inputs are removed there).
  HInstruction* GetFenceObject() const {
    return InputAt(0);
  }

  // Find all the HConstructorFence uses (`fence_use`) for `this` and:
  // - Delete `fence_use` from `this`'s use list.
  // - Delete `this` from `fence_use`'s inputs list.
  // - If the `fence_use` is dead, remove it from the graph.
  //
  // A fence is considered dead once it no longer has any uses
  // and all of the inputs are dead.
  //
  // This must *not* be called during/after prepare_for_register_allocation,
  // because that removes all the inputs to the fences but the fence is actually
  // still considered live.
  static void RemoveConstructorFences(HInstruction* instruction);

  // Check if this constructor fence is protecting
  // an HNewInstance or HNewArray that is also the immediate
  // predecessor of `this`.
  //
  // Returns the associated HNewArray or HNewInstance,
  // or null otherwise.
  HInstruction* GetAssociatedAllocation();

  DECLARE_INSTRUCTION(ConstructorFence);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstructorFence);
};
6662
6663class HMonitorOperation FINAL : public HTemplateInstruction<1> {
6664 public:
6665  enum class OperationKind {
6666    kEnter,
6667    kExit,
6668    kLast = kExit
6669  };
6670
6671  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
6672    : HTemplateInstruction(
6673          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6674          dex_pc) {
6675    SetPackedField<OperationKindField>(kind);
6676    SetRawInputAt(0, object);
6677  }
6678
6679  // Instruction may go into runtime, so we need an environment.
6680  bool NeedsEnvironment() const OVERRIDE { return true; }
6681
6682  bool CanThrow() const OVERRIDE {
6683    // Verifier guarantees that monitor-exit cannot throw.
6684    // This is important because it allows the HGraphBuilder to remove
6685    // a dead throw-catch loop generated for `synchronized` blocks/methods.
6686    return IsEnter();
6687  }
6688
6689  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
6690  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
6691
6692  DECLARE_INSTRUCTION(MonitorOperation);
6693
6694 private:
6695  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
6696  static constexpr size_t kFieldOperationKindSize =
6697      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
6698  static constexpr size_t kNumberOfMonitorOperationPackedBits =
6699      kFieldOperationKind + kFieldOperationKindSize;
6700  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6701                "Too many packed fields.");
6702  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
6703
6704 private:
6705  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
6706};
6707
6708class HSelect FINAL : public HExpression<3> {
6709 public:
6710  HSelect(HInstruction* condition,
6711          HInstruction* true_value,
6712          HInstruction* false_value,
6713          uint32_t dex_pc)
6714      : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
6715    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));
6716
6717    // First input must be `true_value` or `false_value` to allow codegens to
6718    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
6719    // that architectures which implement HSelect as a conditional move also
6720    // will not need to invert the condition.
6721    SetRawInputAt(0, false_value);
6722    SetRawInputAt(1, true_value);
6723    SetRawInputAt(2, condition);
6724  }
6725
6726  HInstruction* GetFalseValue() const { return InputAt(0); }
6727  HInstruction* GetTrueValue() const { return InputAt(1); }
6728  HInstruction* GetCondition() const { return InputAt(2); }
6729
6730  bool CanBeMoved() const OVERRIDE { return true; }
6731  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
6732    return true;
6733  }
6734
6735  bool CanBeNull() const OVERRIDE {
6736    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
6737  }
6738
6739  DECLARE_INSTRUCTION(Select);
6740
6741 private:
6742  DISALLOW_COPY_AND_ASSIGN(HSelect);
6743};
6744
// A (source, destination) pair of locations manipulated by the parallel move
// resolver, together with the move's primitive type and (for debug checking)
// the instruction the move belongs to.
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source). Returns the cleared destination so it
  // can be restored via ClearPending().
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  // Restores the destination saved by MarkPending().
  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  // Pending == destination cleared but source still valid (see MarkPending()).
  bool IsPending() const {
    DCHECK(source_.IsValid() || destination_.IsInvalid());
    return destination_.IsInvalid() && source_.IsValid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate a move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  Primitive::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return Primitive::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  Primitive::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of its user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};
6818
std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);

// Initial capacity reserved for the moves_ vector of an HParallelMove.
static constexpr size_t kDefaultNumberOfMoves = 4;

// A set of moves to be performed as if simultaneously; debug builds verify
// that no two moves in the set write overlapping destinations.
class HParallelMove FINAL : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  // Appends a move. In debug builds, checks that the destination does not
  // overlap any existing destination and that we are not adding two moves
  // for the same instruction (except for the spill-slot special case below).
  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};
6878
6879}  // namespace art
6880
6881#include "nodes_vector.h"
6882
6883#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
6884#include "nodes_shared.h"
6885#endif
6886#ifdef ART_ENABLE_CODEGEN_arm
6887#include "nodes_arm.h"
6888#endif
6889#ifdef ART_ENABLE_CODEGEN_mips
6890#include "nodes_mips.h"
6891#endif
6892#ifdef ART_ENABLE_CODEGEN_x86
6893#include "nodes_x86.h"
6894#endif
6895
6896namespace art {
6897
// Base class for visitors over the instructions of a graph. The default
// VisitInstruction() does nothing; subclasses override the per-instruction
// Visit##name() hooks they care about.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  // Fallback called by every generated Visit##name() below unless overridden.
  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  // Visits a single basic block (implementation lives out-of-line).
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
6927
// Visitor whose default per-instruction behavior delegates to the visit
// method of the instruction's super class (rather than straight to
// VisitInstruction), so overriding an abstract kind covers all its
// concrete subclasses.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
6944
6945// Iterator over the blocks that art part of the loop. Includes blocks part
6946// of an inner loop. The order in which the blocks are iterated is on their
6947// block id.
6948class HBlocksInLoopIterator : public ValueObject {
6949 public:
6950  explicit HBlocksInLoopIterator(const HLoopInformation& info)
6951      : blocks_in_loop_(info.GetBlocks()),
6952        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
6953        index_(0) {
6954    if (!blocks_in_loop_.IsBitSet(index_)) {
6955      Advance();
6956    }
6957  }
6958
6959  bool Done() const { return index_ == blocks_.size(); }
6960  HBasicBlock* Current() const { return blocks_[index_]; }
6961  void Advance() {
6962    ++index_;
6963    for (size_t e = blocks_.size(); index_ < e; ++index_) {
6964      if (blocks_in_loop_.IsBitSet(index_)) {
6965        break;
6966      }
6967    }
6968  }
6969
6970 private:
6971  const BitVector& blocks_in_loop_;
6972  const ArenaVector<HBasicBlock*>& blocks_;
6973  size_t index_;
6974
6975  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
6976};
6977
6978// Iterator over the blocks that art part of the loop. Includes blocks part
6979// of an inner loop. The order in which the blocks are iterated is reverse
6980// post order.
6981class HBlocksInLoopReversePostOrderIterator : public ValueObject {
6982 public:
6983  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
6984      : blocks_in_loop_(info.GetBlocks()),
6985        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
6986        index_(0) {
6987    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
6988      Advance();
6989    }
6990  }
6991
6992  bool Done() const { return index_ == blocks_.size(); }
6993  HBasicBlock* Current() const { return blocks_[index_]; }
6994  void Advance() {
6995    ++index_;
6996    for (size_t e = blocks_.size(); index_ < e; ++index_) {
6997      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
6998        break;
6999      }
7000    }
7001  }
7002
7003 private:
7004  const BitVector& blocks_in_loop_;
7005  const ArenaVector<HBasicBlock*>& blocks_;
7006  size_t index_;
7007
7008  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
7009};
7010
7011// Returns int64_t value of a properly typed constant.
7012inline int64_t Int64FromConstant(HConstant* constant) {
7013  if (constant->IsIntConstant()) {
7014    return constant->AsIntConstant()->GetValue();
7015  } else if (constant->IsLongConstant()) {
7016    return constant->AsLongConstant()->GetValue();
7017  } else {
7018    DCHECK(constant->IsNullConstant()) << constant->DebugName();
7019    return 0;
7020  }
7021}
7022
7023// Returns true iff instruction is an integral constant (and sets value on success).
7024inline bool IsInt64AndGet(HInstruction* instruction, /*out*/ int64_t* value) {
7025  if (instruction->IsIntConstant()) {
7026    *value = instruction->AsIntConstant()->GetValue();
7027    return true;
7028  } else if (instruction->IsLongConstant()) {
7029    *value = instruction->AsLongConstant()->GetValue();
7030    return true;
7031  } else if (instruction->IsNullConstant()) {
7032    *value = 0;
7033    return true;
7034  }
7035  return false;
7036}
7037
7038#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
7039  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
7040  inline const H##type* HInstruction::As##type() const {                       \
7041    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
7042  }                                                                            \
7043  inline H##type* HInstruction::As##type() {                                   \
7044    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
7045  }
7046
7047  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
7048#undef INSTRUCTION_TYPE_CHECK
7049
7050// Create space in `blocks` for adding `number_of_new_blocks` entries
7051// starting at location `at`. Blocks after `at` are moved accordingly.
7052inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
7053                        size_t number_of_new_blocks,
7054                        size_t after) {
7055  DCHECK_LT(after, blocks->size());
7056  size_t old_size = blocks->size();
7057  size_t new_size = old_size + number_of_new_blocks;
7058  blocks->resize(new_size);
7059  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
7060}
7061
7062/*
7063 * Hunt "under the hood" of array lengths (leading to array references),
7064 * null checks (also leading to array references), and new arrays
7065 * (leading to the actual length). This makes it more likely related
7066 * instructions become actually comparable.
7067 */
7068inline HInstruction* HuntForDeclaration(HInstruction* instruction) {
7069  while (instruction->IsArrayLength() ||
7070         instruction->IsNullCheck() ||
7071         instruction->IsNewArray()) {
7072    instruction = instruction->IsNewArray()
7073        ? instruction->AsNewArray()->GetLength()
7074        : instruction->InputAt(0);
7075  }
7076  return instruction;
7077}
7078
7079}  // namespace art
7080
7081#endif  // ART_COMPILER_OPTIMIZING_NODES_H_
7082