/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_
#define ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_

#include <stdint.h>
#include <memory>
#include <vector>

#include <android-base/logging.h>

#include "base/arena_containers.h"
#include "base/macros.h"
#include "offsets.h"
#include "utils/arm64/managed_register_arm64.h"
#include "utils/assembler.h"

// TODO(VIXL): Make VIXL compile with -Wshadow.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wshadow"
#include "aarch64/disasm-aarch64.h"
#include "aarch64/macro-assembler-aarch64.h"
#pragma GCC diagnostic pop

namespace art {
namespace arm64 {

#define MEM_OP(...)      vixl::aarch64::MemOperand(__VA_ARGS__)
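
// Usage sketch (illustrative values): the macro simply forwards its arguments
// to the vixl::aarch64::MemOperand constructor, e.g.
//
//   vixl::aarch64::MemOperand mem = MEM_OP(vixl::aarch64::sp, 16);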

enum LoadOperandType {
  kLoadSignedByte,        // sign-extending byte load, e.g. ldrsb
  kLoadUnsignedByte,      // zero-extending byte load, e.g. ldrb
  kLoadSignedHalfword,    // sign-extending halfword load, e.g. ldrsh
  kLoadUnsignedHalfword,  // zero-extending halfword load, e.g. ldrh
  kLoadWord,              // 32-bit core register load, e.g. ldr wN
  kLoadCoreWord,          // 64-bit core register load, e.g. ldr xN
  kLoadSWord,             // 32-bit FP register load, e.g. ldr sN
  kLoadDWord              // 64-bit FP register load, e.g. ldr dN
};

enum StoreOperandType {
  kStoreByte,      // byte store, e.g. strb
  kStoreHalfword,  // halfword store, e.g. strh
  kStoreWord,      // 32-bit core register store, e.g. str wN
  kStoreCoreWord,  // 64-bit core register store, e.g. str xN
  kStoreSWord,     // 32-bit FP register store, e.g. str sN
  kStoreDWord      // 64-bit FP register store, e.g. str dN
};

class Arm64Assembler FINAL : public Assembler {
 public:
  explicit Arm64Assembler(ArenaAllocator* allocator) : Assembler(allocator) {}

  virtual ~Arm64Assembler() {}

  vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return &vixl_masm_; }
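
  // Usage sketch (illustrative): code can be emitted directly through the
  // underlying VIXL macro assembler, e.g.
  //
  //   GetVIXLAssembler()->Add(vixl::aarch64::x0, vixl::aarch64::x1,
  //                           vixl::aarch64::Operand(16));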

  // Finalize the code.
  void FinalizeCode() OVERRIDE;

  // Size of generated code.
  size_t CodeSize() const OVERRIDE;
  const uint8_t* CodeBufferBaseAddress() const OVERRIDE;

  // Copy instructions out of assembly buffer into the given region of memory.
  void FinalizeInstructions(const MemoryRegion& region) OVERRIDE;

  void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs);

  void SpillRegisters(vixl::aarch64::CPURegList registers, int offset);
  void UnspillRegisters(vixl::aarch64::CPURegList registers, int offset);
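
  // Usage sketch (illustrative; `asm_` is a hypothetical Arm64Assembler and
  // `frame_offset` a hypothetical byte offset into the frame):
  //
  //   vixl::aarch64::CPURegList regs(vixl::aarch64::CPURegister::kRegister,
  //                                  vixl::aarch64::kXRegSize, 19, 28);  // x19-x28
  //   asm_.SpillRegisters(regs, frame_offset);
  //   ...
  //   asm_.UnspillRegisters(regs, frame_offset);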

  // Jump to address (not setting link register)
  void JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch);
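
  // Usage sketch (illustrative; `asm_`, `base_reg` and `scratch_reg` are
  // hypothetical ManagedRegisters): branch through a code pointer stored at
  // byte offset 16 in the object held by `base_reg`, clobbering `scratch_reg`:
  //
  //   asm_.JumpTo(base_reg, Offset(16), scratch_reg);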

  //
  // Heap poisoning.
  //

  // Poison a heap reference contained in `reg`.
  void PoisonHeapReference(vixl::aarch64::Register reg);
  // Unpoison a heap reference contained in `reg`.
  void UnpoisonHeapReference(vixl::aarch64::Register reg);
  // Poison a heap reference contained in `reg` if heap poisoning is enabled.
  void MaybePoisonHeapReference(vixl::aarch64::Register reg);
  // Unpoison a heap reference contained in `reg` if heap poisoning is enabled.
  void MaybeUnpoisonHeapReference(vixl::aarch64::Register reg);
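
  // Usage sketch (illustrative; `asm_` is a hypothetical Arm64Assembler and
  // `ref` a W register holding a heap reference):
  //
  //   asm_.MaybePoisonHeapReference(ref);    // before storing `ref` to the heap
  //   asm_.MaybeUnpoisonHeapReference(ref);  // after loading `ref` from the heap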

  // Emit code checking the status of the Marking Register, and aborting
  // the program if MR does not match the value stored in the art::Thread
  // object.
  //
  // Argument `temp` is used as a temporary register to generate code.
  // Argument `code` is used to identify the different occurrences of
  // MaybeGenerateMarkingRegisterCheck (the code generator helper that
  // forwards to this method) and is passed to the BRK instruction.
  void GenerateMarkingRegisterCheck(vixl::aarch64::Register temp, int code = 0);
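
  // Usage sketch (illustrative; `asm_` is a hypothetical Arm64Assembler, and
  // IP0 is assumed to be free for use as a scratch register here):
  //
  //   asm_.GenerateMarkingRegisterCheck(reg_x(IP0), /* code= */ 17);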

  void Bind(Label* label ATTRIBUTE_UNUSED) OVERRIDE {
    UNIMPLEMENTED(FATAL) << "Do not use Bind for ARM64";
  }
  void Jump(Label* label ATTRIBUTE_UNUSED) OVERRIDE {
    UNIMPLEMENTED(FATAL) << "Do not use Jump for ARM64";
  }
  static vixl::aarch64::Register reg_x(int code) {
    CHECK(code < kNumberOfXRegisters) << code;
    if (code == SP) {
      return vixl::aarch64::sp;
    } else if (code == XZR) {
      return vixl::aarch64::xzr;
    }
    return vixl::aarch64::Register::GetXRegFromCode(code);
  }

  static vixl::aarch64::Register reg_w(int code) {
    CHECK(code < kNumberOfWRegisters) << code;
    if (code == WSP) {
      return vixl::aarch64::wsp;
    } else if (code == WZR) {
      return vixl::aarch64::wzr;
    }
    return vixl::aarch64::Register::GetWRegFromCode(code);
  }

  static vixl::aarch64::FPRegister reg_d(int code) {
    return vixl::aarch64::FPRegister::GetDRegFromCode(code);
  }

  static vixl::aarch64::FPRegister reg_s(int code) {
    return vixl::aarch64::FPRegister::GetSRegFromCode(code);
  }
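
  // Illustrative mappings (codes are art::arm64 register enum values):
  //
  //   reg_x(0)    // -> vixl::aarch64::x0
  //   reg_x(SP)   // -> vixl::aarch64::sp
  //   reg_w(WZR)  // -> vixl::aarch64::wzr
  //   reg_d(0)    // -> vixl::aarch64::d0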

 private:
  // VIXL assembler.
  vixl::aarch64::MacroAssembler vixl_masm_;

  // Used for testing.
  friend class Arm64ManagedRegister_VixlRegisters_Test;
};

}  // namespace arm64
}  // namespace art

#endif  // ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_