/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "assembler.h"

#include <algorithm>
#include <vector>

#include "arm/assembler_arm32.h"
#include "arm/assembler_thumb2.h"
#include "arm64/assembler_arm64.h"
#include "mips/assembler_mips.h"
#include "x86/assembler_x86.h"
#include "x86_64/assembler_x86_64.h"
#include "globals.h"
#include "memory_region.h"

namespace art {

static byte* NewContents(size_t capacity) {
  return new byte[capacity];
}


AssemblerBuffer::AssemblerBuffer() {
  static const size_t kInitialBufferCapacity = 4 * KB;
  contents_ = NewContents(kInitialBufferCapacity);
  cursor_ = contents_;
  limit_ = ComputeLimit(contents_, kInitialBufferCapacity);
  fixup_ = NULL;
  slow_path_ = NULL;
#ifndef NDEBUG
  has_ensured_capacity_ = false;
  fixups_processed_ = false;
#endif

  // Verify internal state.
  CHECK_EQ(Capacity(), kInitialBufferCapacity);
  CHECK_EQ(Size(), 0U);
}


AssemblerBuffer::~AssemblerBuffer() {
  delete[] contents_;
}


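// Walk the chain of pending fixups (linked through previous()) and let each
// one patch the code at its recorded position within |region|.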
void AssemblerBuffer::ProcessFixups(const MemoryRegion& region) {
  AssemblerFixup* fixup = fixup_;
  while (fixup != NULL) {
    fixup->Process(region, fixup->position());
    fixup = fixup->previous();
  }
}


void AssemblerBuffer::FinalizeInstructions(const MemoryRegion& instructions) {
  // Copy the instructions from the buffer.
  MemoryRegion from(reinterpret_cast<void*>(contents()), Size());
  instructions.CopyFrom(0, from);
  // Process fixups in the instructions.
  ProcessFixups(instructions);
#ifndef NDEBUG
  fixups_processed_ = true;
#endif
}


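// Grow the buffer: capacity doubles while a doubling adds at most 1 MB of
// room; beyond that it grows linearly in 1 MB steps
// (4 KB -> 8 KB -> ... -> 1 MB -> 2 MB -> 3 MB -> ...).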
void AssemblerBuffer::ExtendCapacity() {
  size_t old_size = Size();
  size_t old_capacity = Capacity();
  size_t new_capacity = std::min(old_capacity * 2, old_capacity + 1 * MB);

  // Allocate the new data area and copy contents of the old one to it.
  byte* new_contents = NewContents(new_capacity);
  memmove(reinterpret_cast<void*>(new_contents),
          reinterpret_cast<void*>(contents_),
          old_size);

  // Compute the relocation delta, release the old data area and switch to
  // the new one.
  ptrdiff_t delta = new_contents - contents_;
  delete[] contents_;
  contents_ = new_contents;

  // Update the cursor and recompute the limit.
  cursor_ += delta;
  limit_ = ComputeLimit(new_contents, new_capacity);

  // Verify internal state.
  CHECK_EQ(Capacity(), new_capacity);
  CHECK_EQ(Size(), old_size);
}


Assembler* Assembler::Create(InstructionSet instruction_set) {
  switch (instruction_set) {
    case kArm:
      return new arm::Arm32Assembler();
    case kThumb2:
      return new arm::Thumb2Assembler();
    case kArm64:
      return new arm64::Arm64Assembler();
    case kMips:
      return new mips::MipsAssembler();
    case kX86:
      return new x86::X86Assembler();
    case kX86_64:
      return new x86_64::X86_64Assembler();
    default:
      LOG(FATAL) << "Unknown InstructionSet: " << instruction_set;
      return NULL;
  }
}
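
// Illustrative usage sketch (assumes the usual CodeSize() accessor on
// Assembler and the MemoryRegion(pointer, size) constructor; adjust to the
// actual interfaces if they differ):
//
//   Assembler* assembler = Assembler::Create(kThumb2);
//   // ... emit instructions through the backend-specific interface ...
//   std::vector<uint8_t> code(assembler->CodeSize());
//   MemoryRegion region(code.data(), code.size());
//   assembler->FinalizeInstructions(region);
//   delete assembler;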

void Assembler::StoreImmediateToThread32(ThreadOffset<4> dest, uint32_t imm,
                                         ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::StoreImmediateToThread64(ThreadOffset<8> dest, uint32_t imm,
                                         ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::StoreStackOffsetToThread32(ThreadOffset<4> thr_offs,
                                           FrameOffset fr_offs,
                                           ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::StoreStackOffsetToThread64(ThreadOffset<8> thr_offs,
                                           FrameOffset fr_offs,
                                           ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::StoreStackPointerToThread32(ThreadOffset<4> thr_offs) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::StoreStackPointerToThread64(ThreadOffset<8> thr_offs) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::LoadFromThread32(ManagedRegister dest, ThreadOffset<4> src, size_t size) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::LoadFromThread64(ManagedRegister dest, ThreadOffset<8> src, size_t size) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::LoadRawPtrFromThread32(ManagedRegister dest, ThreadOffset<4> offs) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::LoadRawPtrFromThread64(ManagedRegister dest, ThreadOffset<8> offs) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::CopyRawPtrFromThread32(FrameOffset fr_offs, ThreadOffset<4> thr_offs,
                                       ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::CopyRawPtrFromThread64(FrameOffset fr_offs, ThreadOffset<8> thr_offs,
                                       ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::CopyRawPtrToThread32(ThreadOffset<4> thr_offs, FrameOffset fr_offs,
                                     ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::CopyRawPtrToThread64(ThreadOffset<8> thr_offs, FrameOffset fr_offs,
                                     ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::CallFromThread32(ThreadOffset<4> offset, ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::CallFromThread64(ThreadOffset<8> offset, ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

}  // namespace art