// Copyright 2014, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

extern "C" {
#include <sys/mman.h>
}

// For malloc/free/realloc and the memcpy/memset/strlen/stpcpy routines used
// below.
#include <cstdlib>
#include <cstring>

#include "code-buffer-vixl.h"
#include "utils-vixl.h"

namespace vixl {

// BSD uses `MAP_ANON` instead of the Linux `MAP_ANONYMOUS`. The `MAP_ANONYMOUS`
// alias should generally be available, but is not always, so define it manually
// if necessary.
#if !defined(MAP_ANONYMOUS) && defined(MAP_ANON)
#define MAP_ANONYMOUS MAP_ANON
#endif

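// Allocate a managed buffer of `capacity` bytes, using either malloc or an
// anonymous mmap mapping, depending on the configured allocator.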
CodeBuffer::CodeBuffer(size_t capacity)
    : buffer_(NULL),
      managed_(true),
      cursor_(NULL),
      dirty_(false),
      capacity_(capacity) {
  if (capacity_ == 0) {
    return;
  }
#ifdef VIXL_CODE_BUFFER_MALLOC
  buffer_ = reinterpret_cast<byte*>(malloc(capacity_));
#elif defined(VIXL_CODE_BUFFER_MMAP)
  buffer_ = reinterpret_cast<byte*>(mmap(NULL,
                                         capacity,
                                         PROT_READ | PROT_WRITE,
                                         MAP_PRIVATE | MAP_ANONYMOUS,
                                         -1,
                                         0));
#else
#error Unknown code buffer allocator.
#endif
  VIXL_CHECK(buffer_ != NULL);
  // AArch64 instructions must be word aligned. We assert that the default
  // allocator always returns word-aligned memory.
  VIXL_ASSERT(IsWordAligned(buffer_));

  cursor_ = buffer_;
}


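// Wrap an externally allocated buffer. The buffer is not managed, so it is
// never resized or freed by this class.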
CodeBuffer::CodeBuffer(byte* buffer, size_t capacity)
    : buffer_(reinterpret_cast<byte*>(buffer)),
      managed_(false),
      cursor_(reinterpret_cast<byte*>(buffer)),
      dirty_(false),
      capacity_(capacity) {
  VIXL_ASSERT(buffer_ != NULL);
}


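// Release the underlying memory, but only if it was allocated (and is
// therefore managed) by this CodeBuffer.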
CodeBuffer::~CodeBuffer() {
  VIXL_ASSERT(!IsDirty());
  if (managed_) {
#ifdef VIXL_CODE_BUFFER_MALLOC
    free(buffer_);
#elif defined(VIXL_CODE_BUFFER_MMAP)
    munmap(buffer_, capacity_);
#else
#error Unknown code buffer allocator.
#endif
  }
}


#ifdef VIXL_CODE_BUFFER_MMAP
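// Make the buffer executable (and no longer writable) by updating the page
// protection of the mapping.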
void CodeBuffer::SetExecutable() {
  int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_EXEC);
  VIXL_CHECK(ret == 0);
}
#endif


#ifdef VIXL_CODE_BUFFER_MMAP
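// Make the buffer writable (and no longer executable) again, so that more
// code can be emitted.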
void CodeBuffer::SetWritable() {
  int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_WRITE);
  VIXL_CHECK(ret == 0);
}
#endif


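// Copy a NUL-terminated string (including its terminator) into the buffer and
// advance the cursor past it.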
void CodeBuffer::EmitString(const char* string) {
  VIXL_ASSERT(HasSpaceFor(strlen(string) + 1));
  char* dst = reinterpret_cast<char*>(cursor_);
  dirty_ = true;
  char* null_char = stpcpy(dst, string);
  cursor_ = reinterpret_cast<byte*>(null_char) + 1;
}


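// Copy `size` bytes of arbitrary data into the buffer at the cursor.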
void CodeBuffer::EmitData(const void* data, size_t size) {
  VIXL_ASSERT(HasSpaceFor(size));
  dirty_ = true;
  memcpy(cursor_, data, size);
  cursor_ = cursor_ + size;
}


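// Overwrite previously emitted data at `offset` bytes from the start of the
// buffer.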
void CodeBuffer::UpdateData(size_t offset, const void* data, size_t size) {
  dirty_ = true;
  byte* dst = buffer_ + offset;
  VIXL_ASSERT(dst + size <= cursor_);
  memcpy(dst, data, size);
}


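// Zero-pad the buffer so that the cursor is aligned to a 4-byte boundary.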
void CodeBuffer::Align() {
  byte* end = AlignUp(cursor_, 4);
  const size_t padding_size = end - cursor_;
  VIXL_ASSERT(padding_size <= 4);
  EnsureSpaceFor(padding_size);
  dirty_ = true;
  memset(cursor_, 0, padding_size);
  cursor_ = end;
}


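// Discard everything that has been emitted and move the cursor back to the
// start of the buffer.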
void CodeBuffer::Reset() {
#ifdef VIXL_DEBUG
  if (managed_) {
    // Fill with zeros (there is no useful value common to A32 and T32).
    memset(buffer_, 0, capacity_);
  }
#endif
  cursor_ = buffer_;
  SetClean();
}


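// Enlarge a managed buffer to `new_capacity` bytes, preserving its contents
// and the current cursor offset. The buffer may move in memory.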
void CodeBuffer::Grow(size_t new_capacity) {
  VIXL_ASSERT(managed_);
  VIXL_ASSERT(new_capacity > capacity_);
  ptrdiff_t cursor_offset = GetCursorOffset();
#ifdef VIXL_CODE_BUFFER_MALLOC
  buffer_ = static_cast<byte*>(realloc(buffer_, new_capacity));
  VIXL_CHECK(buffer_ != NULL);
#elif defined(VIXL_CODE_BUFFER_MMAP)
#ifdef __APPLE__
  // TODO: Avoid using VIXL_CODE_BUFFER_MMAP.
  // Assert on `!managed_` rather than `false`, so that the compiler cannot
  // deduce that this method never returns.
  VIXL_ASSERT(!managed_);
#else
  buffer_ = static_cast<byte*>(
      mremap(buffer_, capacity_, new_capacity, MREMAP_MAYMOVE));
  VIXL_CHECK(buffer_ != MAP_FAILED);
#endif
#else
#error Unknown code buffer allocator.
#endif

  cursor_ = buffer_ + cursor_offset;
  capacity_ = new_capacity;
}


}  // namespace vixl
