/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_
#define ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_

#include "base/bit_utils.h"
#include "bump_pointer_space.h"

namespace art {
namespace gc {
namespace space {

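// Thread-safe allocation fast path: rounds the request up to kAlignment,
// delegates to the lock-free AllocNonvirtual(), and fills in the accounting
// out-parameters only on success.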
inline mirror::Object* BumpPointerSpace::Alloc(Thread*, size_t num_bytes, size_t* bytes_allocated,
                                               size_t* usable_size,
                                               size_t* bytes_tl_bulk_allocated) {
  num_bytes = RoundUp(num_bytes, kAlignment);
  mirror::Object* ret = AllocNonvirtual(num_bytes);
  if (LIKELY(ret != nullptr)) {
    *bytes_allocated = num_bytes;
    if (usable_size != nullptr) {
      *usable_size = num_bytes;
    }
    *bytes_tl_bulk_allocated = num_bytes;
  }
  return ret;
}
// A minimal caller-side sketch (variable names are illustrative, not from
// this file): on success all three size out-parameters receive the
// rounded-up request, and usable_size may be null if the caller does not
// need it.
//
//   size_t bytes_allocated, bytes_tl_bulk_allocated;
//   mirror::Object* obj = space->Alloc(self, byte_count, &bytes_allocated,
//                                      /* usable_size */ nullptr,
//                                      &bytes_tl_bulk_allocated);
//   if (obj == nullptr) {
//     // The space is exhausted; the heap must grow it or collect.
//   }
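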

inline mirror::Object* BumpPointerSpace::AllocThreadUnsafe(Thread* self, size_t num_bytes,
                                                           size_t* bytes_allocated,
                                                           size_t* usable_size,
                                                           size_t* bytes_tl_bulk_allocated) {
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  num_bytes = RoundUp(num_bytes, kAlignment);
  uint8_t* end = end_.LoadRelaxed();
  if (end + num_bytes > growth_end_) {
    return nullptr;
  }
  mirror::Object* obj = reinterpret_cast<mirror::Object*>(end);
  end_.StoreRelaxed(end + num_bytes);
  *bytes_allocated = num_bytes;
  // Use the CAS-free versions as an optimization.
  objects_allocated_.StoreRelaxed(objects_allocated_.LoadRelaxed() + 1);
  bytes_allocated_.StoreRelaxed(bytes_allocated_.LoadRelaxed() + num_bytes);
  if (UNLIKELY(usable_size != nullptr)) {
    *usable_size = num_bytes;
  }
  *bytes_tl_bulk_allocated = num_bytes;
  return obj;
}
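// The relaxed loads and stores above are safe only because the exclusive
// mutator lock guarantees no other thread is allocating concurrently; the
// same updates in AllocNonvirtual() below must use atomic read-modify-writes.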

inline mirror::Object* BumpPointerSpace::AllocNonvirtualWithoutAccounting(size_t num_bytes) {
  DCHECK_ALIGNED(num_bytes, kAlignment);
  uint8_t* old_end;
  uint8_t* new_end;
  do {
    old_end = end_.LoadRelaxed();
    new_end = old_end + num_bytes;
    // If there is no more room in the region, we are out of memory.
    if (UNLIKELY(new_end > growth_end_)) {
      return nullptr;
    }
  } while (!end_.CompareExchangeWeakSequentiallyConsistent(old_end, new_end));
  return reinterpret_cast<mirror::Object*>(old_end);
}
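// The loop above is the standard lock-free bump-pointer idiom: read the
// current end, compute the new end, and retry if another thread advanced the
// pointer in between. A self-contained sketch of the same idea using plain
// std::atomic instead of ART's Atomic wrapper (illustrative only, not part
// of this class):
//
//   #include <atomic>
//   #include <cstddef>
//   #include <cstdint>
//
//   uint8_t* Bump(std::atomic<uint8_t*>& end, uint8_t* limit, size_t n) {
//     uint8_t* old_end = end.load(std::memory_order_relaxed);
//     uint8_t* new_end;
//     do {
//       new_end = old_end + n;
//       if (new_end > limit) {
//         return nullptr;  // Region exhausted.
//       }
//       // On failure, compare_exchange_weak reloads old_end for the retry.
//     } while (!end.compare_exchange_weak(old_end, new_end));
//     return old_end;
//   }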

inline mirror::Object* BumpPointerSpace::AllocNonvirtual(size_t num_bytes) {
  mirror::Object* ret = AllocNonvirtualWithoutAccounting(num_bytes);
  if (ret != nullptr) {
    objects_allocated_.FetchAndAddSequentiallyConsistent(1);
    bytes_allocated_.FetchAndAddSequentiallyConsistent(num_bytes);
  }
  return ret;
}
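// Unlike the thread-unsafe path, the counters here must be updated with
// atomic fetch-and-add, since multiple mutators can allocate concurrently;
// the sequentially consistent ordering mirrors the CAS on end_ above.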

inline size_t BumpPointerSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  size_t num_bytes = obj->SizeOf();
  if (usable_size != nullptr) {
    *usable_size = RoundUp(num_bytes, kAlignment);
  }
  return num_bytes;
}
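// Note that the returned allocation size is the object's exact SizeOf(),
// while the reported usable size is that value rounded up to kAlignment,
// matching the rounding done on allocation.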

}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_