/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_MEMORY_TOOL_MALLOC_SPACE_INL_H_
#define ART_RUNTIME_GC_SPACE_MEMORY_TOOL_MALLOC_SPACE_INL_H_

#include "memory_tool_malloc_space.h"

#include "base/memory_tool.h"
#include "memory_tool_settings.h"
#include "mirror/object-inl.h"

namespace art {
namespace gc {
namespace space {

namespace memory_tool_details {

// Turns a raw allocation of (num_bytes + 2 * kMemoryToolRedZoneBytes) into the memory-tool view:
//   [no-access left redzone][defined payload of num_bytes][no-access right redzone + slack]
// and copies the underlying allocator's accounting into the optional out-parameters.
// Returns a pointer to the payload, i.e. obj_with_rdz advanced past the left redzone.
//
// When kUseObjSizeForUsable is true, the reported usable size is exactly the requested
// num_bytes; otherwise it is the allocator's usable_size minus both redzones.
// Assumes usable_size >= num_bytes + 2 * kMemoryToolRedZoneBytes (callers request that much)
// — the right-redzone size below would underflow otherwise.
template <size_t kMemoryToolRedZoneBytes, bool kUseObjSizeForUsable>
inline mirror::Object* AdjustForValgrind(void* obj_with_rdz, size_t num_bytes,
                                         size_t bytes_allocated, size_t usable_size,
                                         size_t bytes_tl_bulk_allocated,
                                         size_t* bytes_allocated_out, size_t* usable_size_out,
                                         size_t* bytes_tl_bulk_allocated_out) {
  // Pass the allocator's raw accounting straight through; out-params are optional.
  if (bytes_allocated_out != nullptr) {
    *bytes_allocated_out = bytes_allocated;
  }
  if (bytes_tl_bulk_allocated_out != nullptr) {
    *bytes_tl_bulk_allocated_out = bytes_tl_bulk_allocated;
  }

  // This cuts over-provision and is a trade-off between testing the over-provisioning code paths
  // vs checking overflows in the regular paths.
  if (usable_size_out != nullptr) {
    if (kUseObjSizeForUsable) {
      *usable_size_out = num_bytes;
    } else {
      *usable_size_out = usable_size - 2 * kMemoryToolRedZoneBytes;
    }
  }

  // Left redzone: the first kMemoryToolRedZoneBytes of the raw allocation become no-access.
  MEMORY_TOOL_MAKE_NOACCESS(obj_with_rdz, kMemoryToolRedZoneBytes);

  // Make requested memory readable.
  // (If the allocator assumes memory is zeroed out, we might get UNDEFINED warnings, so make
  // everything DEFINED initially.)
  mirror::Object* result = reinterpret_cast<mirror::Object*>(
      reinterpret_cast<uint8_t*>(obj_with_rdz) + kMemoryToolRedZoneBytes);
  MEMORY_TOOL_MAKE_DEFINED(result, num_bytes);

  // Right redzone. Assumes that if bytes_allocated > usable_size, then the difference is
  // management data at the upper end, and for simplicity we will not protect that.
  // At the moment, this fits RosAlloc (no management data in a slot, usable_size == alloc_size)
  // and DlMalloc (allocation_size = (usable_size == num_bytes) + 4, 4 is management).
  // Note the right redzone covers everything from the end of the payload to the end of
  // usable_size, so it may be larger than kMemoryToolRedZoneBytes when the allocator
  // over-provisioned.
  MEMORY_TOOL_MAKE_NOACCESS(reinterpret_cast<uint8_t*>(result) + num_bytes,
                            usable_size - (num_bytes + kMemoryToolRedZoneBytes));

  return result;
}

// Reads the object's size via its class. NO_THREAD_SAFETY_ANALYSIS opts this helper out of
// clang's thread-safety checking — presumably because SizeOf's lock annotations cannot be
// satisfied from the allocation-size query paths below; verify against the SizeOf declaration.
inline size_t GetObjSizeNoThreadSafety(mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS {
  return obj->SizeOf<kVerifyNone>();
}

}  // namespace memory_tool_details

// Allocation with growth: request extra room for both redzones from the underlying space S,
// then carve the redzone layout out of the result. Returns null if S fails to allocate.
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
mirror::Object*
MemoryToolMallocSpace<S,
                      kMemoryToolRedZoneBytes,
                      kAdjustForRedzoneInAllocSize,
                      kUseObjSizeForUsable>::AllocWithGrowth(
    Thread* self, size_t num_bytes, size_t* bytes_allocated_out, size_t* usable_size_out,
    size_t* bytes_tl_bulk_allocated_out) {
  size_t bytes_allocated;
  size_t usable_size;
  size_t bytes_tl_bulk_allocated;
  // Over-allocate by two redzones; the payload the caller sees sits between them.
  void* obj_with_rdz = S::AllocWithGrowth(self, num_bytes + 2 * kMemoryToolRedZoneBytes,
                                          &bytes_allocated, &usable_size,
                                          &bytes_tl_bulk_allocated);
  if (obj_with_rdz == nullptr) {
    return nullptr;
  }

  return memory_tool_details::AdjustForValgrind<kMemoryToolRedZoneBytes, kUseObjSizeForUsable>(
      obj_with_rdz, num_bytes,
      bytes_allocated, usable_size,
      bytes_tl_bulk_allocated,
      bytes_allocated_out,
      usable_size_out,
      bytes_tl_bulk_allocated_out);
}

// Regular allocation path; identical structure to AllocWithGrowth but delegates to S::Alloc.
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
mirror::Object* MemoryToolMallocSpace<S,
                                      kMemoryToolRedZoneBytes,
                                      kAdjustForRedzoneInAllocSize,
                                      kUseObjSizeForUsable>::Alloc(
    Thread* self, size_t num_bytes, size_t* bytes_allocated_out, size_t* usable_size_out,
    size_t* bytes_tl_bulk_allocated_out) {
  size_t bytes_allocated;
  size_t usable_size;
  size_t bytes_tl_bulk_allocated;
  // Over-allocate by two redzones; the payload the caller sees sits between them.
  void* obj_with_rdz = S::Alloc(self, num_bytes + 2 * kMemoryToolRedZoneBytes,
                                &bytes_allocated, &usable_size, &bytes_tl_bulk_allocated);
  if (obj_with_rdz == nullptr) {
    return nullptr;
  }

  return memory_tool_details::AdjustForValgrind<kMemoryToolRedZoneBytes,
                                                kUseObjSizeForUsable>(obj_with_rdz, num_bytes,
                                                                      bytes_allocated, usable_size,
                                                                      bytes_tl_bulk_allocated,
                                                                      bytes_allocated_out,
                                                                      usable_size_out,
                                                                      bytes_tl_bulk_allocated_out);
}

// Thread-unsafe allocation path; identical structure to Alloc but delegates to
// S::AllocThreadUnsafe (caller is responsible for external synchronization).
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
mirror::Object* MemoryToolMallocSpace<S,
                                      kMemoryToolRedZoneBytes,
                                      kAdjustForRedzoneInAllocSize,
                                      kUseObjSizeForUsable>::AllocThreadUnsafe(
    Thread* self, size_t num_bytes, size_t* bytes_allocated_out, size_t* usable_size_out,
    size_t* bytes_tl_bulk_allocated_out) {
  size_t bytes_allocated;
  size_t usable_size;
  size_t bytes_tl_bulk_allocated;
  // Over-allocate by two redzones; the payload the caller sees sits between them.
  void* obj_with_rdz = S::AllocThreadUnsafe(self, num_bytes + 2 * kMemoryToolRedZoneBytes,
                                            &bytes_allocated, &usable_size,
                                            &bytes_tl_bulk_allocated);
  if (obj_with_rdz == nullptr) {
    return nullptr;
  }

  return memory_tool_details::AdjustForValgrind<kMemoryToolRedZoneBytes, kUseObjSizeForUsable>(
      obj_with_rdz, num_bytes,
      bytes_allocated, usable_size,
      bytes_tl_bulk_allocated,
      bytes_allocated_out,
      usable_size_out,
      bytes_tl_bulk_allocated_out);
}

// Reports the allocation size of `obj` as seen by the underlying space. If
// kAdjustForRedzoneInAllocSize, S tracks the allocation by its raw start, so the pointer is
// rewound past the left redzone before querying S. The usable size reported to the caller is
// either the object's own size (kUseObjSizeForUsable) or S's usable size minus both redzones.
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
size_t MemoryToolMallocSpace<S,
                             kMemoryToolRedZoneBytes,
                             kAdjustForRedzoneInAllocSize,
                             kUseObjSizeForUsable>::AllocationSize(
    mirror::Object* obj, size_t* usable_size) {
  size_t result = S::AllocationSize(reinterpret_cast<mirror::Object*>(
      reinterpret_cast<uint8_t*>(obj) - (kAdjustForRedzoneInAllocSize ? kMemoryToolRedZoneBytes : 0)),
      usable_size);
  if (usable_size != nullptr) {
    if (kUseObjSizeForUsable) {
      *usable_size = memory_tool_details::GetObjSizeNoThreadSafety(obj);
    } else {
      *usable_size = *usable_size - 2 * kMemoryToolRedZoneBytes;
    }
  }
  return result;
}

// Frees `ptr` (a payload pointer): unpoisons the whole raw allocation for the memory tool,
// then hands the raw (redzone-inclusive) pointer back to S. Returns the bytes freed as
// reported by S::Free.
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
size_t MemoryToolMallocSpace<S,
                             kMemoryToolRedZoneBytes,
                             kAdjustForRedzoneInAllocSize,
                             kUseObjSizeForUsable>::Free(
    Thread* self, mirror::Object* ptr) {
  // Rewind from the payload pointer to the true start of the allocation (left redzone).
  void* obj_after_rdz = reinterpret_cast<void*>(ptr);
  uint8_t* obj_with_rdz = reinterpret_cast<uint8_t*>(obj_after_rdz) - kMemoryToolRedZoneBytes;

  // Make redzones undefined.
  size_t usable_size;
  size_t allocation_size = AllocationSize(ptr, &usable_size);

  // Unprotect the allocation.
  // Use the obj-size-for-usable flag to determine whether usable_size is the more important one,
  // e.g., whether there's data in the allocation_size (and usable_size can't be trusted).
  if (kUseObjSizeForUsable) {
    // usable_size is the object size here, not the region size, so rely on allocation_size.
    MEMORY_TOOL_MAKE_UNDEFINED(obj_with_rdz, allocation_size);
  } else {
    // usable_size excludes both redzones (see AllocationSize); add them back.
    MEMORY_TOOL_MAKE_UNDEFINED(obj_with_rdz, usable_size + 2 * kMemoryToolRedZoneBytes);
  }

  return S::Free(self, reinterpret_cast<mirror::Object*>(obj_with_rdz));
}

// Frees a list of payload pointers via Free(), nulling each slot, and returns the total
// bytes freed.
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
size_t MemoryToolMallocSpace<S,
                             kMemoryToolRedZoneBytes,
                             kAdjustForRedzoneInAllocSize,
                             kUseObjSizeForUsable>::FreeList(
    Thread* self, size_t num_ptrs, mirror::Object** ptrs) {
  size_t freed = 0;
  for (size_t i = 0; i < num_ptrs; i++) {
    freed += Free(self, ptrs[i]);
    ptrs[i] = nullptr;  // Clear the slot so callers cannot reuse a freed pointer.
  }
  return freed;
}

// Forwards construction to the underlying space S, passing the mem map and any extra
// allocator parameters through unchanged.
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
template <typename... Params>
MemoryToolMallocSpace<S,
                      kMemoryToolRedZoneBytes,
                      kAdjustForRedzoneInAllocSize,
                      kUseObjSizeForUsable>::MemoryToolMallocSpace(
    MemMap* mem_map, size_t initial_size, Params... params) : S(mem_map, initial_size, params...) {
  // Don't want to change the valgrind states of the mem map here as the allocator is already
  // initialized at this point and that may interfere with what the allocator does internally. Note
  // that the tail beyond the initial size is mprotected.
}

// Worst-case bulk-allocation estimate: ask S, but for the redzone-padded request size.
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
size_t MemoryToolMallocSpace<S,
                             kMemoryToolRedZoneBytes,
                             kAdjustForRedzoneInAllocSize,
                             kUseObjSizeForUsable>::MaxBytesBulkAllocatedFor(size_t num_bytes) {
  return S::MaxBytesBulkAllocatedFor(num_bytes + 2 * kMemoryToolRedZoneBytes);
}

}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_MEMORY_TOOL_MALLOC_SPACE_INL_H_