/art/runtime/
method_info.h
   33  explicit MethodInfo(const uint8_t* ptr) {  [argument]
   34    if (ptr != nullptr) {
   35      num_method_indices_ = DecodeUnsignedLeb128(&ptr);
   36      region_ = MemoryRegion(const_cast<uint8_t*>(ptr),
   42  MethodInfo(uint8_t* ptr, size_t num_method_indices) : num_method_indices_(num_method_indices) {  [argument]
   43    DCHECK(ptr != nullptr);
   44    ptr = EncodeUnsignedLeb128(ptr, num_method_indices_);
   45    region_ = MemoryRegion(ptr, num_method_indices_ * sizeof(MethodIndexType));
   50  uint8_t* ptr  [local]
  [all...]
leb128.h
   32  const uint8_t* ptr = *data;  [local]
   33  int result = *(ptr++);
   35  int cur = *(ptr++);
   38  cur = *(ptr++);
   41  cur = *(ptr++);
   46  cur = *(ptr++);
   52  *data = ptr;
   59  const uint8_t* ptr = *data;  [local]
   60  if (ptr >= end) {
   63  int result = *(ptr
  111  const uint8_t* ptr = *data;  [local]
  146  const uint8_t* ptr = *data;  [local]
  [all...]
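
Note: the hits above are ART's unrolled LEB128 decoder. A minimal loop-form sketch of the same scheme, with an illustrative name (DecodeUleb128 is not ART's function):

    #include <cstdint>

    // Unsigned LEB128: 7 payload bits per byte, high bit = continuation.
    // ART's DecodeUnsignedLeb128 unrolls this loop to at most five bytes;
    // like leb128.h, the caller's cursor is advanced past the value.
    static uint32_t DecodeUleb128(const uint8_t** data) {
      const uint8_t* ptr = *data;
      uint32_t result = 0;
      int shift = 0;
      uint8_t cur;
      do {
        cur = *(ptr++);
        result |= static_cast<uint32_t>(cur & 0x7f) << shift;
        shift += 7;
      } while ((cur & 0x80) != 0 && shift < 35);  // Cap at five bytes.
      *data = ptr;
      return result;
    }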
linear_alloc.cc
   26  void* LinearAlloc::Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size) {  [argument]
   28    return allocator_.Realloc(ptr, old_size, new_size);
   51  bool LinearAlloc::Contains(void* ptr) const {
   53    return allocator_.Contains(ptr);
   56  bool LinearAlloc::ContainsUnsafe(void* ptr) const {
   57    return allocator_.Contains(ptr);
obj_ptr.h
   58  ALWAYS_INLINE ObjPtr(Type* ptr)  // NOLINT
   60      : reference_(Encode(static_cast<MirrorType*>(ptr))) {
   78  ALWAYS_INLINE ObjPtr& operator=(MirrorType* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
   79    Assign(ptr);
   83  ALWAYS_INLINE void Assign(MirrorType* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
   84    reference_ = Encode(ptr);
  105  ALWAYS_INLINE bool operator==(const ObjPtr& ptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
  106    return Ptr() == ptr.Ptr();
  110  ALWAYS_INLINE bool operator==(const PointerType* ptr) const
  112    return Ptr() == ptr;
  201  MakeObjPtr(MirrorType* ptr)  [argument]
  206  MakeObjPtr(ObjPtr<MirrorType> ptr)  [argument]
  [all...]
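
Note: ObjPtr's Encode packs extra validity bits into the stored word so stale references can be caught. A simplified sketch of that pointer-packing idea; bit widths, the cookie source, and TaggedPtr itself are illustrative, not ART's actual layout:

    #include <cstdint>

    // Pack a small generation cookie into the high bits of a pointer
    // (assumes a 64-bit uintptr_t and addresses that fit in 48 bits).
    template <typename T>
    class TaggedPtr {
     public:
      TaggedPtr(T* ptr, uintptr_t cookie)
          : ref_((cookie << kPtrBits) | reinterpret_cast<uintptr_t>(ptr)) {}
      T* Get(uintptr_t expected_cookie) const {
        // A mismatched cookie means the handle outlived its scope.
        return (ref_ >> kPtrBits) == expected_cookie
            ? reinterpret_cast<T*>(ref_ & kPtrMask)
            : nullptr;
      }
     private:
      static constexpr unsigned kPtrBits = 48;  // Assumed address width.
      static constexpr uintptr_t kPtrMask = (uintptr_t{1} << kPtrBits) - 1;
      uintptr_t ref_;
    };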
linear_alloc.h
   35  void* Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size) REQUIRES(!lock_);
   49  bool Contains(void* ptr) const REQUIRES(!lock_);
   53  bool ContainsUnsafe(void* ptr) const NO_THREAD_SAFETY_ANALYSIS;
imtable.h
   46  uint8_t* ptr = AddressOfElement(index, pointer_size);  [local]
   48  uint32_t value = *reinterpret_cast<uint32_t*>(ptr);
   51  uint64_t value = *reinterpret_cast<uint64_t*>(ptr);
   58  uint8_t* ptr = AddressOfElement(index, pointer_size);  [local]
   62  *reinterpret_cast<uint32_t*>(ptr) = static_cast<uint32_t>(value);
   64  *reinterpret_cast<uint64_t*>(ptr) = reinterpret_cast<uint64_t>(method);
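
Note: the IMT stores pointer-sized slots, so reads dispatch on the runtime's pointer size. A sketch of the read side; PointerSize and LoadEntry are stand-ins (ART casts directly, as above, while this uses memcpy to sidestep aliasing):

    #include <cstdint>
    #include <cstring>

    enum class PointerSize { k32 = 4, k64 = 8 };

    // Load one table entry, widened to 64 bits regardless of slot size.
    static uint64_t LoadEntry(const uint8_t* ptr, PointerSize pointer_size) {
      if (pointer_size == PointerSize::k32) {
        uint32_t value;
        std::memcpy(&value, ptr, sizeof(value));
        return value;
      }
      uint64_t value;
      std::memcpy(&value, ptr, sizeof(value));
      return value;
    }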
obj_ptr-inl.h
   42  inline uintptr_t ObjPtr<MirrorType>::Encode(MirrorType* ptr) {  [argument]
   43    uintptr_t ref = reinterpret_cast<uintptr_t>(ptr);
   57  inline std::ostream& operator<<(std::ostream& os, ObjPtr<MirrorType> ptr) {  [argument]
   59    return os << ptr.PtrUnchecked();
utf_test.cc
   61  const char* ptr = start;  [local]
   65  pair = GetUtf16FromUtf8(&ptr);
   68  EXPECT_ARRAY_POSITION(1, ptr, start);
   71  pair = GetUtf16FromUtf8(&ptr);
   74  EXPECT_ARRAY_POSITION(3, ptr, start);
   77  pair = GetUtf16FromUtf8(&ptr);
   80  EXPECT_ARRAY_POSITION(6, ptr, start);
   83  pair = GetUtf16FromUtf8(&ptr);
   86  EXPECT_ARRAY_POSITION(10, ptr, start);
   89  pair = GetUtf16FromUtf8(&ptr);
   97  const char* ptr = start;  [local]
  [all...]
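
Note: the cursor offsets the test checks (1, 3, 6, 10) are the cumulative lengths of 1-, 2-, 3-, and 4-byte UTF-8 sequences consumed by successive calls. A sketch of the lead-byte rule behind those strides; Utf8SequenceLength is a hypothetical helper, not ART's:

    #include <cstddef>
    #include <cstdint>

    // Sequence length from the UTF-8 lead byte.
    static size_t Utf8SequenceLength(uint8_t lead) {
      if ((lead & 0x80) == 0x00) return 1;  // 0xxxxxxx
      if ((lead & 0xe0) == 0xc0) return 2;  // 110xxxxx
      if ((lead & 0xf0) == 0xe0) return 3;  // 1110xxxx
      return 4;                             // 11110xxx
    }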
stride_iterator.h
   34  StrideIterator(T* ptr, size_t stride)  [argument]
   35      : ptr_(reinterpret_cast<uintptr_t>(ptr)),
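
Note: a stride iterator advances by a byte stride rather than sizeof(T), letting one iterator type walk arrays of objects whose size is only known at runtime. A minimal sketch trimmed to essentials (ART's StrideIterator is a full random-access iterator):

    #include <cstddef>
    #include <cstdint>

    template <typename T>
    class StrideIter {
     public:
      StrideIter(T* ptr, size_t stride)
          : ptr_(reinterpret_cast<uintptr_t>(ptr)), stride_(stride) {}
      T& operator*() const { return *reinterpret_cast<T*>(ptr_); }
      StrideIter& operator++() { ptr_ += stride_; return *this; }
      bool operator!=(const StrideIter& other) const { return ptr_ != other.ptr_; }
     private:
      uintptr_t ptr_;  // Kept as an integer so arithmetic is byte-wise.
      size_t stride_;
    };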
oat.cc
  409  const char* ptr = reinterpret_cast<const char*>(&key_value_store_);  [local]
  410  const char* end = ptr + key_value_store_size_;
  412  while (ptr < end) {
  414    const char* str_end = ParseString(ptr, end);
  416    if (strcmp(key, ptr) == 0) {
  423    ptr = ParseString(str_end + 1, end) + 1;
  435  const char* ptr = reinterpret_cast<const char*>(&key_value_store_);  [local]
  436  const char* end = ptr + key_value_store_size_;
  439  while (ptr < end && counter >= 0) {
  441    const char* str_end = ParseString(ptr, end);
  [all...]
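
Note: the oat header's key/value store is a packed sequence of NUL-terminated "key" and "value" strings, walked with two string hops per pair. A sketch of that scan; FindValue is illustrative, and unlike ART's ParseString-based code it leans on strlen, so a real parser must also guarantee NUL termination before `end`:

    #include <cstring>

    static const char* FindValue(const char* ptr, const char* end, const char* key) {
      while (ptr < end) {
        const char* value = ptr + std::strlen(ptr) + 1;  // Hop past the key.
        if (value >= end) break;                         // Truncated store.
        if (std::strcmp(ptr, key) == 0) return value;
        ptr = value + std::strlen(value) + 1;            // Hop past the value.
      }
      return nullptr;
    }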
type_lookup_table.h
  128  const uint8_t* ptr = dex_file_begin_ + str_offset;  [local]
  131  DecodeUnsignedLeb128(&ptr);
  133  str, reinterpret_cast<const char*>(ptr)) == 0;
mem_map.cc
  141  bool MemMap::ContainedWithinExistingMap(uint8_t* ptr, size_t size, std::string* error_msg) {  [argument]
  142    uintptr_t begin = reinterpret_cast<uintptr_t>(ptr);
  264  static inline void* TryMemMapLow4GB(void* ptr,  [argument]
  270    void* actual = mmap(ptr, page_aligned_byte_count, prot, flags, fd, offset);
  840  for (uintptr_t ptr = next_mem_pos_; ptr < 4 * GB; ptr += kPageSize) {
  842    // Find the first map which is address > ptr.
  843    auto it = gMaps->upper_bound(reinterpret_cast<void*>(ptr));
  848  ptr
  [all...]
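
Note: the loop at line 840 probes for a free slot below 4 GB by consulting an ordered map of existing mappings. A self-contained sketch of that search; the map layout (start -> end), FindFreeLow4GB, and the constants are illustrative, and 64-bit addresses are assumed:

    #include <cstdint>
    #include <iterator>
    #include <map>

    // maps: existing mappings, keyed by start address, value = end address.
    uint64_t FindFreeLow4GB(const std::map<uint64_t, uint64_t>& maps, uint64_t size) {
      constexpr uint64_t kPageSize = 4096;
      constexpr uint64_t k4GB = uint64_t{4} << 30;
      for (uint64_t ptr = kPageSize; ptr + size <= k4GB;) {
        auto it = maps.upper_bound(ptr);  // First mapping starting above ptr.
        if (it != maps.begin()) {
          auto prev = std::prev(it);
          if (prev->second > ptr) {  // ptr falls inside an existing mapping.
            ptr = prev->second;
            continue;
          }
        }
        if (it == maps.end() || ptr + size <= it->first) {
          return ptr;  // The gap before the next mapping is large enough.
        }
        ptr = it->second;  // Skip past the blocking mapping and retry.
      }
      return 0;  // Nothing free below 4 GB.
    }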
/art/test/ti-agent/ |
scoped_local_ref.h
   38  void reset(T ptr = nullptr) {
   39    if (ptr != mLocalRef) {
   43      mLocalRef = ptr;
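
Note: reset() is the usual RAII idiom for JNI local references: release the old reference before adopting the new one, guarding against self-reset. A sketch with an illustrative class name:

    #include <jni.h>

    template <typename T>  // T is a jobject-like reference type.
    class ScopedLocalRefSketch {
     public:
      ScopedLocalRefSketch(JNIEnv* env, T ref) : env_(env), ref_(ref) {}
      ~ScopedLocalRefSketch() { reset(); }
      void reset(T ptr = nullptr) {
        if (ptr != ref_) {
          if (ref_ != nullptr) {
            env_->DeleteLocalRef(ref_);  // Drop the previously held reference.
          }
          ref_ = ptr;
        }
      }
     private:
      JNIEnv* env_;
      T ref_;
    };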
jvmti_helper.h
   49  void operator()(unsigned char* ptr) const {
   51    jvmtiError ret = env_->Deallocate(ptr);
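
Note: memory handed out by JVMTI (e.g. from GetMethodName) must be returned through jvmtiEnv::Deallocate, so the entry above wraps it in a deleter. A minimal sketch of the pattern; JvmtiDeleter and JvmtiUniquePtr are illustrative names:

    #include <jvmti.h>
    #include <memory>

    struct JvmtiDeleter {
      jvmtiEnv* env;
      void operator()(unsigned char* ptr) const {
        if (ptr != nullptr) {
          env->Deallocate(ptr);  // Ignoring the jvmtiError for brevity.
        }
      }
    };
    using JvmtiUniquePtr = std::unique_ptr<unsigned char, JvmtiDeleter>;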
/art/runtime/mirror/ |
object_reference-inl.h
   28  void ObjectReference<kPoisonReferences, MirrorType>::Assign(ObjPtr<MirrorType> ptr) {  [argument]
   29    Assign(ptr.Ptr());
   33  HeapReference<MirrorType> HeapReference<MirrorType>::FromObjPtr(ObjPtr<MirrorType> ptr) {  [argument]
   34    return HeapReference<MirrorType>(ptr.Ptr());
/art/runtime/base/ |
scoped_arena_allocator.h
   63  static ArenaFreeTag& ArenaTagForAllocation(void* ptr) {  [argument]
   65    return *(reinterpret_cast<ArenaFreeTag*>(ptr) - 1);
   95  uint8_t* ptr = top_ptr_;  [variable]
   96  if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
   97    ptr = AllocateFromNextArena(rounded_bytes);
  100  top_ptr_ = ptr + rounded_bytes;
  102  ptr += kAlignment;
  103  ArenaTagForAllocation(ptr) = ArenaFreeTag::kUsed;  [variable]
  105  return ptr;
  169  static void operator delete(void* ptr ATTRIBUTE_UNUSED
  [all...]
scoped_arena_allocator.cc
   98  uint8_t* ptr = top_ptr_;  [local]
   99  if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
  100    ptr = AllocateFromNextArena(rounded_bytes);
  101    CHECK(ptr != nullptr) << "Failed to allocate memory";
  102    MEMORY_TOOL_MAKE_NOACCESS(ptr, top_end_ - ptr);
  105  top_ptr_ = ptr + rounded_bytes;
  106  MEMORY_TOOL_MAKE_UNDEFINED(ptr, bytes);
  107  return ptr;
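
Note: both scoped_arena_allocator entries show the same bump-pointer fast path: round the request up to the arena alignment, hand out the current top pointer, and advance it. A sketch of just that fast path; BumpAlloc and kAlignment are illustrative, and the memory-tool bookkeeping is omitted:

    #include <cstddef>
    #include <cstdint>

    static constexpr size_t kAlignment = 8;  // Assumed arena alignment.

    uint8_t* BumpAlloc(uint8_t*& top_ptr, uint8_t* top_end, size_t bytes) {
      size_t rounded = (bytes + kAlignment - 1) & ~(kAlignment - 1);
      if (static_cast<size_t>(top_end - top_ptr) < rounded) {
        return nullptr;  // Caller takes the AllocateFromNextArena slow path.
      }
      uint8_t* ptr = top_ptr;
      top_ptr = ptr + rounded;  // Bump: the only per-allocation state update.
      return ptr;
    }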
arena_allocator.h
  174  void MakeDefined(void* ptr, size_t size) {  [argument]
  176    DoMakeDefined(ptr, size);
  179  void MakeUndefined(void* ptr, size_t size) {  [argument]
  181    DoMakeUndefined(ptr, size);
  184  void MakeInaccessible(void* ptr, size_t size) {  [argument]
  186    DoMakeInaccessible(ptr, size);
  191  void DoMakeDefined(void* ptr, size_t size);
  192  void DoMakeUndefined(void* ptr, size_t size);
  193  void DoMakeInaccessible(void* ptr, size_t size);
  225  // Return true if ptr i
  [all...]
arena_object.h
   45  ALWAYS_INLINE void* operator new(size_t, void* ptr) noexcept { return ptr; }
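
Note: this line (and the identical one in macros.h below) re-declares the trivial placement form of operator new. A sketch of why that is needed; ArenaThing is illustrative: once a class declares any class-scope operator new, the global placement form is hidden and `new (mem) T(...)` stops compiling unless it is re-declared:

    #include <cstddef>

    class ArenaThing {
     public:
      // Trivial placement new: construct at caller-provided storage.
      static void* operator new(std::size_t, void* ptr) noexcept { return ptr; }
      // (A class-specific arena operator new would be declared alongside.)
    };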
scoped_arena_containers.h
  207  ALWAYS_INLINE void ProtectMemory(T* ptr, size_t size) const {  [argument]
  211    memset(ptr, kMagicFill, size);
  212    MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
  214    CHECK(ArenaStack::ArenaTagForAllocation(reinterpret_cast<void*>(ptr)) == ArenaFreeTag::kUsed)
  215        << "Freeing invalid object " << ptr;
  216    ArenaStack::ArenaTagForAllocation(reinterpret_cast<void*>(ptr)) = ArenaFreeTag::kFree;
  218    memset(ptr, kMagicFill, size);
  223  void operator()(T* ptr) const {
  224    if (ptr != nullptr) {
  225      ptr
  [all...]
hash_set.h
  211  HashSet(const uint8_t* ptr, bool make_copy_of_data, size_t* read_count) noexcept {
  214    offset = ReadFromBytes(ptr, offset, &temp);
  216    offset = ReadFromBytes(ptr, offset, &temp);
  219    offset = ReadFromBytes(ptr, offset, &temp);
  221    offset = ReadFromBytes(ptr, offset, &min_load_factor_);
  222    offset = ReadFromBytes(ptr, offset, &max_load_factor_);
  225    data_ = const_cast<T*>(reinterpret_cast<const T*>(ptr + offset));
  232    offset = ReadFromBytes(ptr, offset, &data_[i]);
  241  size_t WriteToMemory(uint8_t* ptr) const {
  243    offset = WriteToBytes(ptr, offse
  648  WriteToBytes(uint8_t* ptr, size_t offset, Elem n)  [argument]
  657  ReadFromBytes(const uint8_t* ptr, size_t offset, Elem* out)  [argument]
  [all...]
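
Note: the chained `offset = ReadFromBytes(ptr, offset, ...)` calls above work because each helper copies one element at a byte offset and returns the advanced offset. A sketch of helpers in that shape (ART's versions may differ in detail, e.g. alignment handling):

    #include <cstdint>
    #include <cstring>

    template <typename Elem>
    size_t WriteToBytes(uint8_t* ptr, size_t offset, Elem n) {
      if (ptr != nullptr) {  // Passing nullptr measures the size only.
        std::memcpy(ptr + offset, &n, sizeof(n));
      }
      return offset + sizeof(n);
    }

    template <typename Elem>
    size_t ReadFromBytes(const uint8_t* ptr, size_t offset, Elem* out) {
      std::memcpy(out, ptr + offset, sizeof(*out));
      return offset + sizeof(*out);
    }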
macros.h
   43  ALWAYS_INLINE void* operator new(size_t, void* ptr) noexcept { return ptr; } \
/art/compiler/utils/ |
swap_space.cc
   74  if (munmap(chunk.ptr, chunk.size) != 0) {
   76      << static_cast<const void*>(chunk.ptr) << " size=" << chunk.size;
  125  // The free_by_start_ map contains disjoint intervals ordered by the `ptr`.
  127  it->free_by_start_entry->ptr += size;
  130  // The free_by_size_ map is ordered by the `size` and then `free_by_start_entry->ptr`.
  131  // Adjusting the `ptr` above does not change that ordering but decreasing `size` can
  148  return old_chunk.ptr;
  154  SpaceChunk remainder = { new_chunk.ptr + size, new_chunk.size - size };
  157  return new_chunk.ptr;
  168  uint8_t* ptr  [local]
  189  Free(void* ptr, size_t size)  [argument]
  [all...]
/art/runtime/arch/arm/ |
fault_handler_arm.cc
   77  uint8_t* ptr = reinterpret_cast<uint8_t*>(sc->arm_pc);  [local]
   78  VLOG(signals) << "pc: " << std::hex << static_cast<void*>(ptr);
   80  if (ptr == nullptr) {
   86  uint32_t instr_size = GetInstructionSize(ptr);
  104  uint8_t* ptr = reinterpret_cast<uint8_t*>(sc->arm_pc);  [local]
  105  uint32_t instr_size = GetInstructionSize(ptr);
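
Note: the fault handler needs the size of the faulting instruction to compute a return PC. A sketch of the Thumb-2 rule such a helper must encode; ThumbInstructionSize is a hypothetical stand-in for ART's GetInstructionSize, and ARM-mode instructions are always 4 bytes:

    #include <cstdint>
    #include <cstring>

    // In Thumb-2, a halfword whose top five bits are 0b11101, 0b11110 or
    // 0b11111 is the first half of a 32-bit instruction; otherwise 16-bit.
    static uint32_t ThumbInstructionSize(const uint8_t* pc) {
      uint16_t first;
      std::memcpy(&first, pc, sizeof(first));  // Little-endian assumed.
      uint16_t op = first >> 11;
      return (op == 0x1d || op == 0x1e || op == 0x1f) ? 4 : 2;
    }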
/art/runtime/gc/space/ |
memory_tool_malloc_space.h
   45  size_t Free(Thread* self, mirror::Object* ptr) OVERRIDE
   51  void RegisterRecentFree(mirror::Object* ptr ATTRIBUTE_UNUSED) OVERRIDE {}