Searched refs:ptr (Results 1 - 25 of 88) sorted by relevance

/art/runtime/
linear_alloc.cc
26 void* LinearAlloc::Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size) { argument
28 return allocator_.Realloc(ptr, old_size, new_size);
46 bool LinearAlloc::Contains(void* ptr) const {
48 return allocator_.Contains(ptr);
51 bool LinearAlloc::ContainsUnsafe(void* ptr) const {
52 return allocator_.Contains(ptr);
imtable.h
37 uint8_t* ptr = reinterpret_cast<uint8_t*>(this) + OffsetOfElement(index, pointer_size); local
39 uint32_t value = *reinterpret_cast<uint32_t*>(ptr);
42 uint64_t value = *reinterpret_cast<uint64_t*>(ptr);
49 uint8_t* ptr = reinterpret_cast<uint8_t*>(this) + OffsetOfElement(index, pointer_size); local
53 *reinterpret_cast<uint32_t*>(ptr) = static_cast<uint32_t>(value);
55 *reinterpret_cast<uint64_t*>(ptr) = reinterpret_cast<uint64_t>(method);
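
The IMT stores 32- or 64-bit method pointers depending on the target word size, so each slot access branches on pointer_size and reinterprets the slot width accordingly. The read side, reduced to a minimal sketch (the bool flag stands in for ART's PointerSize):

#include <cstdint>

// Read a method pointer from 'slot', whose width depends on the target.
uint64_t ReadSlot(const uint8_t* slot, bool is_64bit) {
  if (is_64bit) {
    return *reinterpret_cast<const uint64_t*>(slot);  // 64-bit slot
  }
  return *reinterpret_cast<const uint32_t*>(slot);    // 32-bit slot, zero-extended
}
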
linear_alloc.h
34 void* Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size) REQUIRES(!lock_);
48 bool Contains(void* ptr) const REQUIRES(!lock_);
52 bool ContainsUnsafe(void* ptr) const NO_THREAD_SAFETY_ANALYSIS;
leb128.h
32 const uint8_t* ptr = *data; local
33 int result = *(ptr++);
35 int cur = *(ptr++);
38 cur = *(ptr++);
41 cur = *(ptr++);
46 cur = *(ptr++);
52 *data = ptr;
68 const uint8_t* ptr = *data; local
69 int32_t result = *(ptr++);
73 int cur = *(ptr
[all...]
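
The unrolled byte reads above implement standard unsigned LEB128: each byte carries 7 payload bits and the high bit flags a continuation. The same decoder in loop form, as a minimal sketch (names are illustrative, not ART's; assumes a well-formed encoding of at most five bytes for a 32-bit value):

#include <cstdint>

// Decode an unsigned LEB128 value and advance *data past it.
uint32_t DecodeUleb128(const uint8_t** data) {
  const uint8_t* ptr = *data;
  uint32_t result = 0;
  int shift = 0;
  uint8_t cur;
  do {
    cur = *(ptr++);
    result |= static_cast<uint32_t>(cur & 0x7f) << shift;  // low 7 bits are payload
    shift += 7;
  } while ((cur & 0x80) != 0);  // high bit set: more bytes follow
  *data = ptr;
  return result;
}
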
utf_test.cc
61 const char* ptr = start; local
65 pair = GetUtf16FromUtf8(&ptr);
68 EXPECT_ARRAY_POSITION(1, ptr, start);
71 pair = GetUtf16FromUtf8(&ptr);
74 EXPECT_ARRAY_POSITION(3, ptr, start);
77 pair = GetUtf16FromUtf8(&ptr);
80 EXPECT_ARRAY_POSITION(6, ptr, start);
83 pair = GetUtf16FromUtf8(&ptr);
86 EXPECT_ARRAY_POSITION(10, ptr, start);
89 pair = GetUtf16FromUtf8(&ptr);
97 const char* ptr = start; local
[all...]
stride_iterator.h
34 StrideIterator(T* ptr, size_t stride) argument
35 : ptr_(reinterpret_cast<uintptr_t>(ptr)),
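
A stride iterator advances by a runtime byte stride rather than sizeof(T), which is how ART walks packed arrays of variable-sized elements. The equivalent raw pointer walk, as a sketch (Item is a stand-in element type):

#include <cstddef>
#include <cstdint>

struct Item { uint32_t id; };

// Visit 'count' elements laid out every 'stride' bytes (stride >= sizeof(Item)).
void VisitPacked(uint8_t* base, size_t count, size_t stride) {
  uintptr_t ptr = reinterpret_cast<uintptr_t>(base);
  for (size_t i = 0; i < count; ++i, ptr += stride) {
    Item* item = reinterpret_cast<Item*>(ptr);  // element begins at each stride boundary
    (void)item->id;  // ... use the element ...
  }
}
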
dex_file-inl.h
29 const uint8_t* ptr = begin_ + string_id.string_data_off_; local
30 return DecodeUnsignedLeb128(&ptr);
36 const uint8_t* ptr = begin_ + string_id.string_data_off_; local
37 *utf16_length = DecodeUnsignedLeb128(&ptr);
38 return reinterpret_cast<const char*>(ptr);
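
Dex string data is a ULEB128-encoded UTF-16 length followed by the MUTF-8 bytes, which is why both accessors decode the length prefix first. A hypothetical helper combining the two (reusing the DecodeUleb128 sketch above; not ART's actual API):

#include <cstdint>

// Hypothetical: return the MUTF-8 payload at string_data_off and report the
// decoded UTF-16 length through *utf16_length.
const char* GetStringData(const uint8_t* begin, uint32_t string_data_off,
                          uint32_t* utf16_length) {
  const uint8_t* ptr = begin + string_data_off;
  *utf16_length = DecodeUleb128(&ptr);          // length prefix
  return reinterpret_cast<const char*>(ptr);    // MUTF-8 bytes follow it
}
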
oat.cc
406 const char* ptr = reinterpret_cast<const char*>(&key_value_store_); local
407 const char* end = ptr + key_value_store_size_;
409 while (ptr < end) {
411 const char* str_end = ParseString(ptr, end);
413 if (strcmp(key, ptr) == 0) {
420 ptr = ParseString(str_end + 1, end) + 1;
432 const char* ptr = reinterpret_cast<const char*>(&key_value_store_); local
433 const char* end = ptr + key_value_store_size_;
436 while (ptr < end && counter >= 0) {
438 const char* str_end = ParseString(ptr, en
[all...]
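
The key-value store in the oat header is a run of NUL-terminated strings alternating key, value. A minimal lookup sketch under that layout (ParseString in the snippet presumably scans to the terminating NUL; plain strlen stands in here):

#include <cstring>

// Find the value for 'key' in [ptr, end). Returns nullptr if absent.
const char* GetStoreValueByKey(const char* ptr, const char* end, const char* key) {
  while (ptr < end) {
    const char* value = ptr + strlen(ptr) + 1;  // value starts after the key's NUL
    if (value >= end) {
      break;  // truncated store
    }
    if (strcmp(key, ptr) == 0) {
      return value;
    }
    ptr = value + strlen(value) + 1;  // next key starts after the value's NUL
  }
  return nullptr;
}
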
type_lookup_table.h
125 const uint8_t* ptr = dex_file_.Begin() + str_offset; local
127 DecodeUnsignedLeb128(&ptr);
129 str, reinterpret_cast<const char*>(ptr)) == 0;
mem_map.cc
136 bool MemMap::ContainedWithinExistingMap(uint8_t* ptr, size_t size, std::string* error_msg) { argument
137 uintptr_t begin = reinterpret_cast<uintptr_t>(ptr);
259 static inline void* TryMemMapLow4GB(void* ptr, argument
265 void* actual = mmap(ptr, page_aligned_byte_count, prot, flags, fd, offset);
807 for (uintptr_t ptr = next_mem_pos_; ptr < 4 * GB; ptr += kPageSize) {
809 // Find the first map which is address > ptr.
810 auto it = maps_->upper_bound(reinterpret_cast<void*>(ptr));
815 ptr
[all...]
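
The elided loop walks candidate addresses below 4 GiB, skipping ranges already covered by known mappings before handing a hint to mmap. The hint-and-verify step in isolation, as a sketch (prot/flags here are illustrative, not ART's exact arguments):

#include <sys/mman.h>
#include <cstddef>

// Try to map at 'hint'; the kernel may place the mapping elsewhere, so verify.
void* TryMapAt(void* hint, size_t length) {
  void* actual = mmap(hint, length, PROT_READ | PROT_WRITE,
                      MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  if (actual == MAP_FAILED) {
    return nullptr;
  }
  if (actual != hint) {  // kernel ignored the hint; undo and report failure
    munmap(actual, length);
    return nullptr;
  }
  return actual;
}
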
class_table.cc
165 size_t ClassTable::WriteToMemory(uint8_t* ptr) const {
175 const size_t ret = combined.WriteToMemory(ptr);
177 if (kIsDebugBuild && ptr != nullptr) {
179 ClassSet class_set(ptr, /*make copy*/false, &read_count);
185 size_t ClassTable::ReadFromMemory(uint8_t* ptr) { argument
187 AddClassSet(ClassSet(ptr, /*make copy*/false, &read_count));
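WriteToMemory/ReadFromMemory round-trip the class set through a flat byte buffer, and the debug block re-reads what was just written as a self-check. A usage sketch, assuming (from the ptr != nullptr guard) that a null destination performs a size-only pass:

#include <cstdint>
#include <vector>

std::vector<uint8_t> SerializeClassTable(const ClassTable& table) {
  size_t size = table.WriteToMemory(nullptr);  // pass 1: measure (assumed null => dry run)
  std::vector<uint8_t> buffer(size);
  table.WriteToMemory(buffer.data());          // pass 2: write for real
  return buffer;
}
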
/art/runtime/base/
scoped_arena_allocator.h
65 static ArenaFreeTag& ArenaTagForAllocation(void* ptr) { argument
67 return *(reinterpret_cast<ArenaFreeTag*>(ptr) - 1);
94 uint8_t* ptr = top_ptr_; variable
95 if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
96 ptr = AllocateFromNextArena(rounded_bytes);
99 top_ptr_ = ptr + rounded_bytes;
101 ptr += kArenaAlignment;
102 ArenaTagForAllocation(ptr) = ArenaFreeTag::kUsed; variable
104 return ptr;
168 static void operator delete(void* ptr ATTRIBUTE_UNUSE
[all...]
scoped_arena_allocator.cc
98 uint8_t* ptr = top_ptr_; local
99 if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
100 ptr = AllocateFromNextArena(rounded_bytes);
101 CHECK(ptr != nullptr) << "Failed to allocate memory";
102 MEMORY_TOOL_MAKE_NOACCESS(ptr, top_end_ - ptr);
105 top_ptr_ = ptr + rounded_bytes;
106 MEMORY_TOOL_MAKE_UNDEFINED(ptr, bytes);
107 return ptr;
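
Both scoped_arena_allocator snippets show the same bump-pointer fast path: round the request up to the arena alignment, check the space left in the current arena, and advance top_ptr_. Stripped of the memory-tool hooks and debug tagging, a minimal sketch (the alignment constant is illustrative):

#include <cstddef>
#include <cstdint>

struct BumpArena {
  uint8_t* top_ptr_;
  uint8_t* top_end_;
  static constexpr size_t kAlignment = 8;  // illustrative; ART uses its own constant

  void* Alloc(size_t bytes) {
    size_t rounded = (bytes + kAlignment - 1) & ~(kAlignment - 1);  // round up
    uint8_t* ptr = top_ptr_;
    if (static_cast<size_t>(top_end_ - ptr) < rounded) {
      return nullptr;  // slow path: the real code grabs a fresh arena here
    }
    top_ptr_ = ptr + rounded;  // bump past this allocation
    return ptr;
  }
};
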
arena_allocator.h
170 void MakeDefined(void* ptr, size_t size) { argument
172 DoMakeDefined(ptr, size);
175 void MakeUndefined(void* ptr, size_t size) { argument
177 DoMakeUndefined(ptr, size);
180 void MakeInaccessible(void* ptr, size_t size) { argument
182 DoMakeInaccessible(ptr, size);
187 void DoMakeDefined(void* ptr, size_t size);
188 void DoMakeUndefined(void* ptr, size_t size);
189 void DoMakeInaccessible(void* ptr, size_t size);
221 // Return true if ptr i
[all...]
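
The MakeDefined/MakeUndefined/MakeInaccessible hooks delegate to memory-tool poisoning. Under AddressSanitizer the mapping is roughly the following sketch (ASan does not distinguish defined from undefined; that distinction belongs to Valgrind/MSan, which ART's MEMORY_TOOL_MAKE_* macros also cover):

#include <sanitizer/asan_interface.h>
#include <cstddef>

void MakeInaccessible(void* ptr, size_t size) {
  ASAN_POISON_MEMORY_REGION(ptr, size);    // any access now reports under ASan
}

void MakeDefined(void* ptr, size_t size) {
  ASAN_UNPOISON_MEMORY_REGION(ptr, size);  // range becomes usable again
}
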
arena_object.h
45 ALWAYS_INLINE void* operator new(size_t, void* ptr) noexcept { return ptr; }
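
That one-liner is standard placement new: the arena already owns the storage, so operator new just returns it and only the constructor runs. Usage looks like this; note the manual destructor call, since arenas release memory in bulk rather than per object:

#include <new>  // placement-new declaration

struct Node {
  int value;
  explicit Node(int v) : value(v) {}
};

void Construct(void* arena_slot) {
  Node* n = new (arena_slot) Node(42);  // no allocation; only the constructor runs
  n->~Node();                           // destructor is explicit in arena code
}
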
scoped_arena_containers.h
207 ALWAYS_INLINE void ProtectMemory(T* ptr, size_t size) const { argument
211 memset(ptr, kMagicFill, size);
212 MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
214 CHECK(ArenaStack::ArenaTagForAllocation(reinterpret_cast<void*>(ptr)) == ArenaFreeTag::kUsed)
215 << "Freeing invalid object " << ptr;
216 ArenaStack::ArenaTagForAllocation(reinterpret_cast<void*>(ptr)) = ArenaFreeTag::kFree;
218 memset(ptr, kMagicFill, size);
223 void operator()(T* ptr) const {
224 if (ptr != nullptr) {
225 ptr
[all...]
hash_set.h
211 HashSet(const uint8_t* ptr, bool make_copy_of_data, size_t* read_count) noexcept {
214 offset = ReadFromBytes(ptr, offset, &temp);
216 offset = ReadFromBytes(ptr, offset, &temp);
219 offset = ReadFromBytes(ptr, offset, &temp);
221 offset = ReadFromBytes(ptr, offset, &min_load_factor_);
222 offset = ReadFromBytes(ptr, offset, &max_load_factor_);
225 data_ = const_cast<T*>(reinterpret_cast<const T*>(ptr + offset));
232 offset = ReadFromBytes(ptr, offset, &data_[i]);
241 size_t WriteToMemory(uint8_t* ptr) const {
243 offset = WriteToBytes(ptr, offse
648 WriteToBytes(uint8_t* ptr, size_t offset, Elem n) argument
657 ReadFromBytes(const uint8_t* ptr, size_t offset, Elem* out) argument
[all...]
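
The HashSet (de)serializer threads a byte offset through helpers that each transfer one element and return the advanced offset. A sketch of that helper pattern, assuming (consistent with the measure-then-write usage seen in class_table.cc) that a null destination is a size-only dry run:

#include <cstddef>
#include <cstdint>
#include <cstring>

template <typename Elem>
size_t WriteToBytes(uint8_t* ptr, size_t offset, Elem n) {
  if (ptr != nullptr) {                  // null destination: just compute the size
    memcpy(ptr + offset, &n, sizeof(n));
  }
  return offset + sizeof(n);             // caller threads the offset through
}

template <typename Elem>
size_t ReadFromBytes(const uint8_t* ptr, size_t offset, Elem* out) {
  memcpy(out, ptr + offset, sizeof(*out));
  return offset + sizeof(*out);
}
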
arena_allocator.cc
148 void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) { argument
149 MEMORY_TOOL_MAKE_DEFINED(ptr, size);
152 void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) { argument
153 MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
156 void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) { argument
157 MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
379 bool ArenaAllocator::Contains(const void* ptr) const {
380 if (ptr >= begin_ && ptr < end_) {
384 if (cur_arena->Contains(ptr)) {
[all...]
variant_map.h
235 auto* ptr = Get(key); local
236 return (ptr == nullptr) ? key.CreateDefaultValue() : *ptr;
259 TValue* ptr = Get(key); local
260 if (ptr != nullptr) {
261 return std::move(*ptr);
289 TValue* ptr = Get(key); local
290 if (ptr == nullptr) {
/art/compiler/utils/
swap_space.cc
74 if (munmap(chunk.ptr, chunk.size) != 0) {
76 << static_cast<const void*>(chunk.ptr) << " size=" << chunk.size;
125 void* ret = old_chunk.ptr;
129 SpaceChunk new_chunk = { old_chunk.ptr + size, old_chunk.size - size };
143 uint8_t* ptr = reinterpret_cast<uint8_t*>( local
145 if (ptr == MAP_FAILED) {
154 SpaceChunk new_chunk = {ptr, next_part};
164 void SwapSpace::Free(void* ptr, size_t size) { argument
173 SpaceChunk chunk = { reinterpret_cast<uint8_t*>(ptr), size };
182 chunk.ptr
[all...]
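
Allocation carves the request off the front of a free chunk and returns the remainder to the free list, which is keyed by address; Free() re-inserts (and the elided code merges) chunks. The split step in isolation, using the SpaceChunk layout from swap_space.h below; a sketch of the shape, not the exact code:

#include <cstddef>
#include <cstdint>

struct SpaceChunk {
  uint8_t* ptr;
  size_t size;
};

// Carve 'size' bytes off the front of a free chunk; the tail (possibly empty)
// goes back on the free list.
SpaceChunk SplitFront(const SpaceChunk& old_chunk, size_t size, void** out_alloc) {
  *out_alloc = old_chunk.ptr;  // the allocation starts at the chunk base
  return SpaceChunk{old_chunk.ptr + size, old_chunk.size - size};
}
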
swap_space.h
39 void Free(void* ptr, size_t size) REQUIRES(!lock_);
48 uint8_t* ptr; member in struct:art::SwapSpace::SpaceChunk
52 return reinterpret_cast<uintptr_t>(ptr);
55 return reinterpret_cast<uintptr_t>(ptr) + size;
62 return reinterpret_cast<uintptr_t>(a.ptr) < reinterpret_cast<uintptr_t>(b.ptr);
/art/runtime/gc/space/
memory_tool_malloc_space.h
45 size_t Free(Thread* self, mirror::Object* ptr) OVERRIDE
51 void RegisterRecentFree(mirror::Object* ptr ATTRIBUTE_UNUSED) OVERRIDE {}
/art/compiler/debug/dwarf/
writer.h
117 void PushData(const uint8_t* ptr, size_t num_bytes) { argument
118 data_->insert(data_->end(), ptr, ptr + num_bytes);
121 void PushData(const char* ptr, size_t num_bytes) { argument
122 data_->insert(data_->end(), ptr, ptr + num_bytes);
/art/runtime/arch/arm/
fault_handler_arm.cc
96 uint8_t* ptr = reinterpret_cast<uint8_t*>(sc->arm_pc); local
97 VLOG(signals) << "pc: " << std::hex << static_cast<void*>(ptr);
99 if (ptr == nullptr) {
105 uint32_t instr_size = GetInstructionSize(ptr);
121 uint8_t* ptr = reinterpret_cast<uint8_t*>(sc->arm_pc); local
123 uint32_t instr_size = GetInstructionSize(ptr);
/art/runtime/base/unix_file/
fd_file.cc
214 char* ptr = static_cast<char*>(buffer); local
216 ssize_t bytes_read = TEMP_FAILURE_RETRY(read_func(fd, ptr, byte_count, offset));
223 ptr += bytes_read; // Move the buffer forward.
242 const char* ptr = static_cast<const char*>(buffer); local
245 ? TEMP_FAILURE_RETRY(pwrite(fd_, ptr, byte_count, offset))
246 : TEMP_FAILURE_RETRY(write(fd_, ptr, byte_count));
251 ptr += bytes_written; // Move the buffer forward.
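
read() and write() may transfer fewer bytes than requested and may fail with EINTR, which is why the snippet advances the buffer pointer by whatever actually transferred and loops. The shape of that loop as a sketch (TEMP_FAILURE_RETRY is the glibc/bionic macro that re-issues a call interrupted by a signal):

#include <unistd.h>
#include <cstddef>

// Read exactly byte_count bytes, tolerating short reads; returns false on
// error or premature EOF.
bool ReadFully(int fd, void* buffer, size_t byte_count) {
  char* ptr = static_cast<char*>(buffer);
  while (byte_count > 0) {
    ssize_t bytes_read = TEMP_FAILURE_RETRY(read(fd, ptr, byte_count));
    if (bytes_read <= 0) {
      return false;             // error, or EOF before we got everything
    }
    byte_count -= bytes_read;   // short read: loop for the remainder
    ptr += bytes_read;          // move the buffer forward
  }
  return true;
}
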

Completed in 2282 milliseconds
