/art/runtime/
leb128.h
     29  const uint8_t* ptr = *data;   [local]
     30  int result = *(ptr++);
     32  int cur = *(ptr++);
     35  cur = *(ptr++);
     38  cur = *(ptr++);
     43  cur = *(ptr++);
     49  *data = ptr;
     65  const uint8_t* ptr = *data;   [local]
     66  int32_t result = *(ptr++);
     70  int cur = *(ptr [all...]

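The leb128.h hits are ART's hand-unrolled LEB128 decoders. As a reference for what the unrolled byte reads are doing, here is a minimal loop-form sketch of unsigned LEB128 decoding (DecodeUleb128 is an illustrative name, not ART's unrolled function):

    #include <cstdint>

    // Each byte carries 7 payload bits; the high bit flags continuation.
    // Advances *data past the encoded value, like the hits above.
    static uint32_t DecodeUleb128(const uint8_t** data) {
      const uint8_t* ptr = *data;
      uint32_t result = 0;
      int shift = 0;
      uint8_t cur;
      do {
        cur = *(ptr++);
        result |= static_cast<uint32_t>(cur & 0x7f) << shift;
        shift += 7;
      } while ((cur & 0x80) != 0 && shift < 35);  // at most 5 bytes for 32 bits
      *data = ptr;
      return result;
    }
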
dex_file-inl.h
     29  const byte* ptr = begin_ + string_id.string_data_off_;   [local]
     30  return DecodeUnsignedLeb128(&ptr);
     36  const byte* ptr = begin_ + string_id.string_data_off_;   [local]
     37  *utf16_length = DecodeUnsignedLeb128(&ptr);
     38  return reinterpret_cast<const char*>(ptr);

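These hits read a dex string_data_item: a ULEB128 utf16 length followed by the MUTF-8 bytes. A hedged sketch of that layout, reusing the decoder sketched above (StringData and ReadStringData are illustrative names, not ART's API):

    // string_data_off is an offset from the start of the dex file.
    struct StringData {
      uint32_t utf16_length;   // decoded ULEB128 prefix
      const char* utf8_data;   // MUTF-8 bytes following the length
    };

    static StringData ReadStringData(const uint8_t* dex_begin, uint32_t string_data_off) {
      const uint8_t* ptr = dex_begin + string_data_off;
      uint32_t utf16_length = DecodeUleb128(&ptr);  // advances ptr to the chars
      return { utf16_length, reinterpret_cast<const char*>(ptr) };
    }
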
oat.cc
    414  const char* ptr = reinterpret_cast<const char*>(&key_value_store_);   [local]
    415  const char* end = ptr + key_value_store_size_;
    417  while (ptr < end) {
    419  const char* str_end = ParseString(ptr, end);
    421  if (strcmp(key, ptr) == 0) {
    428  ptr = ParseString(str_end + 1, end) + 1;
    440  const char* ptr = reinterpret_cast<const char*>(&key_value_store_);   [local]
    441  const char* end = ptr + key_value_store_size_;
    444  while (ptr < end && counter >= 0) {
    446  const char* str_end = ParseString(ptr, en [all...]

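The oat.cc hits walk the oat header's key/value store, which these excerpts suggest is a flat run of interleaved NUL-terminated key and value strings. A minimal sketch of such a lookup (FindValue is illustrative, not the OatHeader API):

    #include <cstring>

    static const char* FindValue(const char* store, size_t store_size, const char* key) {
      const char* ptr = store;
      const char* end = store + store_size;
      while (ptr < end) {
        const char* value = ptr + strlen(ptr) + 1;  // value follows the key's NUL
        if (value >= end) return nullptr;           // truncated/malformed store
        if (strcmp(key, ptr) == 0) return value;
        ptr = value + strlen(value) + 1;            // step past the value's NUL
      }
      return nullptr;
    }
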
trace.cc
    494  uint8_t* ptr = buf + kTraceHeaderLength;   [local]
    497  while (ptr < end) {
    498  uint32_t tmid = ptr[2] | (ptr[3] << 8) | (ptr[4] << 16) | (ptr[5] << 24);
    502  ptr += GetRecordSize(clock_source);
    674  uint8_t* ptr = buf_.get() + old_offset;   [local]
    675  Append2LE(ptr, thread->GetTid());
    676  Append4LE(ptr
    690  uint8_t* ptr = buf_.get() + kTraceHeaderLength;   [local]
    [all...]

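Line 498 reassembles a 32-bit method id from explicit little-endian bytes, and lines 675-676 write trace fields the same way. Sketches of the two append helpers as the hits imply them (ART's own Append2LE/Append4LE live in a utility header; these are illustrative):

    #include <cstdint>

    static void Append2LE(uint8_t* ptr, uint16_t value) {
      ptr[0] = static_cast<uint8_t>(value);
      ptr[1] = static_cast<uint8_t>(value >> 8);
    }

    static void Append4LE(uint8_t* ptr, uint32_t value) {
      ptr[0] = static_cast<uint8_t>(value);
      ptr[1] = static_cast<uint8_t>(value >> 8);
      ptr[2] = static_cast<uint8_t>(value >> 16);
      ptr[3] = static_cast<uint8_t>(value >> 24);
    }
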
dex_file_verifier.h
     46  // Check a list. The head is assumed to be at *ptr, and elements to be of size element_size. If
     47  // successful, the ptr will be moved forward the amount covered by the list.
     48  bool CheckList(size_t element_size, const char* label, const byte* *ptr);
     84  uint16_t FindFirstClassDataDefiner(const byte* ptr, bool* success);
     85  uint16_t FindFirstAnnotationsDirectoryDefiner(const byte* ptr, bool* success);

mem_map.cc
    301  for (uintptr_t ptr = next_mem_pos_; ptr < 4 * GB; ptr += kPageSize) {
    302  if (4U * GB - ptr < page_aligned_byte_count) {
    306  ptr = LOW_MEM_START - kPageSize;
    319  for (tail_ptr = ptr; tail_ptr < ptr + page_aligned_byte_count; tail_ptr += kPageSize) {
    331  actual = mmap(reinterpret_cast<void*>(ptr), page_aligned_byte_count, prot, flags, fd.get(),
    345  ptr = tail_ptr;

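The mem_map.cc hits scan page-aligned candidate addresses below 4 GiB for a hole large enough for the requested mapping. A simplified sketch of that idea, assuming a plain mmap address hint rather than ART's actual probing loop, with illustrative constants:

    #include <sys/mman.h>
    #include <cstdint>

    static void* MapBelow4GB(size_t page_aligned_byte_count, int prot) {
      constexpr uint64_t kPageSize = 4096;
      constexpr uint64_t k4GB = UINT64_C(4) * 1024 * 1024 * 1024;
      for (uint64_t ptr = kPageSize; ptr + page_aligned_byte_count <= k4GB; ptr += kPageSize) {
        void* hint = reinterpret_cast<void*>(static_cast<uintptr_t>(ptr));
        void* actual = mmap(hint, page_aligned_byte_count, prot,
                            MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        if (actual == hint) {
          return actual;  // the kernel honored the hint: mapping is below 4 GiB
        }
        if (actual != MAP_FAILED) {
          munmap(actual, page_aligned_byte_count);  // placed elsewhere; retry higher
        }
      }
      return nullptr;  // no hole found in the low 4 GiB
    }
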
/art/compiler/utils/
scoped_arena_allocator.h
     71  uint8_t* ptr = top_ptr_;   [variable]
     72  if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
     73  ptr = AllocateFromNextArena(rounded_bytes);
     76  top_ptr_ = ptr + rounded_bytes;
     77  return ptr;
    127  static void operator delete(void* ptr) { UNUSED(ptr); }   [argument]

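Lines 71-77 are the fast path of a bump-pointer arena: bump top_ptr_, and refill from a fresh arena only when the current one runs out. A self-contained skeleton of that pattern (BumpArena is illustrative, not ART's class):

    #include <cstddef>
    #include <cstdint>

    class BumpArena {
     public:
      void* Alloc(size_t bytes) {
        size_t rounded_bytes = (bytes + 7u) & ~static_cast<size_t>(7);  // 8-byte alignment
        uint8_t* ptr = top_ptr_;
        if (static_cast<size_t>(top_end_ - ptr) < rounded_bytes) {
          ptr = AllocateFromNextArena(rounded_bytes);  // slow path
        }
        top_ptr_ = ptr + rounded_bytes;
        return ptr;
      }

     private:
      uint8_t* AllocateFromNextArena(size_t rounded_bytes) {
        size_t block = rounded_bytes > kDefaultBlock ? rounded_bytes : kDefaultBlock;
        uint8_t* begin = new uint8_t[block];  // sketch leaks old blocks; real code tracks them
        top_end_ = begin + block;
        return begin;
      }

      static constexpr size_t kDefaultBlock = 4096;
      uint8_t* top_ptr_ = nullptr;
      uint8_t* top_end_ = nullptr;
    };
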
scoped_arena_allocator.cc
     96  uint8_t* ptr = top_ptr_;   [local]
     97  if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
     98  ptr = AllocateFromNextArena(rounded_bytes);
    101  top_ptr_ = ptr + rounded_bytes;
    102  VALGRIND_MAKE_MEM_UNDEFINED(ptr, bytes);
    103  VALGRIND_MAKE_MEM_NOACCESS(ptr + bytes, rounded_bytes - bytes);
    104  return ptr;

swap_space.h
     37  uint8_t* ptr;   [member in struct art::SpaceChunk]
     41  return reinterpret_cast<uintptr_t>(ptr);
     44  return reinterpret_cast<uintptr_t>(ptr) + size;
     49  return (lhs.size == rhs.size) && (lhs.ptr == rhs.ptr);
     55  return reinterpret_cast<uintptr_t>(a.ptr) < reinterpret_cast<uintptr_t>(b.ptr);
     65  void Free(void* ptr, size_t size) LOCKS_EXCLUDED(lock_);

swap_space.cc
    122  void* ret = old_chunk.ptr;
    126  SpaceChunk new_chunk = { old_chunk.ptr + size, old_chunk.size - size };
    140  uint8_t* ptr = reinterpret_cast<uint8_t*>(   [local]
    142  if (ptr == MAP_FAILED) {
    152  SpaceChunk new_chunk = {ptr, next_part};
    181  chunk.ptr -= prev->size;

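Line 126 shows the allocation split: the front of a free chunk is handed out and the remainder is re-inserted as a smaller chunk. A sketch of that step under an assumed std::set keyed by chunk start (SpaceChunk here mirrors the struct in swap_space.h; AllocFromChunk is illustrative):

    #include <cstddef>
    #include <cstdint>
    #include <set>

    struct SpaceChunk {
      uint8_t* ptr;
      size_t size;
      bool operator<(const SpaceChunk& rhs) const { return ptr < rhs.ptr; }
    };

    static void* AllocFromChunk(std::set<SpaceChunk>* free_chunks,
                                SpaceChunk old_chunk, size_t size) {
      free_chunks->erase(old_chunk);
      void* ret = old_chunk.ptr;
      if (old_chunk.size > size) {
        SpaceChunk new_chunk = { old_chunk.ptr + size, old_chunk.size - size };
        free_chunks->insert(new_chunk);  // remainder stays allocatable
      }
      return ret;
    }
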
arena_allocator.cc
    241  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    242  CHECK_EQ(*ptr, 0U);

/art/disassembler/
disassembler_arm64.cc
     30  static uint32_t ReadU32(const uint8_t* ptr) {   [argument]
     31  return *((const uint32_t*)ptr);

disassembler_mips.cc
    166  static uint32_t ReadU32(const uint8_t* ptr) {   [argument]
    168  return ptr[0] | (ptr[1] << 8) | (ptr[2] << 16) | (ptr[3] << 24);

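Note the contrast with disassembler_arm64.cc above: the cast there assumes an aligned pointer and host byte order, while the MIPS version assembles bytes, which is alignment-safe and explicitly little-endian. A memcpy-based sketch also avoids the unaligned access, at the cost of reading in host order:

    #include <cstdint>
    #include <cstring>

    static uint32_t ReadU32HostOrder(const uint8_t* ptr) {
      uint32_t value;
      memcpy(&value, ptr, sizeof(value));  // compilers lower this to a single load
      return value;                        // host endianness, unlike the byte-OR form
    }
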
/art/runtime/gc/space/
valgrind_malloc_space.h
     40  size_t Free(Thread* self, mirror::Object* ptr) OVERRIDE
     46  void RegisterRecentFree(mirror::Object* ptr) OVERRIDE {

dlmalloc_space.cc
    159  size_t DlMallocSpace::Free(Thread* self, mirror::Object* ptr) {   [argument]
    162  CHECK(ptr != nullptr);
    163  CHECK(Contains(ptr)) << "Free (" << ptr << ") not in bounds of heap " << *this;
    165  const size_t bytes_freed = AllocationSizeNonvirtual(ptr, nullptr);
    167  RegisterRecentFree(ptr);
    169  mspace_free(mspace_, ptr);
    179  mirror::Object* ptr = ptrs[i];   [local]
    185  bytes_freed += AllocationSizeNonvirtual(ptr, nullptr);

valgrind_malloc_space-inl.h
     74  size_t ValgrindMallocSpace<S, A>::Free(Thread* self, mirror::Object* ptr) {   [argument]
     75  void* obj_after_rdz = reinterpret_cast<void*>(ptr);
     79  AllocationSize(ptr, &usable_size);

rosalloc_space.cc
    174  size_t RosAllocSpace::Free(Thread* self, mirror::Object* ptr) {   [argument]
    176  CHECK(ptr != NULL);
    177  CHECK(Contains(ptr)) << "Free (" << ptr << ") not in bounds of heap " << *this;
    181  RegisterRecentFree(ptr);
    183  return rosalloc_->Free(self, ptr);

large_object_space_test.cc
    106  mirror::Object* ptr = los_->Alloc(self, size_, &alloc_size, nullptr);   [local]
    110  los_->Free(self, ptr);

malloc_space.h
     65  virtual size_t Free(Thread* self, mirror::Object* ptr)
    153  virtual void RegisterRecentFree(mirror::Object* ptr)

/art/compiler/llvm/
ir_builder.h
     79  ::llvm::LoadInst* CreateLoad(::llvm::Value* ptr, ::llvm::MDNode* tbaa_info) {   [argument]
     80  ::llvm::LoadInst* inst = LLVMIRBuilder::CreateLoad(ptr);
     85  ::llvm::StoreInst* CreateStore(::llvm::Value* val, ::llvm::Value* ptr, ::llvm::MDNode* tbaa_info) {   [argument]
     86  ::llvm::StoreInst* inst = LLVMIRBuilder::CreateStore(val, ptr);
     92  CreateAtomicCmpXchgInst(::llvm::Value* ptr, ::llvm::Value* cmp, ::llvm::Value* val,   [argument]
     95  LLVMIRBuilder::CreateAtomicCmpXchg(ptr, cmp, val, ::llvm::Acquire);
    115  ::llvm::LoadInst* CreateLoad(::llvm::Value* ptr, TBAASpecialType special_ty) {   [argument]
    116  return CreateLoad(ptr, mdb_.GetTBAASpecialType(special_ty));
    119  ::llvm::StoreInst* CreateStore(::llvm::Value* val, ::llvm::Value* ptr, TBAASpecialType special_ty) {   [argument]
    121  return CreateStore(val, ptr, mdb
    124  CreateLoad(::llvm::Value* ptr, TBAASpecialType special_ty, JType j_ty)   [argument]
    128  CreateStore(::llvm::Value* val, ::llvm::Value* ptr, TBAASpecialType special_ty, JType j_ty)   [argument]
    [all...]

/art/runtime/base/unix_file/
fd_file.cc
    182  char* ptr = static_cast<char*>(buffer);   [local]
    184  ssize_t bytes_read = TEMP_FAILURE_RETRY(read(fd_, ptr, byte_count));
    191  ptr += bytes_read;  // Move the buffer forward.
    197  const char* ptr = static_cast<const char*>(buffer);   [local]
    200  ssize_t bytes_written = TEMP_FAILURE_RETRY(write(fd_, ptr, byte_count));
    205  ptr += bytes_written;  // Move the buffer forward.

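Both hits are the same retry pattern: loop until the whole buffer has been transferred, with TEMP_FAILURE_RETRY restarting calls interrupted by signals. A standalone sketch of the read side (ReadFully is an illustrative signature, not FdFile's):

    #include <unistd.h>
    #include <cerrno>
    #include <cstddef>

    static bool ReadFully(int fd, void* buffer, size_t byte_count) {
      char* ptr = static_cast<char*>(buffer);
      while (byte_count > 0) {
        ssize_t bytes_read = read(fd, ptr, byte_count);
        if (bytes_read < 0) {
          if (errno == EINTR) continue;     // what TEMP_FAILURE_RETRY expands to
          return false;                     // real I/O error
        }
        if (bytes_read == 0) return false;  // unexpected EOF
        byte_count -= bytes_read;
        ptr += bytes_read;  // move the buffer forward
      }
      return true;
    }
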
/art/runtime/gc/allocator/
rosalloc.h
    219  void FreeSlot(void* ptr);
    221  size_t MarkBulkFreeBitMap(void* ptr);
    223  void MarkThreadLocalFreeBitMap(void* ptr);
    255  size_t MarkFreeBitMapShared(void* ptr, uint32_t* free_bit_map_base, const char* caller_name);
    488  size_t FreePages(Thread* self, void* ptr, bool already_zero) EXCLUSIVE_LOCKS_REQUIRED(lock_);
    500  size_t FreeFromRun(Thread* self, void* ptr, Run* run)
    511  size_t FreeInternal(Thread* self, void* ptr) LOCKS_EXCLUDED(lock_);
    535  size_t Free(Thread* self, void* ptr)
    540  size_t UsableSize(void* ptr);

rosalloc.cc
    289  size_t RosAlloc::FreePages(Thread* self, void* ptr, bool already_zero) {   [argument]
    291  size_t pm_idx = ToPageMapIndex(ptr);
    305  << static_cast<int>(pm_type) << ", ptr=" << std::hex
    306  << reinterpret_cast<intptr_t>(ptr);
    322  const uword* word_ptr = reinterpret_cast<uword*>(ptr);
    328  memset(ptr, 0, byte_size);
    332  LOG(INFO) << __PRETTY_FUNCTION__ << " : 0x" << std::hex << reinterpret_cast<intptr_t>(ptr)
    333  << "-0x" << (reinterpret_cast<intptr_t>(ptr) + byte_size)
    338  FreePageRun* fpr = reinterpret_cast<FreePageRun*>(ptr);
    485  size_t RosAlloc::FreeInternal(Thread* self, void* ptr) {   [argument]
    530  Free(Thread* self, void* ptr)   [argument]
    741  FreeFromRun(Thread* self, void* ptr, Run* run)   [argument]
    891  FreeSlot(void* ptr)   [argument]
    994  MarkThreadLocalFreeBitMap(void* ptr)   [argument]
    999  MarkBulkFreeBitMap(void* ptr)   [argument]
   1003  MarkFreeBitMapShared(void* ptr, uint32_t* free_bit_map_base, const char* caller_name)   [argument]
   1166  void* ptr = ptrs[i];   [local]
   1440  UsableSize(void* ptr)   [argument]
   [all...]

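Line 291's ToPageMapIndex translates a freed pointer into an index into rosalloc's page map, the side table that records what kind of page (free, run, large object) each heap page is. A minimal sketch of that translation, with an assumed page size and an explicit heap-base parameter:

    #include <cstddef>
    #include <cstdint>

    static constexpr size_t kPageSize = 4096;

    // One page-map entry per heap page, indexed from the heap base.
    static size_t ToPageMapIndex(const void* heap_base, const void* ptr) {
      uintptr_t offset = reinterpret_cast<uintptr_t>(ptr) -
                         reinterpret_cast<uintptr_t>(heap_base);
      return offset / kPageSize;
    }
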
/art/runtime/arch/arm/
fault_handler_arm.cc
     96  uint8_t* ptr = reinterpret_cast<uint8_t*>(sc->arm_pc);   [local]
     97  VLOG(signals) << "pc: " << std::hex << static_cast<void*>(ptr);
     98  uint32_t instr_size = GetInstructionSize(ptr);
    113  uint8_t* ptr = reinterpret_cast<uint8_t*>(sc->arm_pc);   [local]
    115  uint32_t instr_size = GetInstructionSize(ptr);

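GetInstructionSize has to decide whether the faulting Thumb instruction is 16 or 32 bits so the handler can compute the return pc. A sketch of the standard Thumb-2 width test (assumed to match ART's check: halfwords whose top five bits are 0b11101, 0b11110, or 0b11111 begin a 32-bit encoding):

    #include <cstdint>

    static uint32_t GetInstructionSize(const uint8_t* pc) {
      uint16_t instr = pc[0] | (pc[1] << 8);  // little-endian halfword fetch
      uint16_t opcode = instr >> 11;          // top five bits
      return (opcode == 0x1d || opcode == 0x1e || opcode == 0x1f) ? 4 : 2;
    }
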
/art/runtime/arch/x86/
fault_handler_x86.cc
    354  uint8_t* ptr = pc - sizeof(checkinst1);   [local]
    356  while (ptr > limit) {
    357  if (memcmp(ptr, checkinst1, sizeof(checkinst1)) == 0) {
    361  ptr -= 1;

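Because x86 instructions are variable length, the handler cannot step backwards instruction by instruction; it scans byte by byte from the faulting pc toward a lower limit for a known byte sequence. A generic sketch (the pattern and limit are caller-supplied here; checkinst1 in the hits is ART's expected instruction bytes):

    #include <cstdint>
    #include <cstring>

    static const uint8_t* FindPatternBackwards(const uint8_t* pc, const uint8_t* limit,
                                               const uint8_t* pattern, size_t pattern_size) {
      const uint8_t* ptr = pc - pattern_size;
      while (ptr > limit) {
        if (memcmp(ptr, pattern, pattern_size) == 0) {
          return ptr;  // found the instruction expected before the fault
        }
        ptr -= 1;      // variable-length encoding forces a bytewise scan
      }
      return nullptr;
    }
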