Searched refs:code_size (Results 1 - 23 of 23) sorted by relevance

/art/compiler/debug/
method_debug_info.h:39    uint32_t code_size; member in struct art::debug::MethodDebugInfo
elf_symtab_writer.h:86    symtab->Add(name_offset, text, address, info.code_size, STB_GLOBAL, STT_FUNC);
elf_debug_writer.cc:64    cu.code_end = std::max(cu.code_end, mi.code_address + mi.code_size);
194 info.code_size = 0; // The symbol lasts until the next symbol.
elf_debug_loc_writer.h:115    DCHECK_LE(pc_offset, method_info->code_size);
131 : method_info->code_address + method_info->code_size - compilation_unit_code_address;
elf_debug_line_writer.h:262    opcodes.AdvancePC(method_address + mi->code_size);
elf_debug_frame_writer.h:234    code_address, mi->code_size,
elf_debug_info_writer.h:171    info_.WriteUdata(DW_AT_high_pc, mi->code_size);
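
Taken together, the matches above show the ELF debug writers treating [code_address, code_address + code_size) as a method's code range: the compilation unit's end is the max over its methods, and DW_AT_high_pc is written as the size. Below is a minimal sketch of that range bookkeeping; MethodInfo and CompilationUnit here are simplified stand-ins, not the real art::debug types.

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // Simplified stand-ins for the fields used above; the real
    // art::debug::MethodDebugInfo carries many more members.
    struct MethodInfo {
      uint64_t code_address;
      uint32_t code_size;
    };

    struct CompilationUnit {
      uint64_t code_begin = UINT64_MAX;
      uint64_t code_end = 0;
    };

    // Grow the unit's [code_begin, code_end) range to cover every method,
    // mirroring the std::max() update seen in elf_debug_writer.cc.
    void CoverMethods(CompilationUnit* cu, const std::vector<MethodInfo>& methods) {
      for (const MethodInfo& mi : methods) {
        cu->code_begin = std::min(cu->code_begin, mi.code_address);
        cu->code_end = std::max(cu->code_end, mi.code_address + mi.code_size);
      }
    }
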
/art/runtime/
oat_quick_method_header.cc:30    uint32_t code_size)
33 code_size_(code_size) {}
25 OatQuickMethodHeader( uint32_t vmap_table_offset, uint32_t frame_size_in_bytes, uint32_t core_spill_mask, uint32_t fp_spill_mask, uint32_t code_size) argument
oat_quick_method_header.h:37    uint32_t code_size = 0U);
stack.cc:645    uint32_t code_size = OatQuickMethodHeader::FromEntryPoint(code)->code_size_; local
647 CHECK(code_start <= pc && pc <= (code_start + code_size))
651 << " code_size=" << code_size; local
utils.cc:1102    uintptr_t code_size = reinterpret_cast<const OatQuickMethodHeader*>(code)[-1].code_size_; variable
1103 return code <= pc && pc <= (code + code_size);
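
The stack.cc and utils.cc matches rely on the OatQuickMethodHeader being laid out immediately before the compiled code, so indexing [-1] from the code pointer reaches it and the PC check is an inclusive range test. A self-contained sketch of that check, with a cut-down MethodHeader stand-in instead of the real header:

    #include <cstdint>

    // Minimal stand-in for OatQuickMethodHeader: only the field used here.
    struct MethodHeader {
      uint32_t code_size_;
    };

    // The header sits immediately before the first code byte, so stepping one
    // header back from the code pointer finds it (as in utils.cc above).
    inline bool PcIsWithinCode(uintptr_t pc, const uint8_t* code) {
      const MethodHeader* header = reinterpret_cast<const MethodHeader*>(code) - 1;
      uintptr_t begin = reinterpret_cast<uintptr_t>(code);
      // The end is inclusive, matching the `pc <= code + code_size` check.
      return begin <= pc && pc <= begin + header->code_size_;
    }
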
/art/compiler/
exception_test.cc:56    uint32_t code_size = 12; local
57 for (size_t i = 0 ; i < code_size; i++) {
77 OatQuickMethodHeader method_header(stack_maps_offset, 4 * sizeof(void*), 0u, 0u, code_size);
87 fake_header_code_and_maps_.data() + (fake_header_code_and_maps_.size() - code_size);
96 (fake_header_code_and_maps_.size() - code_size)));
common_compiler_test.cc:57    uint32_t code_size = code.size(); local
58 CHECK_NE(0u, code_size);
66 code_size);
71 const size_t size = vmap_table.size() + sizeof(method_header) + code_size;
78 const void* unaligned_code_ptr = chunk->data() + (size - code_size);
85 CHECK_EQ(code_ptr, static_cast<const void*>(chunk->data() + (chunk->size() - code_size)));
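
The two test files above build one buffer holding the vmap table, the method header, and the code, then locate the code as data() + (size - code_size). A rough sketch of that layout arithmetic follows; alignment padding, which the real tests handle, is omitted, and FakeMethodHeader/PackMethod are names invented here.

    #include <cstdint>
    #include <vector>

    // Hypothetical simplified header; the real OatQuickMethodHeader also
    // carries spill masks and a vmap table offset.
    struct FakeMethodHeader {
      uint32_t code_size_;
    };

    // Lay out [vmap_table | header | code] in one buffer and return a pointer
    // to the code, mirroring the `data() + (size - code_size)` arithmetic above.
    const uint8_t* PackMethod(const std::vector<uint8_t>& vmap_table,
                              const std::vector<uint8_t>& code,
                              std::vector<uint8_t>* chunk) {
      FakeMethodHeader header{static_cast<uint32_t>(code.size())};
      chunk->clear();
      chunk->insert(chunk->end(), vmap_table.begin(), vmap_table.end());
      const uint8_t* h = reinterpret_cast<const uint8_t*>(&header);
      chunk->insert(chunk->end(), h, h + sizeof(header));
      chunk->insert(chunk->end(), code.begin(), code.end());
      return chunk->data() + (chunk->size() - code.size());
    }
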
oat_writer.cc:688    uint32_t code_size = quick_code.size() * sizeof(uint8_t);
712 if (code_size != 0) {
745 code_size);
750 offset_ += code_size;
753 uintptr_t base_loc = offset_ - code_size - writer_->oat_header_->GetExecutableOffset();
763 // Exclude quickened dex methods (code_size == 0) since they have no native code.
764 if (compiler_options.GenerateAnyDebugInfo() && code_size != 0) {
780 info.code_size = code_size;
1003 uint32_t code_size
[all...]
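
The oat_writer.cc matches are offset bookkeeping: advance the running file offset by code_size, recover the code's position relative to the executable section, and skip debug info for methods with no native code. A stripped-down sketch of that accounting; OffsetTracker and its members are hypothetical names, not the writer's real API.

    #include <cstdint>

    // Hypothetical offset bookkeeping, modeled on the oat_writer.cc lines above.
    struct OffsetTracker {
      uint32_t offset_ = 0;             // current write position in the output
      uint32_t executable_offset_ = 0;  // where executable code starts in the file

      // Advance past the emitted code and return where that code began
      // relative to the executable section (its "base_loc").
      uint32_t EmitCode(uint32_t code_size) {
        offset_ += code_size;
        return offset_ - code_size - executable_offset_;
      }

      // Methods with code_size == 0 have no native code and get no debug info.
      static bool WantsDebugInfo(bool generate_any_debug_info, uint32_t code_size) {
        return generate_any_debug_info && code_size != 0;
      }
    };
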
oat_test.cc:83    size_t code_size = quick_code.size() * sizeof(quick_code[0]); local
84 EXPECT_EQ(0, memcmp(quick_oat_code, &quick_code[0], code_size))
85 << PrettyMethod(method) << " " << code_size;
86 CHECK_EQ(0, memcmp(quick_oat_code, &quick_code[0], code_size));
/art/runtime/jit/
jit_code_cache.cc:98    size_t code_size = max_capacity - data_size; local
99 DCHECK_EQ(code_size + data_size, max_capacity);
112 code_size = initial_capacity - data_size;
113 DCHECK_EQ(code_size + data_size, initial_capacity);
115 code_map, data_map, code_size, data_size, max_capacity, garbage_collect_code);
204 size_t code_size,
213 code_size,
225 code_size,
315 size_t code_size,
320 size_t total_size = header_size + code_size;
197 CommitCode(Thread* self, ArtMethod* method, const uint8_t* vmap_table, size_t frame_size_in_bytes, size_t core_spill_mask, size_t fp_spill_mask, const uint8_t* code, size_t code_size, bool osr) argument
308 CommitCodeInternal(Thread* self, ArtMethod* method, const uint8_t* vmap_table, size_t frame_size_in_bytes, size_t core_spill_mask, size_t fp_spill_mask, const uint8_t* code, size_t code_size, bool osr) argument
1005 AllocateCode(size_t code_size) argument
[all...]
jit_code_cache.h:100    size_t code_size,
209 size_t code_size,
260 uint8_t* AllocateCode(size_t code_size) REQUIRES(lock_);
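
jit_code_cache.cc derives code_size by subtracting the data region from the total capacity and DCHECKs that the two halves add back up. A minimal sketch of such a split; the 50/50 division here is an assumption for illustration only, not necessarily ART's actual policy.

    #include <cassert>
    #include <cstddef>
    #include <utility>

    // Split a capacity into a data region and a code region, as the matches in
    // jit_code_cache.cc do for both the initial and the maximum capacity.
    std::pair<size_t, size_t> SplitCapacity(size_t capacity) {
      size_t data_size = capacity / 2;                // assumed even split
      size_t code_size = capacity - data_size;
      assert(code_size + data_size == capacity);      // mirrors the DCHECK_EQ above
      return {code_size, data_size};
    }
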
/art/compiler/debug/dwarf/
headers.h:84    uint64_t code_size,
114 writer.PushUint64(code_size);
117 writer.PushUint32(code_size);
80 WriteFDE(bool is64bit, uint64_t section_address, uint64_t cie_address, uint64_t code_address, uint64_t code_size, const ArrayRef<const uint8_t>& opcodes, CFIFormat format, uint64_t buffer_address, std::vector<uint8_t>* buffer, std::vector<uintptr_t>* patch_locations) argument
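
WriteFDE pushes the covered code range with PushUint64 or PushUint32 depending on the DWARF address width. A toy little-endian writer illustrating that choice; ByteWriter and WriteCodeRange are invented for this sketch and are not the real art::dwarf API.

    #include <cstdint>
    #include <vector>

    // Toy little-endian byte writer.
    struct ByteWriter {
      std::vector<uint8_t> bytes;
      void PushUint32(uint32_t v) {
        for (int i = 0; i < 4; ++i) bytes.push_back(static_cast<uint8_t>(v >> (8 * i)));
      }
      void PushUint64(uint64_t v) {
        for (int i = 0; i < 8; ++i) bytes.push_back(static_cast<uint8_t>(v >> (8 * i)));
      }
    };

    // An FDE records the code range it covers; the address and size are written
    // as 8 or 4 bytes depending on the address width, as in headers.h above.
    void WriteCodeRange(ByteWriter* w, bool is64bit,
                        uint64_t code_address, uint64_t code_size) {
      if (is64bit) {
        w->PushUint64(code_address);
        w->PushUint64(code_size);
      } else {
        w->PushUint32(static_cast<uint32_t>(code_address));
        w->PushUint32(static_cast<uint32_t>(code_size));
      }
    }
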
/art/compiler/utils/arm/
assembler_thumb2.cc:107    uint32_t code_size = buffer_.Size(); local
110 BindLabel(label, code_size);
111 code_size += lit.GetSize();
113 return code_size;
116 void Thumb2Assembler::BindJumpTables(uint32_t code_size) { argument
119 BindLabel(label, code_size);
120 code_size += table.GetSize();
243 uint32_t code_size = buffer_.Size(); local
244 DCHECK_ALIGNED(code_size, 2);
245 if ((code_size
262 uint32_t code_size = buffer_.Size(); local
[all...]
assembler_thumb2.h:610    void Emit(AssemblerBuffer* buffer, uint32_t code_size) const;
836 void BindJumpTables(uint32_t code_size);
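
BindJumpTables and the literal-binding code above place pool items after the code already emitted: each label is bound at the running code_size, which then advances by the item's size. A small sketch of that pattern with made-up Label/PoolItem types standing in for the assembler's literals and jump tables:

    #include <cstdint>
    #include <vector>

    // Hypothetical minimal stand-ins for the assembler's labels and pool items.
    struct Label { uint32_t offset = 0; bool bound = false; };
    struct PoolItem { Label label; uint32_t size; };

    // Place each pool item after the code already emitted: bind its label at the
    // running code_size and advance by the item size, as BindJumpTables() does.
    uint32_t BindAfterCode(uint32_t code_size, std::vector<PoolItem>* items) {
      for (PoolItem& item : *items) {
        item.label.offset = code_size;
        item.label.bound = true;
        code_size += item.size;
      }
      return code_size;  // total size including the appended pool
    }
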
/art/oatdump/
oatdump.cc:265    info.code_size = method_header->GetCodeSize();
799 uint32_t code_size = oat_method.GetQuickCodeSize(); local
801 if (resolved_addr2instr_ > code_offset + code_size) {
923 uint64_t aligned_code_end = aligned_code_begin + code_size;
931 code_size,
946 code_size, code_size_offset);
953 } else if (code_size > kMaxCodeSize) {
958 code_size, kMaxCodeSize,
959 code_size, code_size_offset);
1218 bool bad_input, size_t code_size) {
1216 DumpCode(VariableIndentationOutputStream* vios, const OatFile::OatMethod& oat_method, const DexFile::CodeItem* code_item, bool bad_input, size_t code_size) argument
[all...]
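
oatdump sanity-checks code_size against an upper bound before walking the code and computes an aligned end address for the dump. A hedged sketch of that guard; kMaxCodeSizeGuess is a placeholder constant for this sketch, not oatdump's actual kMaxCodeSize.

    #include <cstdint>

    // Placeholder cap; oatdump's real kMaxCodeSize guards against interpreting
    // an implausible size read from a damaged oat file.
    constexpr uint64_t kMaxCodeSizeGuess = 100 * 1024 * 1024;

    // Decide how many bytes are safe to dump: reject implausible sizes and
    // compute the [begin, end) range the disassembler may walk.
    bool CodeRangeToDump(uint64_t aligned_code_begin, uint64_t code_size,
                         uint64_t* aligned_code_end) {
      if (code_size == 0 || code_size > kMaxCodeSizeGuess) {
        return false;  // analogous to the `code_size > kMaxCodeSize` bail-out above
      }
      *aligned_code_end = aligned_code_begin + code_size;
      return true;
    }
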
/art/compiler/optimizing/
code_generator.cc:260    size_t code_size = GetAssembler()->CodeSize(); local
261 uint8_t* buffer = allocator->Allocate(code_size);
263 MemoryRegion code(buffer, code_size);
optimizing_compiler.cc:957    info.code_size = code_allocator.GetSize();
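
In the optimizing backend, the generator asks the assembler for its final CodeSize(), allocates a buffer of exactly that many bytes, and copies the code into it; the same value later lands in info.code_size. A sketch against a hypothetical vector-backed allocator; the real CodeAllocator is an ART interface, not this class.

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Hypothetical allocator: Allocate(code_size) hands back a buffer the
    // generated code is copied into, and GetSize() reports that size later.
    class VectorAllocator {
     public:
      uint8_t* Allocate(size_t code_size) {
        storage_.resize(code_size);
        return storage_.data();
      }
      size_t GetSize() const { return storage_.size(); }  // becomes info.code_size
     private:
      std::vector<uint8_t> storage_;
    };

    // Copy the assembler's finished bytes into the allocator's buffer, mirroring
    // the Allocate(code_size) + MemoryRegion pattern in code_generator.cc.
    void Finalize(const std::vector<uint8_t>& assembled, VectorAllocator* allocator) {
      size_t code_size = assembled.size();
      uint8_t* buffer = allocator->Allocate(code_size);
      if (code_size != 0) {
        std::memcpy(buffer, assembled.data(), code_size);
      }
    }
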

Completed in 330 milliseconds