/art/runtime/
reference_table_test.cc
  42   EXPECT_EQ(0U, rt.Size());
  47   EXPECT_EQ(0U, rt.Size());
  51   EXPECT_EQ(0U, rt.Size());
  56   EXPECT_EQ(1U, rt.Size());
  67   EXPECT_EQ(i + 2, rt.Size());
  86   EXPECT_EQ(10U, rt.Size());
  95   EXPECT_EQ(9 - i, rt.Size());
intern_table_test.cc
  49   TEST_F(InternTableTest, Size) {
  52   EXPECT_EQ(0U, t.Size());
  58   EXPECT_EQ(1U, t.Size());
  60   EXPECT_EQ(2U, t.Size());
  110  EXPECT_EQ(4U, t.Size());
  121  EXPECT_EQ(2U, t.Size());
  127  EXPECT_EQ(3U, t.Size());
intern_table.cc
  38   size_t InternTable::Size() const {    (function in class art::InternTable)
  40   return strong_interns_.Size() + weak_interns_.Size();
  45   return strong_interns_.Size();
  50   return weak_interns_.Size();
  158  if (section.Size() > 0) {
  302  CHECK_LE(offset, intern_section.Size());
  335  CHECK_EQ(pre_zygote_table_.Size(), 0u);
  372  VLOG(heap) << "Swapping " << pre_zygote_table_.Size() << " interns to the pre zygote table";
  415  size_t InternTable::Table::Size() const    (function in class art::InternTable::Table)
  [all...]
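The intern_table.cc hits show Size() simply summing its two sub-tables. A minimal compilable sketch of that aggregation pattern, with a std::set-backed Table standing in for ART's arena hash table (names and storage here are illustrative, not ART's):

    #include <cstddef>
    #include <iostream>
    #include <set>
    #include <string>

    // Hypothetical stand-in for ART's InternTable::Table (the real one
    // stores mirror::String* in an arena-backed hash set).
    class Table {
     public:
      void Add(const std::string& s) { entries_.insert(s); }
      size_t Size() const { return entries_.size(); }
     private:
      std::set<std::string> entries_;
    };

    class InternTable {
     public:
      // As in the hit at line 40: the total is the sum of the sub-tables.
      size_t Size() const { return strong_.Size() + weak_.Size(); }
      Table strong_;
      Table weak_;
    };

    int main() {
      InternTable t;
      t.strong_.Add("hello");
      t.weak_.Add("world");
      std::cout << t.Size() << "\n";  // prints 2
    }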
elf_file.h
  53   size_t Size() const;
reference_table.h
  48   size_t Size() const;
/art/runtime/base/
variant_map_test.cc
  70   EXPECT_EQ(size_t(2), fm.Size());
  77   EXPECT_EQ(size_t(0), fm.Size());
  102  EXPECT_EQ(size_t(0), fmEmpty.Size());
  108  EXPECT_EQ(size_t(2), fmFilled.Size());
  112  EXPECT_EQ(size_t(0), fmEmptyCopy.Size());
  116  EXPECT_EQ(size_t(2), fmFilledCopy.Size());
  123  EXPECT_EQ(size_t(2), fmFilledCopy2.Size());
  129  EXPECT_EQ(size_t(0), fmFilledCopy.Size());
  130  EXPECT_EQ(size_t(2), fmMoved.Size());
  139  EXPECT_EQ(size_t(0), fmFilledCopy2.Size());
  [all...]
/art/compiler/optimizing/
side_effects_analysis.h
  30   block_effects_(graph->GetArena(), graph->GetBlocks().Size(), SideEffects::None()),
  31   loop_effects_(graph->GetArena(), graph->GetBlocks().Size(), SideEffects::None()) {}
parallel_move_resolver.cc
  41   for (size_t i = 0; i < moves_.Size(); ++i) {
  52   for (size_t i = 0; i < moves_.Size(); ++i) {
  132  for (size_t i = 0; i < moves_.Size(); ++i) {
  186  for (size_t i = 0; i < moves_.Size(); ++i) {
  209  for (size_t i = 0; i < moves_.Size(); ++i) {
  231  for (size_t i = 0; i < moves_.Size(); ++i) {
  237  for (size_t i = 0; i < moves_.Size(); ++i) {
  300  for (size_t i = 0; i < moves_.Size(); ++i) {
  313  for (size_t i = 0; i < moves_.Size(); ++i) {
  333  for (size_t i = 0; i < moves_.Size(); [all...]
stack_map_stream.cc
  34   current_entry_.dex_register_locations_start_index = dex_register_locations_.Size();
  35   current_entry_.inline_infos_start_index = inline_infos_.Size();
  84   size_t index = location_catalog_entries_.Size();
  109  stack_maps_size_ = stack_maps_.Size()
  136  location_catalog_entry_index < location_catalog_entries_.Size();
  146  // Size of the map in bytes.
  160  DexRegisterMap::SingleEntrySizeInBits(location_catalog_entries_.Size())
  170  for (size_t i = 0; i < stack_maps_.Size(); ++i) {
  181  return inline_infos_.Size() * InlineInfo::SingleEntrySize()
  205  code_info.SetNumberOfStackMaps(stack_maps_.Size());
  [all...]
dead_code_elimination.cc
  44   for (size_t i = 0, e = block->GetSuccessors().Size(); i < e; ++i) {
  66   ArenaBitVector live_blocks(allocator, graph_->GetBlocks().Size(), false);
  67   ArenaBitVector affected_loops(allocator, graph_->GetBlocks().Size(), false);
  101  if (block->IsEntryBlock() || block->GetSuccessors().Size() != 1u) {
  106  if (successor->IsExitBlock() || successor->GetPredecessors().Size() != 1u) {
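Several hits here and below use the `for (size_t i = 0, e = ...Size(); i < e; ++i)` form, which hoists the bound so Size() is read once. A small sketch of the idiom, assuming the container is not mutated inside the loop:

    #include <cstddef>
    #include <iostream>
    #include <vector>

    // Caching the bound in `e` reads the size once; this is only safe
    // because the body neither adds nor removes elements.
    int SumIds(const std::vector<int>& successors) {
      int sum = 0;
      for (size_t i = 0, e = successors.size(); i < e; ++i) {
        sum += successors[i];
      }
      return sum;
    }

    int main() { std::cout << SumIds({1, 2, 3}) << "\n"; }  // prints 6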
nodes.cc
  28   block->SetBlockId(blocks_.Size());
  33   ArenaBitVector visiting(arena_, blocks_.Size(), false);
  45   for (size_t i = 0, e = environment->Size(); i < e; ++i) {
  54   for (size_t i = 0; i < blocks_.Size(); ++i) {
  66   for (size_t i = 0; i < blocks_.Size(); ++i) {
  70   for (size_t j = 0; j < block->GetSuccessors().Size(); ++j) {
  88   for (size_t i = 0; i < block->GetSuccessors().Size(); i++) {
  100  ArenaBitVector visited(arena_, blocks_.Size(), false);
  137  GrowableArray<size_t> visits(arena_, blocks_.Size());
  138  visits.SetSize(blocks_.Size());
  [all...]
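The hit at line 28 reads blocks_.Size() as the new block's id before appending, so ids double as indices. A sketch of that idiom with hypothetical Block/Graph types in place of ART's HBasicBlock/HGraph:

    #include <cstddef>
    #include <iostream>
    #include <vector>

    struct Block { size_t id = 0; };

    class Graph {
     public:
      // The id is taken from the size *before* the push_back, so each
      // block's id is also its index into blocks_.
      void AddBlock(Block* block) {
        block->id = blocks_.size();
        blocks_.push_back(block);
      }
      std::vector<Block*> blocks_;
    };

    int main() {
      Graph g;
      Block a, b;
      g.AddBlock(&a);
      g.AddBlock(&b);
      std::cout << a.id << " " << b.id << "\n";  // prints "0 1"
    }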
licm.cc
  45   for (size_t i = 0, e = environment->Size(); i < e; ++i) {
  68   for (size_t i = 0, e = environment->Size(); i < e; ++i) {
  83   ArenaBitVector visited(graph_->GetArena(), graph_->GetBlocks().Size(), false);
register_allocator.h
  72   return int_spill_slots_.Size()
  73   + long_spill_slots_.Size()
  74   + float_spill_slots_.Size()
  75   + double_spill_slots_.Size();
side_effects_analysis.cc
  24   block_effects_.SetSize(graph_->GetBlocks().Size());
  25   loop_effects_.SetSize(graph_->GetBlocks().Size());
ssa_builder.h
  55   locals_for_(graph->GetArena(), graph->GetBlocks().Size()) {
  56   locals_for_.SetSize(graph->GetBlocks().Size());
register_allocator.cc
  168  for (size_t i = 0, e = physical_core_register_intervals_.Size(); i < e; ++i) {
  188  for (size_t i = 0, e = physical_fp_register_intervals_.Size(); i < e; ++i) {
  316  for (size_t safepoint_index = safepoints_.Size(); safepoint_index > 0; --safepoint_index) {
  321  DCHECK(safepoint_index == safepoints_.Size()
  327  DCHECK_EQ(safepoint_index, safepoints_.Size());
  438  for (size_t i = 0, e = physical_core_register_intervals_.Size(); i < e; ++i) {
  445  for (size_t i = 0, e = physical_fp_register_intervals_.Size(); i < e; ++i) {
  453  for (size_t i = 0, e = temp_intervals_.Size(); i < e; ++i) {
  483  for (size_t i = 0, e = intervals.Size(); i < e; ++i) {
  558  for (size_t i = 0; i < inactive_.Size(); [all...]
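The safepoint loop at line 316 iterates backwards with `for (size_t i = Size(); i > 0; --i)`, the underflow-safe way to reverse-iterate with an unsigned index. A minimal sketch:

    #include <cstddef>
    #include <iostream>
    #include <vector>

    // Counting down from size() to 1 and indexing with i - 1 never wraps;
    // `for (size_t i = v.size() - 1; i >= 0; --i)` would loop forever,
    // because an unsigned i is always >= 0.
    void PrintReversed(const std::vector<int>& v) {
      for (size_t i = v.size(); i > 0; --i) {
        std::cout << v[i - 1] << "\n";
      }
    }

    int main() { PrintReversed({1, 2, 3}); }  // prints 3, 2, 1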
graph_checker.cc
  34   for (size_t i = 0, e = predecessors.Size(); i < e; ++i) {
  43   for (size_t j = 0, f = p_successors.Size(); j < f; ++j) {
  60   for (size_t i = 0, e = successors.Size(); i < e; ++i) {
  69   for (size_t j = 0, f = s_predecessors.Size(); j < f; ++j) {
  203  if ((use_index >= use->Size()) || (use->GetInstructionAt(use_index) != instruction)) {
  286  if (block->GetSuccessors().Size() > 1) {
  287  for (size_t j = 0; j < block->GetSuccessors().Size(); ++j) {
  289  if (successor->GetPredecessors().Size() > 1) {
  327  size_t num_preds = loop_header->GetPredecessors().Size();
  340  for (size_t i = 1, e = loop_header->GetPredecessors().Size(); [all...]
pretty_printer.h
  77   for (size_t i = 0; i < predecessors.Size() -1; i++) {
  86   for (size_t i = 0; i < successors.Size() - 1; i++) {
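Both hits print all but the last element with a `Size() - 1` bound so the elements can be comma-separated. A sketch of the idiom; the empty-list guard is an addition here, since `Size() - 1` underflows when the unsigned size is zero:

    #include <cstddef>
    #include <iostream>
    #include <vector>

    // Everything before the last element gets a trailing ", ". The
    // empty check matters: v.size() - 1 wraps to SIZE_MAX when empty.
    void PrintCommaSeparated(const std::vector<int>& v) {
      if (v.empty()) return;
      for (size_t i = 0; i < v.size() - 1; ++i) {
        std::cout << v[i] << ", ";
      }
      std::cout << v.back() << "\n";
    }

    int main() { PrintCommaSeparated({4, 8, 15}); }  // prints "4, 8, 15"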
/art/cmdline/
token_range.h
  154  size_t Size() const {    (function in struct art::TokenRange)
  160  return Size() > 0;
  165  assert(offset < Size());
  177  if (Size() != other.Size()) {
  186  assert(index >= 0 && static_cast<size_t>(index) < Size());
  196  if (Size() < other.Size()) {
  200  auto& smaller = Size() < other.Size() [all...]
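The token_range.h hits outline a range type that bounds-checks against Size() and compares sizes before contents. A rough sketch of that shape, using std::vector<std::string> storage rather than ART's shared token list (a simplification, not the real class):

    #include <cassert>
    #include <cstddef>
    #include <iostream>
    #include <string>
    #include <vector>

    // Hypothetical simplification of art::TokenRange.
    struct TokenRange {
      std::vector<std::string> tokens;

      size_t Size() const { return tokens.size(); }

      const std::string& GetToken(size_t offset) const {
        assert(offset < Size());  // same bounds check as the hit at 165
        return tokens[offset];
      }

      // Compare sizes first, as the equality test at 177 does, so
      // mismatched ranges are rejected without touching the tokens.
      bool operator==(const TokenRange& other) const {
        return Size() == other.Size() && tokens == other.tokens;
      }
    };

    int main() {
      TokenRange a{{"-Xmx", "64m"}};
      TokenRange b{{"-Xmx"}};
      std::cout << (a == b) << "\n";  // prints 0
    }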
/art/compiler/utils/
dex_cache_arrays_layout.h
  43   return Size() != 0u;
  46   size_t Size() const {    (function in class art::DexCacheArraysLayout)
growable_array.h
  87   DCHECK(index <= Size());
  89   for (size_t i = Size() - 1; i > index; --i) {
  144  size_t Size() const { return num_used_; }    (function in class art::GrowableArray)
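The growable_array.h hits show InsertAt: a bounds DCHECK at 87, then the shift loop at 89 that moves the tail one slot right. A compilable sketch, with std::vector standing in for ART's arena-backed storage:

    #include <cassert>
    #include <cstddef>
    #include <iostream>
    #include <vector>

    template <typename T>
    class GrowableArraySketch {
     public:
      size_t Size() const { return elems_.size(); }
      const T& Get(size_t index) const { return elems_[index]; }
      void Add(const T& value) { elems_.push_back(value); }

      // Grow by one slot, shift the tail right from the end down to
      // `index` (the loop at 89), then drop the new element in.
      void InsertAt(size_t index, const T& value) {
        assert(index <= Size());  // the DCHECK at 87
        elems_.push_back(T());
        for (size_t i = Size() - 1; i > index; --i) {
          elems_[i] = elems_[i - 1];
        }
        elems_[index] = value;
      }

     private:
      std::vector<T> elems_;
    };

    int main() {
      GrowableArraySketch<int> a;
      a.Add(1);
      a.Add(3);
      a.InsertAt(1, 2);
      for (size_t i = 0; i < a.Size(); ++i) std::cout << a.Get(i) << " ";
      std::cout << "\n";  // prints "1 2 3"
    }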
assembler.cc
  53   CHECK_EQ(Size(), 0U);
  73   MemoryRegion from(reinterpret_cast<void*>(contents()), Size());
  84   size_t old_size = Size();
  105  CHECK_EQ(Size(), old_size);
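assembler.cc copies the old contents into a new region when growing (line 73) and checks at line 105 that the logical size is unchanged. A sketch of grow-and-copy with that postcondition, using a plain vector rather than ART's mem-mapped buffer:

    #include <cassert>
    #include <cstddef>
    #include <cstring>
    #include <vector>

    class BufferSketch {
     public:
      size_t Size() const { return size_; }

      void Emit(unsigned char byte) {
        if (size_ == data_.size()) Grow();
        data_[size_++] = byte;
      }

     private:
      // Copy the old contents into a larger region; growing capacity
      // must leave the logical size alone (CHECK_EQ(Size(), old_size)).
      void Grow() {
        size_t old_size = Size();
        std::vector<unsigned char> larger(data_.empty() ? 64 : data_.size() * 2);
        if (old_size != 0) std::memcpy(larger.data(), data_.data(), old_size);
        data_.swap(larger);
        assert(Size() == old_size);
      }

      std::vector<unsigned char> data_;
      size_t size_ = 0;
    };

    int main() {
      BufferSketch b;
      for (int i = 0; i < 100; ++i) b.Emit(0x90);  // forces at least one Grow()
    }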
/art/runtime/base/unix_file/
fd_file_test.cc
  79   template <size_t Size>
  80   static void NullTerminateCharArray(char (&array)[Size]) {
  81   array[Size - 1] = '\0';
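NullTerminateCharArray above is complete as shown: the template deduces the array length, so the terminator index cannot be wrong. A usage sketch:

    #include <cstddef>
    #include <iostream>

    // Same shape as the helper in fd_file_test.cc: Size is deduced from
    // the array type, so the write always lands on the last element.
    template <size_t Size>
    static void NullTerminateCharArray(char (&array)[Size]) {
      array[Size - 1] = '\0';
    }

    int main() {
      char buf[8] = {'o', 'v', 'e', 'r', 'f', 'l', 'o', 'w'};  // no terminator
      NullTerminateCharArray(buf);  // overwrites the final 'w'
      std::cout << buf << "\n";     // prints "overflo"
    }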
/art/runtime/jit/
jit_code_cache.cc
  46   VLOG(jit) << "Created jit code cache size=" << PrettySize(mem_map->Size());
  48   uint8_t* divider = mem_map->Begin() + RoundUp(mem_map->Size() / 4, kPageSize);
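Line 48 places a divider at a quarter of the mapping, rounded up to a page boundary. A sketch of that arithmetic, assuming 4 KiB pages and a power-of-two RoundUp like ART's:

    #include <cstddef>
    #include <iostream>

    constexpr size_t kPageSize = 4096;  // assumption; ART queries the OS

    // Same contract as ART's RoundUp: `n` must be a power of two.
    constexpr size_t RoundUp(size_t x, size_t n) {
      return (x + n - 1) & ~(n - 1);
    }

    int main() {
      size_t map_size = 64 * 1024 * 1024;  // hypothetical 64 MiB cache
      // Divider at a page-aligned quarter of the mapping, as at line 48.
      size_t divider_offset = RoundUp(map_size / 4, kPageSize);
      std::cout << divider_offset << "\n";  // prints 16777216
    }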
/art/cmdline/detail/
cmdline_parse_argument_detail.h
  76   // wildcard present). 0 means no match. If the Size() tokens are returned.
  215  return std::min(min, cur.Size());
  220  return std::max(max, cur.Size());
  314  assert(arguments.Size() > 0);
  321  if (best_match_size > arguments.Size()) {
  324  return CmdlineResult(CmdlineResult::kUnknown, "Size mismatch");
  328  *consumed_tokens = best_match_arg_def->Size();
  368  sub_idx < def_split_wildcards.Size() && sub_idx < arg_matches->Size(); ++sub_idx) {
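The hits at 215 and 220 accumulate std::min/std::max over argument-name variants, yielding a [shortest, longest] token-count pair. A sketch of that fold, with a plain vector of sizes in place of TokenRange objects:

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>
    #include <iostream>
    #include <utility>
    #include <vector>

    // Folds per-variant token counts into a [shortest, longest] pair,
    // the way the std::min/std::max accumulations at 215/220 do.
    std::pair<size_t, size_t> TokenCountRange(const std::vector<size_t>& counts) {
      size_t min = SIZE_MAX;
      size_t max = 0;
      for (size_t cur : counts) {
        min = std::min(min, cur);
        max = std::max(max, cur);
      }
      return {min, max};
    }

    int main() {
      auto range = TokenCountRange({1, 2, 2});  // e.g. "--help" vs "-Xmx _"
      std::cout << range.first << " " << range.second << "\n";  // prints "1 2"
    }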