Searched defs:bytes (Results 1 - 25 of 31) sorted by relevance


/art/test/102-concurrent-gc/src/
Main.java 24 public byte[] bytes; field in class:Main.ByteContainer
37 l[index].bytes = new byte[bufferSize];
57 byte[] temp = l[a].bytes;
58 l[a].bytes = l[b].bytes;
59 l[b].bytes = temp;
/art/cmdline/
memory_representation.h 28 // An integral representation of bytes of memory.
34 static Memory<kDivisor> FromBytes(size_t bytes) { argument
35 assert(bytes % kDivisor == 0);
36 return Memory<kDivisor>(bytes);
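A minimal standalone sketch of the idea behind Memory<kDivisor> above (class and method names here are illustrative, not ART's): wrap a byte count in a type templated on its required granularity so that misaligned sizes are rejected at construction.

#include <cassert>
#include <cstddef>

// Hypothetical re-creation of the pattern: a byte quantity that must be a
// multiple of kDivisor (for example, a page size).
template <size_t kDivisor>
class ByteQuantity {
 public:
  static ByteQuantity FromBytes(size_t bytes) {
    assert(bytes % kDivisor == 0);  // Reject sizes that are not kDivisor-aligned.
    return ByteQuantity(bytes);
  }
  size_t AsBytes() const { return bytes_; }
  size_t AsUnits() const { return bytes_ / kDivisor; }  // e.g. number of pages.
 private:
  explicit ByteQuantity(size_t bytes) : bytes_(bytes) {}
  size_t bytes_;
};

// Usage: auto stack_size = ByteQuantity<4096>::FromBytes(8192);  // 2 pages.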
/art/test/004-NativeAllocations/src/
Main.java 31 private int bytes; field in class:Main.NativeAllocation
33 NativeAllocation(int bytes, boolean testingDeadlock) throws Exception { argument
34 this.bytes = bytes;
35 register_native_allocation.invoke(runtime, bytes);
38 nativeBytes += bytes;
48 nativeBytes -= bytes;
50 register_native_free.invoke(runtime, bytes);
/art/runtime/base/
scoped_arena_allocator.cc 84 // Update how many bytes we have allocated into the arena so that the arena pool knows how
94 void* ArenaStack::AllocValgrind(size_t bytes, ArenaAllocKind kind) { argument
95 size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
101 CurrentStats()->RecordAlloc(bytes, kind);
103 VALGRIND_MAKE_MEM_UNDEFINED(ptr, bytes);
104 VALGRIND_MAKE_MEM_NOACCESS(ptr + bytes, rounded_bytes - bytes);
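The Valgrind path above pads each request with a red zone and rounds the total to 8 bytes before marking the tail inaccessible. A hedged sketch of just that arithmetic (the red-zone size and RoundUp are re-implemented here for illustration and need not match ART's values):

#include <cstddef>

// Illustrative stand-ins for the constants and helper used above.
static constexpr size_t kRedZoneBytes = 8;   // Assumed size; ART's value may differ.
static constexpr size_t kArenaAlignment = 8;

// RoundUp as commonly written for power-of-two alignments.
static inline size_t RoundUp(size_t x, size_t n) {
  return (x + n - 1) & ~(n - 1);
}

// Bytes the arena actually consumes for a request: payload plus trailing red
// zone, rounded to the arena alignment.
size_t PaddedAllocationSize(size_t bytes) {
  return RoundUp(bytes + kRedZoneBytes, kArenaAlignment);
}
// The tail (padded size minus the request) is what gets marked NOACCESS so
// Valgrind flags any read or write past the end of the user allocation.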
allocator.h 77 // Running count of number of bytes used for this kind of allocation. Increased by allocations,
81 // Largest value of bytes used seen.
84 // Total number of bytes allocated of this kind.
89 inline void RegisterAllocation(AllocatorTag tag, size_t bytes) { argument
90 g_total_bytes_used[tag].FetchAndAddSequentiallyConsistent(bytes);
91 size_t new_bytes = g_bytes_used[tag].FetchAndAddSequentiallyConsistent(bytes) + bytes;
97 inline void RegisterFree(AllocatorTag tag, size_t bytes) { argument
98 g_bytes_used[tag].FetchAndSubSequentiallyConsistent(bytes);
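RegisterAllocation()/RegisterFree() above keep per-tag running, peak, and cumulative byte counts with atomic counters. A hedged sketch of the same bookkeeping using std::atomic in place of ART's Atomic<> wrapper; the tag set is made up:

#include <atomic>
#include <cstddef>

enum AllocatorTag { kTagMisc, kTagGC, kTagCount };  // Illustrative tags.

std::atomic<size_t> g_bytes_used[kTagCount];        // Current bytes per tag.
std::atomic<size_t> g_max_bytes_used[kTagCount];    // High-water mark per tag.
std::atomic<size_t> g_total_bytes_used[kTagCount];  // Cumulative bytes per tag.

inline void RegisterAllocation(AllocatorTag tag, size_t bytes) {
  g_total_bytes_used[tag].fetch_add(bytes, std::memory_order_seq_cst);
  size_t new_bytes =
      g_bytes_used[tag].fetch_add(bytes, std::memory_order_seq_cst) + bytes;
  // Racy-but-adequate high-water-mark update, as is typical for statistics.
  size_t max = g_max_bytes_used[tag].load(std::memory_order_relaxed);
  while (new_bytes > max &&
         !g_max_bytes_used[tag].compare_exchange_weak(max, new_bytes)) {
  }
}

inline void RegisterFree(AllocatorTag tag, size_t bytes) {
  g_bytes_used[tag].fetch_sub(bytes, std::memory_order_seq_cst);
}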
arena_allocator.cc 76 void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) { argument
77 alloc_stats_[kind] += bytes;
263 // Update how many bytes we have allocated into the arena so that the arena pool knows how
269 void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) { argument
270 size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
285 VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
arena_allocator.h 85 void RecordAlloc(size_t bytes, ArenaAllocKind kind) { UNUSED(bytes, kind); } argument
101 void RecordAlloc(size_t bytes, ArenaAllocKind kind);
209 void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
211 return AllocValgrind(bytes, kind);
213 bytes = RoundUp(bytes, kAlignment);
214 if (UNLIKELY(ptr_ + bytes > end_)) {
216 ObtainNewArenaForAllocation(bytes); variable
221 ArenaAllocatorStats::RecordAlloc(bytes, kind);
[all...]
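The Alloc() snippet above is a classic bump-pointer fast path: align the request, bump a cursor, and fall back to obtaining a new arena when the current one is exhausted. A self-contained sketch of that shape (simplified, single-threaded, no Valgrind branch; not ART's class):

#include <cstddef>
#include <cstdint>
#include <cstdlib>

class SimpleArena {
 public:
  static constexpr size_t kAlignment = 8;
  static constexpr size_t kArenaSize = 64 * 1024;  // Illustrative chunk size.

  void* Alloc(size_t bytes) {
    bytes = (bytes + kAlignment - 1) & ~(kAlignment - 1);  // RoundUp to alignment.
    if (static_cast<size_t>(end_ - ptr_) < bytes) {
      ObtainNewChunk(bytes);            // Slow path: current chunk exhausted.
    }
    uint8_t* ret = ptr_;
    ptr_ += bytes;                      // Fast path: just bump the cursor.
    return ret;
  }

 private:
  void ObtainNewChunk(size_t min_bytes) {
    size_t size = min_bytes > kArenaSize ? min_bytes : kArenaSize;
    ptr_ = static_cast<uint8_t*>(std::malloc(size));  // Chunks are intentionally
    end_ = ptr_ + size;                               // not freed individually;
  }                                                   // arenas die wholesale.

  uint8_t* ptr_ = nullptr;
  uint8_t* end_ = nullptr;
};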
/art/runtime/gc/space/
bump_pointer_space.h 136 bool AllocNewTlab(Thread* self, size_t bytes);
148 // Record objects / bytes freed.
149 void RecordFree(int32_t objects, int32_t bytes) { argument
151 bytes_allocated_.FetchAndSubSequentiallyConsistent(bytes);
163 // Allocate a raw block of bytes.
164 uint8_t* AllocBlock(size_t bytes) EXCLUSIVE_LOCKS_REQUIRED(block_lock_);
185 size_t size_; // Size of the block in bytes, does not include the header.
region_space-inl.h 158 uint64_t bytes = 0; local
167 bytes += r->BytesAllocated();
171 bytes += r->BytesAllocated();
176 bytes += r->BytesAllocated();
181 bytes += r->BytesAllocated();
188 return bytes;
193 uint64_t bytes = 0; local
202 bytes += r->ObjectsAllocated();
206 bytes += r->ObjectsAllocated();
211 bytes += r->ObjectsAllocated();
[all...]
bump_pointer_space.cc 140 uint8_t* BumpPointerSpace::AllocBlock(size_t bytes) { argument
141 bytes = RoundUp(bytes, kAlignment);
146 AllocNonvirtualWithoutAccounting(bytes + sizeof(BlockHeader)));
149 header->size_ = bytes; // Write out the block header.
255 bool BumpPointerSpace::AllocNewTlab(Thread* self, size_t bytes) { argument
258 uint8_t* start = AllocBlock(bytes);
262 self->SetTlab(start, start + bytes);
270 << max_contiguous_allocation << " bytes)";
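AllocBlock() above prepends a small header recording the block size before handing the following bytes to a thread as its TLAB. A hedged sketch of that layout (the header struct and the free function are illustrative, not ART's API):

#include <cstddef>
#include <cstdint>

// Illustrative header: the stored size excludes the header itself, matching
// the comment in bump_pointer_space.h.
struct BlockHeader {
  size_t size_;
};

// Carve a block out of a contiguous buffer: write the header, then return the
// payload that follows it. 'cursor' plays the role of the space's bump pointer.
uint8_t* AllocBlock(uint8_t*& cursor, uint8_t* end, size_t bytes) {
  constexpr size_t kAlignment = 8;
  bytes = (bytes + kAlignment - 1) & ~(kAlignment - 1);
  if (cursor + sizeof(BlockHeader) + bytes > end) {
    return nullptr;  // Not enough room left in the space.
  }
  BlockHeader* header = reinterpret_cast<BlockHeader*>(cursor);
  header->size_ = bytes;                          // Payload size, header excluded.
  cursor += sizeof(BlockHeader) + bytes;
  return reinterpret_cast<uint8_t*>(header + 1);  // Start of the usable bytes.
}
// A thread would then treat [start, start + bytes) as its TLAB and bump-allocate
// objects inside it without taking the space lock.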
large_object_space.cc 259 // Returns the allocation size in bytes.
304 // Returns how many free bytes there is before the block.
309 void SetPrevFreeBytes(size_t bytes) { argument
310 DCHECK_ALIGNED(bytes, FreeListSpace::kAlignment);
311 prev_free_ = bytes / FreeListSpace::kAlignment;
545 << " of length " << size << " bytes\n";
548 << " of length " << size << " bytes\n";
554 << " of length " << free_end_ << " bytes\n";
590 context->freed.bytes += space->FreeList(self, num_ptrs, ptrs);
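SetPrevFreeBytes() above stores a byte count divided by the space alignment, so a small integer field can describe a large free run. A minimal sketch of that encoding; the alignment value and struct name are illustrative:

#include <cassert>
#include <cstddef>
#include <cstdint>

constexpr size_t kAlignment = 4096;  // Assumed alignment for illustration only.

struct AllocationInfo {
  uint32_t prev_free_;  // Free bytes before this block, in kAlignment units.

  void SetPrevFreeBytes(size_t bytes) {
    assert(bytes % kAlignment == 0);  // Only aligned byte counts are representable.
    prev_free_ = static_cast<uint32_t>(bytes / kAlignment);
  }
  size_t GetPrevFreeBytes() const {
    return static_cast<size_t>(prev_free_) * kAlignment;  // Decode back to bytes.
  }
};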
region_space.h 409 size_t bytes = static_cast<size_t>(top_ - begin_); local
410 DCHECK_LE(bytes, kRegionSize);
411 return bytes;
471 size_t live_bytes_; // The live bytes. Used to compute the live percent.
/art/runtime/jdwp/
jdwp_bits.h 35 static inline void Append1BE(std::vector<uint8_t>& bytes, uint8_t value) { argument
36 bytes.push_back(value);
39 static inline void Append2BE(std::vector<uint8_t>& bytes, uint16_t value) { argument
40 bytes.push_back(static_cast<uint8_t>(value >> 8));
41 bytes.push_back(static_cast<uint8_t>(value));
44 static inline void Append4BE(std::vector<uint8_t>& bytes, uint32_t value) { argument
45 bytes.push_back(static_cast<uint8_t>(value >> 24));
46 bytes.push_back(static_cast<uint8_t>(value >> 16));
47 bytes.push_back(static_cast<uint8_t>(value >> 8));
48 bytes.push_back(static_cast<uint8_t>(value));
51 Append8BE(std::vector<uint8_t>& bytes, uint64_t value) argument
62 AppendUtf16BE(std::vector<uint8_t>& bytes, const uint16_t* chars, size_t char_count) argument
[all...]
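The Append*BE helpers above serialize integers in big-endian (JDWP wire) order by pushing the most significant byte first. A self-contained sketch of the same pattern, generalized over the integer width; the template is an illustration, not ART's API:

#include <cstddef>
#include <cstdint>
#include <vector>

// Push 'value' into 'bytes' most-significant byte first.
template <typename T>
void AppendBE(std::vector<uint8_t>& bytes, T value) {
  for (size_t shift = (sizeof(T) - 1) * 8; ; shift -= 8) {
    bytes.push_back(static_cast<uint8_t>(value >> shift));
    if (shift == 0) break;
  }
}

// Usage: AppendBE<uint32_t>(buf, 0x11223344) appends 0x11, 0x22, 0x33, 0x44,
// mirroring Append4BE above.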
jdwp_request.cc 28 Request::Request(const uint8_t* bytes, uint32_t available) : p_(bytes) { argument
30 end_ = bytes + byte_count_;
48 CHECK(p_ == end_) << "read too few bytes: " << (end_ - p_);
50 CHECK(p_ == end_) << "read too many bytes: " << (p_ - end_);
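Request above wraps a packet body with begin/end cursors and checks at the end that exactly the advertised number of bytes was consumed. A hedged sketch of that kind of bounds-checked reader (class and method names are illustrative):

#include <cassert>
#include <cstdint>

class ByteReader {
 public:
  ByteReader(const uint8_t* bytes, uint32_t available)
      : p_(bytes), end_(bytes + available) {}

  uint8_t ReadU8() {
    assert(p_ + 1 <= end_);  // Catch "read too many bytes" before it happens.
    return *p_++;
  }
  uint32_t ReadU32BE() {
    assert(p_ + 4 <= end_);
    uint32_t v = (static_cast<uint32_t>(p_[0]) << 24) |
                 (static_cast<uint32_t>(p_[1]) << 16) |
                 (static_cast<uint32_t>(p_[2]) << 8) |
                  static_cast<uint32_t>(p_[3]);
    p_ += 4;
    return v;
  }
  // Mirrors the end-of-request checks above: the handler must drain the
  // packet exactly, reading neither too few nor too many bytes.
  void CheckFullyConsumed() const { assert(p_ == end_); }

 private:
  const uint8_t* p_;
  const uint8_t* end_;
};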
/art/test/003-omnibus-opcodes/src/
Array.java 25 static void checkBytes(byte[] bytes) { argument
26 Main.assertTrue(bytes[0] == 0);
27 Main.assertTrue(bytes[1] == -1);
28 Main.assertTrue(bytes[2] == -2);
29 Main.assertTrue(bytes[3] == -3);
30 Main.assertTrue(bytes[4] == -4);
/art/runtime/
dex_file_verifier_test.cc 65 // the final = symbols are read and used to trim the remaining bytes
170 static std::unique_ptr<const DexFile> FixChecksumAndOpen(uint8_t* bytes, size_t length, argument
174 CHECK(bytes != nullptr);
177 FixUpChecksum(bytes);
182 if (!file->WriteFully(bytes, length)) {
thread-inl.h 203 inline mirror::Object* Thread::AllocTlab(size_t bytes) { argument
204 DCHECK_GE(TlabSize(), bytes); local
207 tlsPtr_.thread_local_pos += bytes;
oat.cc 174 const uint8_t* bytes = reinterpret_cast<const uint8_t*>(data); local
175 adler32_checksum_ = adler32(adler32_checksum_, bytes, length);
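The oat.cc lines above feed raw bytes through zlib's adler32() incrementally, folding each chunk into a running checksum. A minimal standalone example of the same call pattern (requires linking against zlib; the data is made up):

#include <zlib.h>
#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  // Start from the canonical initial value, then fold in data chunk by chunk,
  // the way the oat header checksum is updated for each section written.
  uLong checksum = adler32(0L, Z_NULL, 0);
  const char* chunks[] = {"oat header ", "oat data"};
  for (const char* chunk : chunks) {
    const uint8_t* bytes = reinterpret_cast<const uint8_t*>(chunk);
    checksum = adler32(checksum, bytes, static_cast<uInt>(std::strlen(chunk)));
  }
  std::printf("adler32 = %08lx\n", checksum);
  return 0;
}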
jni_internal_test.cc 1480 char bytes[4] = { 'x', 'x', 'x', 'x' }; local
1481 env_->GetStringUTFRegion(s, 1, 2, &bytes[1]);
1482 EXPECT_EQ('x', bytes[0]);
1483 EXPECT_EQ('e', bytes[1]);
1484 EXPECT_EQ('l', bytes[2]);
1485 EXPECT_EQ('x', bytes[3]);
2041 char bytes[1024]; local
2042 jobject buffer = env_->NewDirectByteBuffer(bytes, sizeof(bytes));
2045 ASSERT_EQ(env_->GetDirectBufferAddress(buffer), bytes);
[all...]
/art/runtime/gc/accounting/
space_bitmap.h 47 // heap_begin of heap_capacity bytes, where objects are guaranteed to be kAlignment-aligned.
158 // Size in bytes of the memory that the bitmaps spans.
163 void SetHeapSize(size_t bytes) { argument
165 bitmap_size_ = OffsetToIndex(bytes) * sizeof(intptr_t);
166 CHECK_EQ(HeapSize(), bytes); local
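SetHeapSize() above converts a heap size in bytes into a bitmap size: one bit per kAlignment-sized slot, packed into word-sized entries. A sketch of that arithmetic with illustrative constants (ART's actual alignment and word handling may differ):

#include <cstddef>
#include <cstdint>

constexpr size_t kAlignment = 8;                      // Assumed object alignment.
constexpr size_t kBitsPerWord = sizeof(uintptr_t) * 8;

// Bytes of bitmap storage needed to cover 'heap_bytes' of heap.
size_t BitmapSizeForHeap(size_t heap_bytes) {
  size_t bits = heap_bytes / kAlignment;                    // One mark bit per slot.
  size_t words = (bits + kBitsPerWord - 1) / kBitsPerWord;  // Round up to whole words.
  return words * sizeof(uintptr_t);
}
// e.g. a 512 MiB heap with 8-byte slots needs 512 MiB / 8 / 8 = 8 MiB of bitmap.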
/art/runtime/gc/allocator/
rosalloc-inl.h 47 uint8_t* bytes = reinterpret_cast<uint8_t*>(m); local
49 DCHECK_EQ(bytes[i], 0);
/art/runtime/native/
org_apache_harmony_dalvik_ddmc_DdmVmInternal.cc 100 * (1b) bytes per entry
121 std::vector<uint8_t>& bytes = *reinterpret_cast<std::vector<uint8_t>*>(context); local
122 JDWP::Append4BE(bytes, t->GetThreadId());
123 JDWP::Append1BE(bytes, Dbg::ToJdwpThreadStatus(t->GetState()));
124 JDWP::Append4BE(bytes, t->GetTid());
125 JDWP::Append4BE(bytes, utime);
126 JDWP::Append4BE(bytes, stime);
127 JDWP::Append1BE(bytes, t->IsDaemon());
131 std::vector<uint8_t> bytes; local
140 JDWP::Append1BE(bytes, kThstHeaderLen);
[all...]
dalvik_system_VMRuntime.cc 206 static void VMRuntime_registerNativeAllocation(JNIEnv* env, jobject, jint bytes) { argument
207 if (UNLIKELY(bytes < 0)) {
209 ThrowRuntimeException("allocation size negative %d", bytes);
212 Runtime::Current()->GetHeap()->RegisterNativeAllocation(env, static_cast<size_t>(bytes));
215 static void VMRuntime_registerNativeFree(JNIEnv* env, jobject, jint bytes) { argument
216 if (UNLIKELY(bytes < 0)) {
218 ThrowRuntimeException("allocation size negative %d", bytes);
221 Runtime::Current()->GetHeap()->RegisterNativeFree(env, static_cast<size_t>(bytes));
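The JNI entry points above reject a negative jint size before widening it to size_t, since a negative 32-bit value would otherwise become an enormous unsigned count. A small sketch of that guard in isolation; ThrowRuntimeException here is a stand-in for ART's helper:

#include <cstddef>
#include <cstdint>
#include <cstdio>

using jint = int32_t;  // JNI's 32-bit signed int.

// Illustrative stand-in for ART's exception helper: report and return.
static void ThrowRuntimeException(const char* msg, jint value) {
  std::fprintf(stderr, "RuntimeException: %s %d\n", msg, value);
}

bool RegisterNativeAllocation(jint bytes, size_t* out_bytes) {
  if (bytes < 0) {
    // Without this check, a value like -1 would register as ~4 GiB after the cast.
    ThrowRuntimeException("allocation size negative", bytes);
    return false;
  }
  *out_bytes = static_cast<size_t>(bytes);  // Safe: value is known non-negative.
  return true;
}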
/art/test/407-arrays/src/
Main.java 32 static void $opt$testReads(boolean[] bools, byte[] bytes, char[] chars, short[] shorts, argument
38 assertEquals(0, bytes[0]);
39 assertEquals(0, bytes[index]);
63 static void $opt$testWrites(boolean[] bools, byte[] bytes, char[] chars, short[] shorts, argument
71 bytes[0] = -4;
72 assertEquals(-4, bytes[0]);
73 bytes[index] = -8;
74 assertEquals(-8, bytes[index]);
/art/runtime/gc/collector/
H A Dgarbage_collector.h39 : objects(num_objects), bytes(num_bytes) {}
42 bytes += other.bytes;
46 // Freed bytes are signed since the GC can free negative bytes if it promotes objects to a space
48 int64_t bytes; member in struct:art::gc::collector::ObjectBytePair
68 return freed_.bytes;
71 return freed_los_.bytes;
157 // Returns the estimated throughput in bytes / second.
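ObjectBytePair above keeps the freed byte count signed because, as the comment notes, promoting objects into a space with larger per-object cost can make a collection "free" a negative number of bytes. A hedged re-creation of the pair and its accumulation semantics (not ART's exact declaration):

#include <cstdint>

struct ObjectBytePair {
  ObjectBytePair(uint64_t num_objects = 0, int64_t num_bytes = 0)
      : objects(num_objects), bytes(num_bytes) {}

  ObjectBytePair& operator+=(const ObjectBytePair& other) {
    objects += other.objects;
    bytes += other.bytes;  // May go negative over a cycle; that is intentional.
    return *this;
  }

  uint64_t objects;
  int64_t bytes;  // Signed: promotions can outweigh the bytes actually freed.
};

// Example: a cycle that frees 400 bytes of objects but promotes others with
// 500 bytes of added overhead nets ObjectBytePair(n, -100).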

Completed in 340 milliseconds
