Searched defs:memory (Results 1 - 23 of 23) sorted by relevance

/frameworks/ml/nn/runtime/
Memory.cpp
39 LOG(ERROR) << "Request size larger than the memory size.";
111 uint32_t MemoryTracker::add(const Memory* memory) { argument
112 VLOG(MODEL) << __func__ << " for " << memory; local
113 // See if we already have this memory. If so,
115 auto i = mKnown.find(memory);
127 mKnown[memory] = idx;
128 mMemories.push_back(memory);
ExecutionBuilder.h
73 const Memory* memory, size_t offset, size_t length);
77 const Memory* memory, size_t offset, size_t length);
91 // 2. Once we have all the inputs and outputs, if needed, allocate shared memory for
92 // the m*Buffers entries. Copy the input values into the shared memory.
93 // We do this to avoid creating a lot of shared memory objects if we have a lot of
139 int setInputFromTemporaryMemory(uint32_t inputIndex, const Memory* memory, uint32_t offset) { argument
141 memory, offset,
144 int setOutputFromTemporaryMemory(uint32_t outputIndex, const Memory* memory, uint32_t offset) { argument
146 memory, offset,
160 int allocatePointerArgumentsToPool(std::vector<ModelArgumentInfo>* args, Memory* memory);
[all...]
ModelBuilder.cpp
106 // We keep track of the buffers. We'll allocate the shared memory only
117 // Calculate the size of the shared memory needed for all the large values.
118 // Also sets the offset for each value within the memory.
128 // Allocated the shared memory.
142 // Copy the values to this memory.
152 int ModelBuilder::setOperandValueFromMemory(uint32_t index, const Memory* memory, uint32_t offset, argument
167 // TODO validate does not exceed length of memory
170 .poolIndex = mMemories.add(memory), .offset = offset, .length = neededLength};
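
Note: the ModelBuilder.cpp lines above track each large constant value and later copy all of them into a single shared memory region at per-value offsets. A minimal standalone sketch of that pack-and-copy idea (the LargeValue struct and the 4-byte alignment are assumptions, not the actual ModelBuilder code):

    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Hypothetical description of one large constant operand value.
    struct LargeValue {
        const void* data;  // caller-owned source buffer
        size_t length;     // size in bytes
        size_t offset;     // offset assigned inside the shared pool
    };

    // Assign aligned offsets and return the total pool size needed.
    size_t layoutValues(std::vector<LargeValue>* values, size_t alignment = 4) {
        size_t total = 0;
        for (LargeValue& v : *values) {
            total = (total + alignment - 1) & ~(alignment - 1);  // align each value
            v.offset = total;
            total += v.length;
        }
        return total;
    }

    // Copy every value into the single backing buffer at its assigned offset.
    void copyValues(const std::vector<LargeValue>& values, uint8_t* poolBase) {
        for (const LargeValue& v : values) {
            std::memcpy(poolBase + v.offset, v.data, v.length);
        }
    }
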
ExecutionBuilder.cpp
126 const Memory* memory, size_t offset, size_t length) {
135 if (!memory->validateSize(offset, length)) {
139 uint32_t poolIndex = mMemories.add(memory);
166 const Memory* memory, size_t offset, size_t length) {
175 if (!memory->validateSize(offset, length)) {
179 uint32_t poolIndex = mMemories.add(memory);
385 Memory* memory) {
405 memory->create(total); // TODO check error
406 mMemories.add(memory);
449 const Memory* memory local
125 setInputFromMemory(uint32_t index, const ANeuralNetworksOperandType* type, const Memory* memory, size_t offset, size_t length) argument
165 setOutputFromMemory(uint32_t index, const ANeuralNetworksOperandType* type, const Memory* memory, size_t offset, size_t length) argument
384 allocatePointerArgumentsToPool(std::vector<ModelArgumentInfo>* args, Memory* memory) argument
458 setInputOrOutputFromTemporaryMemory(const Operand& inputOrOutputOperand, const Memory* memory, uint32_t offset, ModelArgumentInfo* inputOrOutputInfo) argument
[all...]
ExecutionPlan.cpp
179 const Memory* memory = fromModel.getMemories()[operand.location.poolIndex]; local
180 n = mSubModel->setOperandValueFromMemory(*toOperandIndex, memory,
435 // 1) Adopt a memory layout scheme analogous to stack allocation,
446 // system limits the number of shared memory objects, which are
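
Note: the ExecutionPlan.cpp comment above argues for a stack-allocation-like layout so that many temporaries share one region, because the system limits how many shared memory objects may exist. A rough sketch of such a planner, which hands out offsets into a single region that is allocated once at the end (class name and API are illustrative):

    #include <cstddef>

    // Illustrative bump "allocator": N temporaries end up needing one shared
    // memory object of totalSize() bytes instead of N separate objects.
    class RegionPlanner {
    public:
        // Reserve 'size' bytes and return the offset inside the region.
        size_t reserve(size_t size, size_t alignment = 8) {
            mNext = (mNext + alignment - 1) & ~(alignment - 1);
            const size_t offset = mNext;
            mNext += size;
            return offset;
        }
        // Size of the region to create once all reservations are known.
        size_t totalSize() const { return mNext; }

    private:
        size_t mNext = 0;
    };
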
NeuralNetworks.cpp
33 #include <memory>
220 ANeuralNetworksMemory** memory) {
221 *memory = nullptr;
230 *memory = reinterpret_cast<ANeuralNetworksMemory*>(m.release());
234 void ANeuralNetworksMemory_free(ANeuralNetworksMemory* memory) { argument
236 Memory* m = reinterpret_cast<Memory*>(memory);
291 const ANeuralNetworksMemory* memory,
293 if (!model || !memory) {
297 const Memory* mem = reinterpret_cast<const Memory*>(memory);
400 const ANeuralNetworksMemory* memory, size_
219 ANeuralNetworksMemory_createFromFd(size_t size, int prot, int fd, size_t offset, ANeuralNetworksMemory** memory) argument
290 ANeuralNetworksModel_setOperandValueFromMemory(ANeuralNetworksModel* model, int32_t index, const ANeuralNetworksMemory* memory, size_t offset, size_t length) argument
398 ANeuralNetworksExecution_setInputFromMemory(ANeuralNetworksExecution* execution, int32_t index, const ANeuralNetworksOperandType* type, const ANeuralNetworksMemory* memory, size_t offset, size_t length) argument
423 ANeuralNetworksExecution_setOutputFromMemory(ANeuralNetworksExecution* execution, int32_t index, const ANeuralNetworksOperandType* type, const ANeuralNetworksMemory* memory, size_t offset, size_t length) argument
[all...]
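
Note: the NeuralNetworks.cpp entries above are the public NNAPI entry points that wrap an fd-backed region in an ANeuralNetworksMemory and then reference slices of it by offset and length. A minimal usage sketch, assuming fd already refers to a shared region of poolSize bytes and that model and execution were created elsewhere (the offsets and lengths are illustrative):

    #include <android/NeuralNetworks.h>
    #include <sys/mman.h>

    void useMemoryPool(int fd, size_t poolSize,
                       ANeuralNetworksModel* model,
                       ANeuralNetworksExecution* execution) {
        ANeuralNetworksMemory* memory = nullptr;
        if (ANeuralNetworksMemory_createFromFd(poolSize, PROT_READ, fd,
                                               /*offset=*/0, &memory) !=
            ANEURALNETWORKS_NO_ERROR) {
            return;
        }
        // Constant operand 0 lives in the first 1024 bytes of the pool.
        ANeuralNetworksModel_setOperandValueFromMemory(model, /*index=*/0, memory,
                                                       /*offset=*/0, /*length=*/1024);
        // Execution input 0 reads the next 1024 bytes; a null type reuses the
        // operand type declared in the model.
        ANeuralNetworksExecution_setInputFromMemory(execution, /*index=*/0,
                                                    /*type=*/nullptr, memory,
                                                    /*offset=*/1024, /*length=*/1024);
        // The caller frees 'memory' with ANeuralNetworksMemory_free() once the
        // compilation and execution that reference it are finished.
    }
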
/frameworks/compile/mclinker/lib/MC/
InputBuilder.cpp
124 MemoryArea* memory = m_pMemFactory->produce(pInput.path(), pMode, pPerm); local
125 pInput.setMemArea(memory);
130 MemoryArea* memory = m_pMemFactory->produce(pMemBuffer, pSize); local
131 pInput.setMemArea(memory);
/frameworks/ml/nn/common/include/
CpuExecutor.h
45 // to memory we have allocated for a temporary operand.
62 // Used to keep a pointer to each of the memory pools.
64 sp<IMemory> memory; member in struct:android::nn::RunTimePoolInfo
91 // Decrement the usage count for the operands listed. Frees the memory
/frameworks/av/include/media/stagefright/
MediaBuffer.h
53 // allocations larger than or equal to this will use shared memory.
132 static bool isDeadObject(const sp<IMemory> &memory) { argument
133 if (memory.get() == nullptr || memory->pointer() == nullptr) return false;
134 return reinterpret_cast<SharedControl *>(memory->pointer())->isDeadObject();
137 // Sticky on enabling of shared memory MediaBuffers. By default we don't use
138 // shared memory for MediaBuffers, but we enable this for those processes
/frameworks/av/media/libstagefright/include/media/stagefright/
MediaBuffer.h
53 // allocations larger than or equal to this will use shared memory.
132 static bool isDeadObject(const sp<IMemory> &memory) { argument
133 if (memory.get() == nullptr || memory->pointer() == nullptr) return false;
134 return reinterpret_cast<SharedControl *>(memory->pointer())->isDeadObject();
137 // Sticky on enabling of shared memory MediaBuffers. By default we don't use
138 // shared memory for MediaBuffers, but we enable this for those processes
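
Note: both MediaBuffer.h copies above consult a SharedControl block placed at the start of the shared allocation to learn whether the remote owner died. A generic sketch of that sentinel pattern (the flag layout here is an assumption, not MediaBuffer's actual SharedControl):

    #include <atomic>
    #include <cstdint>

    // Illustrative control block living at the start of a shared region.
    struct SharedControlSketch {
        static constexpr uint32_t kDeadObject = 1u << 0;
        std::atomic<uint32_t> flags;

        bool isDeadObject() const {
            return (flags.load(std::memory_order_acquire) & kDeadObject) != 0;
        }
        void markDead() { flags.fetch_or(kDeadObject, std::memory_order_release); }
    };

    // Reinterpret the mapped base of the shared memory as the control block,
    // as the MediaBuffer snippet does with memory->pointer().
    inline bool isDead(void* sharedBase) {
        return sharedBase != nullptr &&
               reinterpret_cast<SharedControlSketch*>(sharedBase)->isDeadObject();
    }
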
/frameworks/base/libs/hwui/
CanvasState.cpp
38 // Now actually release the memory
80 void* memory; local
82 memory = mSnapshotPool;
86 memory = malloc(sizeof(Snapshot));
88 return new (memory) Snapshot(previous, savecount);
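
Note: the CanvasState.cpp snippet reuses a cached snapshot slot when one is available and constructs the object in place with placement new. A self-contained sketch of that allocate-or-reuse pattern (names are illustrative):

    #include <cstdlib>
    #include <new>

    struct Snapshot {
        Snapshot(Snapshot* previous, int saveCount)
            : previous(previous), saveCount(saveCount) {}
        Snapshot* previous;
        int saveCount;
    };

    class SnapshotPool {
    public:
        Snapshot* create(Snapshot* previous, int saveCount) {
            void* memory;
            if (mPool) {                 // reuse the cached slot if we have one
                memory = mPool;
                mPool = nullptr;
            } else {                     // otherwise fall back to the heap
                memory = std::malloc(sizeof(Snapshot));
            }
            return new (memory) Snapshot(previous, saveCount);  // placement new
        }

        void destroy(Snapshot* snapshot) {
            snapshot->~Snapshot();       // run the destructor explicitly
            if (!mPool) {
                mPool = snapshot;        // keep the slot for the next create()
            } else {
                std::free(snapshot);     // now actually release the memory
            }
        }

    private:
        void* mPool = nullptr;
    };
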
/frameworks/native/vulkan/tools/
vkinfo.cpp
39 VkPhysicalDeviceMemoryProperties memory; member in struct:__anon2054::GpuInfo
131 vkGetPhysicalDeviceMemoryProperties(gpu, &info.memory);
502 for (uint32_t heap = 0; heap < info.memory.memoryHeapCount; heap++) {
503 if ((info.memory.memoryHeaps[heap].flags &
508 info.memory.memoryHeaps[heap].size / 0x100000,
509 info.memory.memoryHeaps[heap].size, strbuf.str().c_str());
512 for (uint32_t type = 0; type < info.memory.memoryTypeCount; type++) {
513 if (info.memory.memoryTypes[type].heapIndex != heap)
516 info.memory.memoryTypes[type].propertyFlags;
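
Note: vkinfo.cpp above walks VkPhysicalDeviceMemoryProperties heap by heap and lists the memory types that belong to each heap. A condensed sketch of that loop, assuming a valid VkPhysicalDevice (output formatting simplified):

    #include <cstdio>
    #include <vulkan/vulkan.h>

    void printMemoryProperties(VkPhysicalDevice gpu) {
        VkPhysicalDeviceMemoryProperties memory;
        vkGetPhysicalDeviceMemoryProperties(gpu, &memory);

        for (uint32_t heap = 0; heap < memory.memoryHeapCount; heap++) {
            std::printf("Heap %u: %llu MiB%s\n", heap,
                        static_cast<unsigned long long>(
                            memory.memoryHeaps[heap].size / 0x100000),
                        (memory.memoryHeaps[heap].flags &
                         VK_MEMORY_HEAP_DEVICE_LOCAL_BIT)
                            ? " (device local)" : "");
            for (uint32_t type = 0; type < memory.memoryTypeCount; type++) {
                if (memory.memoryTypes[type].heapIndex != heap) continue;
                std::printf("  Type %u: propertyFlags=0x%x\n", type,
                            memory.memoryTypes[type].propertyFlags);
            }
        }
    }
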
/frameworks/av/drm/libmediadrm/
CryptoHal.cpp
226 * size. Once the heap base is established, shared memory buffers
259 status_t CryptoHal::toSharedBuffer(const sp<IMemory>& memory, int32_t seqNum, ::SharedBuffer* buffer) { argument
263 if (memory == NULL && buffer == NULL) {
267 sp<IMemoryHeap> heap = memory->getMemory(&offset, &size);
272 // memory must be in the declared heap
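
Note: CryptoHal::toSharedBuffer above resolves an IMemory back to its underlying heap plus an offset and size, so the buffer can be referred to by the id of an already-registered heap. A minimal sketch of that resolution step using libbinder's IMemory API (the output struct is an illustrative stand-in for the HIDL SharedBuffer):

    #include <binder/IMemory.h>

    using android::IMemory;
    using android::IMemoryHeap;
    using android::sp;

    // Illustrative stand-in for SharedBuffer {bufferId, offset, size}.
    struct SharedBufferSketch {
        uint32_t bufferId;
        uint64_t offset;
        uint64_t size;
    };

    bool toSharedBufferSketch(const sp<IMemory>& memory, uint32_t heapSeqNum,
                              SharedBufferSketch* out) {
        if (memory == nullptr || out == nullptr) return false;

        ssize_t offset;
        size_t size;
        sp<IMemoryHeap> heap = memory->getMemory(&offset, &size);  // backing heap
        if (heap == nullptr) return false;

        out->bufferId = heapSeqNum;  // id assigned when the heap was registered
        out->offset = static_cast<uint64_t>(offset);
        out->size = size;
        return true;
    }
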
/frameworks/ml/nn/common/
Utils.cpp
219 hidl_memory memory; local
221 // TODO: should we align memory size to nearest page? doesn't seem necessary...
228 memory = mem;
232 return memory;
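
Note: the Utils.cpp snippet obtains a hidl_memory from the "ashmem" allocator service. A sketch of that allocation, assuming the android.hidl.allocator@1.0 service is available (error logging trimmed):

    #include <android/hidl/allocator/1.0/IAllocator.h>
    #include <hidl/HidlSupport.h>

    using android::sp;
    using android::hardware::hidl_memory;
    using android::hidl::allocator::V1_0::IAllocator;

    // Returns an invalid (zero-size) hidl_memory on failure.
    hidl_memory allocateAshmem(uint64_t size) {
        hidl_memory memory;
        sp<IAllocator> allocator = IAllocator::getService("ashmem");
        if (allocator == nullptr) return memory;
        allocator->allocate(size, [&](bool success, const hidl_memory& mem) {
            if (success) {
                memory = mem;  // copies the descriptor, not the contents
            }
        });
        return memory;
    }
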
/frameworks/native/libs/binder/
MemoryDealer.cpp
222 // might kick out the memory region that's allocated and/or written
243 sp<IMemory> memory; local
246 memory = new Allocation(this, heap(), offset, size);
248 return memory;
277 // align all the memory blocks on a cache-line boundary
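
Note: MemoryDealer.cpp above carves allocations out of one heap and wraps each range in an IMemory (the Allocation object). From a client's point of view the usage is typically as follows (heap size and name are illustrative; pointer() matches the API era of these snippets):

    #include <binder/MemoryDealer.h>

    using android::IMemory;
    using android::MemoryDealer;
    using android::sp;

    void memoryDealerExample() {
        // One shared heap, sub-allocated into smaller IMemory blocks.
        sp<MemoryDealer> dealer = new MemoryDealer(1024 * 1024, "ExampleDealer");

        sp<IMemory> block = dealer->allocate(4096);
        if (block == nullptr || block->pointer() == nullptr) {
            return;  // heap exhausted or mapping failed
        }
        // block->pointer() stays valid while 'block' is held; dropping the
        // last reference returns the range to the dealer.
    }
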
/frameworks/native/services/sensorservice/
SensorDevice.cpp
468 int32_t SensorDevice::registerDirectChannel(const sensors_direct_mem_t* memory) { argument
473 switch (memory->type) {
485 if (memory->format != SENSOR_DIRECT_FMT_SENSORS_EVENT) {
493 .size = static_cast<uint32_t>(memory->size),
494 .memoryHandle = memory->handle,
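
Note: SensorDevice::registerDirectChannel above reads the type, format, size, and handle fields of a sensors_direct_mem_t. A sketch of how a caller might describe an ashmem-backed direct channel (creating the native handle around the ashmem fd is omitted, and the enum names are the ones expected from hardware/sensors.h):

    #include <cutils/native_handle.h>
    #include <hardware/sensors.h>

    // Describe an ashmem-backed region for a sensor direct channel.
    sensors_direct_mem_t describeDirectChannel(const native_handle_t* handle,
                                               size_t size) {
        sensors_direct_mem_t mem = {};
        mem.type = SENSOR_DIRECT_MEM_TYPE_ASHMEM;      // gralloc is the other option
        mem.format = SENSOR_DIRECT_FMT_SENSORS_EVENT;  // the only accepted format
        mem.size = size;
        mem.handle = handle;
        return mem;
    }
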
/frameworks/av/services/soundtrigger/
SoundTriggerHwService.cpp
485 SoundTriggerHwService::CallbackEvent::CallbackEvent(event_type type, sp<IMemory> memory) argument
486 : mType(type), mMemory(memory)
/frameworks/ml/nn/runtime/include/
NeuralNetworksWrapper.h
155 void setOperandValueFromMemory(uint32_t index, const Memory* memory, uint32_t offset, argument
157 if (ANeuralNetworksModel_setOperandValueFromMemory(mModel, index, memory->get(), offset,
296 Result setInputFromMemory(uint32_t index, const Memory* memory, uint32_t offset, argument
299 mExecution, index, type, memory->get(), offset, length));
308 Result setOutputFromMemory(uint32_t index, const Memory* memory, uint32_t offset, argument
311 mExecution, index, type, memory->get(), offset, length));
/frameworks/base/core/jni/
android_hardware_SoundTrigger.cpp
510 sp<IMemory> memory; local
588 memory = memoryDealer->allocate(offset + size);
589 if (memory == 0 || memory->pointer() == NULL) {
594 nSoundModel = (struct sound_trigger_sound_model *)memory->pointer();
655 status = module->loadSoundModel(memory, &handle);
719 sp<IMemory> memory = memoryDealer->allocate(totalSize); local
720 if (memory == 0 || memory->pointer() == NULL) {
724 memcpy((char *)memory
[all...]
/frameworks/native/vulkan/nulldrv/
null_driver.cpp
826 " exceeds max device memory size 0x%" PRIx64,
885 " exceeds max device memory size 0x%" PRIx64,
1173 void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) { argument
/frameworks/native/vulkan/libvulkan/
api_gen.cpp
304 VKAPI_ATTR void FreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator);
305 VKAPI_ATTR VkResult MapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData);
306 VKAPI_ATTR void UnmapMemory(VkDevice device, VkDeviceMemory memory);
309 VKAPI_ATTR void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes);
311 VKAPI_ATTR VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset);
313 VKAPI_ATTR VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset);
716 VKAPI_ATTR void FreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) { argument
717 GetData(device).dispatch.FreeMemory(device, memory, pAllocator);
720 VKAPI_ATTR VkResult MapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) { argument
721 return GetData(device).dispatch.MapMemory(device, memory, offse
724 UnmapMemory(VkDevice device, VkDeviceMemory memory) argument
736 GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) argument
744 BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) argument
752 BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) argument
1341 vkFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) argument
1346 vkMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) argument
1351 vkUnmapMemory(VkDevice device, VkDeviceMemory memory) argument
1366 vkGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) argument
1376 vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) argument
1386 vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) argument
[all...]
/frameworks/native/vulkan/include/vulkan/
vulkan.h
1717 VkDeviceMemory memory; member in struct:VkMappedMemoryRange
1745 VkDeviceMemory memory; member in struct:VkSparseMemoryBind
1778 VkDeviceMemory memory; member in struct:VkSparseImageMemoryBind
2498 typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator);
2499 typedef VkResult (VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData);
2500 typedef void (VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory);
2503 typedef void (VKAPI_PTR *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes);
2504 typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset);
2505 typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset);
2724 VkDeviceMemory memory,
4241 VkDeviceMemory memory; member in struct:VkMemoryGetWin32HandleInfoKHR
4283 VkDeviceMemory memory; member in struct:VkMemoryGetFdInfoKHR
5393 VkDeviceMemory memory; member in struct:VkBindBufferMemoryInfoKHX
5403 VkDeviceMemory memory; member in struct:VkBindImageMemoryInfoKHX
[all...]
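
Note: the api_gen.cpp and vulkan.h entries above are the core VkDeviceMemory entry points. A compact sketch of the usual allocate / bind / map sequence for a host-visible buffer (picking memoryTypeIndex from vkGetPhysicalDeviceMemoryProperties is assumed to have happened elsewhere):

    #include <cstring>
    #include <vulkan/vulkan.h>

    // Back 'buffer' with newly allocated host-visible memory and copy 'size'
    // bytes into it. 'memoryTypeIndex' must name a host-visible memory type.
    VkResult uploadToBuffer(VkDevice device, VkBuffer buffer,
                            uint32_t memoryTypeIndex,
                            const void* src, VkDeviceSize size,
                            VkDeviceMemory* outMemory) {
        VkMemoryRequirements reqs;
        vkGetBufferMemoryRequirements(device, buffer, &reqs);

        VkMemoryAllocateInfo allocInfo = {};
        allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
        allocInfo.allocationSize = reqs.size;
        allocInfo.memoryTypeIndex = memoryTypeIndex;

        VkResult result = vkAllocateMemory(device, &allocInfo, nullptr, outMemory);
        if (result != VK_SUCCESS) return result;

        result = vkBindBufferMemory(device, buffer, *outMemory, /*memoryOffset=*/0);
        if (result != VK_SUCCESS) return result;

        void* mapped = nullptr;
        result = vkMapMemory(device, *outMemory, 0, size, /*flags=*/0, &mapped);
        if (result != VK_SUCCESS) return result;
        std::memcpy(mapped, src, static_cast<size_t>(size));
        vkUnmapMemory(device, *outMemory);
        return VK_SUCCESS;
    }
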
/frameworks/data-binding/prebuilds/1.0-rc0/
databinding-studio-bundle.jar
META-INF/ META-INF/MANIFEST.MF android/ android/databinding/ android/databinding/Bindable.class Bindable. ...

Completed in 538 milliseconds