null_driver.cpp revision 3fbc856120217247c72fb5ed88500000f3881c45
1/* 2 * Copyright 2015 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#include <hardware/hwvulkan.h> 18 19#include <array> 20#include <algorithm> 21#include <inttypes.h> 22#include <string.h> 23 24// #define LOG_NDEBUG 0 25#include <log/log.h> 26#include <utils/Errors.h> 27 28#include "null_driver.h" 29 30using namespace null_driver; 31 32struct VkPhysicalDevice_T { 33 hwvulkan_dispatch_t dispatch; 34}; 35 36struct VkInstance_T { 37 hwvulkan_dispatch_t dispatch; 38 VkAllocationCallbacks allocator; 39 VkPhysicalDevice_T physical_device; 40}; 41 42struct VkQueue_T { 43 hwvulkan_dispatch_t dispatch; 44}; 45 46struct VkCommandBuffer_T { 47 hwvulkan_dispatch_t dispatch; 48}; 49 50namespace { 51// Handles for non-dispatchable objects are either pointers, or arbitrary 52// 64-bit non-zero values. We only use pointers when we need to keep state for 53// the object even in a null driver. For the rest, we form a handle as: 54// [63:63] = 1 to distinguish from pointer handles* 55// [62:56] = non-zero handle type enum value 56// [55: 0] = per-handle-type incrementing counter 57// * This works because virtual addresses with the high bit set are reserved 58// for kernel data in all ABIs we run on. 59// 60// We never reclaim handles on vkDestroy*. 
It's not even necessary for us to 61// have distinct handles for live objects, and practically speaking we won't 62// ever create 2^56 objects of the same type from a single VkDevice in a null 63// driver. 64// 65// Using a namespace here instead of 'enum class' since we want scoped 66// constants but also want implicit conversions to integral types. 67namespace HandleType { 68enum Enum { 69 kBufferView, 70 kDescriptorPool, 71 kDescriptorSet, 72 kDescriptorSetLayout, 73 kEvent, 74 kFence, 75 kFramebuffer, 76 kImageView, 77 kPipeline, 78 kPipelineCache, 79 kPipelineLayout, 80 kQueryPool, 81 kRenderPass, 82 kSampler, 83 kSemaphore, 84 kShader, 85 kShaderModule, 86 87 kNumTypes 88}; 89} // namespace HandleType 90uint64_t AllocHandle(VkDevice device, HandleType::Enum type); 91 92const VkDeviceSize kMaxDeviceMemory = VkDeviceSize(INTPTR_MAX) + 1; 93 94} // anonymous namespace 95 96struct VkDevice_T { 97 hwvulkan_dispatch_t dispatch; 98 VkAllocationCallbacks allocator; 99 VkInstance_T* instance; 100 VkQueue_T queue; 101 std::array<uint64_t, HandleType::kNumTypes> next_handle; 102}; 103 104// ----------------------------------------------------------------------------- 105// Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device 106// later. 
107 108namespace { 109int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device); 110hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice}; 111} // namespace 112 113#pragma clang diagnostic push 114#pragma clang diagnostic ignored "-Wmissing-variable-declarations" 115__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = { 116 .common = 117 { 118 .tag = HARDWARE_MODULE_TAG, 119 .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1, 120 .hal_api_version = HARDWARE_HAL_API_VERSION, 121 .id = HWVULKAN_HARDWARE_MODULE_ID, 122 .name = "Null Vulkan Driver", 123 .author = "The Android Open Source Project", 124 .methods = &nulldrv_module_methods, 125 }, 126}; 127#pragma clang diagnostic pop 128 129// ----------------------------------------------------------------------------- 130 131namespace { 132 133VkResult CreateInstance(const VkInstanceCreateInfo* /*create_info*/, 134 const VkAllocationCallbacks* allocator, 135 VkInstance* out_instance) { 136 // Assume the loader provided alloc callbacks even if the app didn't. 
137 ALOG_ASSERT( 138 allocator, 139 "Missing alloc callbacks, loader or app should have provided them"); 140 141 VkInstance_T* instance = 142 static_cast<VkInstance_T*>(allocator->pfnAllocation( 143 allocator->pUserData, sizeof(VkInstance_T), alignof(VkInstance_T), 144 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE)); 145 if (!instance) 146 return VK_ERROR_OUT_OF_HOST_MEMORY; 147 148 instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC; 149 instance->allocator = *allocator; 150 instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC; 151 152 *out_instance = instance; 153 return VK_SUCCESS; 154} 155 156int CloseDevice(struct hw_device_t* /*device*/) { 157 // nothing to do - opening a device doesn't allocate any resources 158 return 0; 159} 160 161hwvulkan_device_t nulldrv_device = { 162 .common = 163 { 164 .tag = HARDWARE_DEVICE_TAG, 165 .version = HWVULKAN_DEVICE_API_VERSION_0_1, 166 .module = &HAL_MODULE_INFO_SYM.common, 167 .close = CloseDevice, 168 }, 169 .EnumerateInstanceExtensionProperties = 170 EnumerateInstanceExtensionProperties, 171 .CreateInstance = CreateInstance, 172 .GetInstanceProcAddr = GetInstanceProcAddr}; 173 174int OpenDevice(const hw_module_t* /*module*/, 175 const char* id, 176 hw_device_t** device) { 177 if (strcmp(id, HWVULKAN_DEVICE_0) == 0) { 178 *device = &nulldrv_device.common; 179 return 0; 180 } 181 return -ENOENT; 182} 183 184VkInstance_T* GetInstanceFromPhysicalDevice( 185 VkPhysicalDevice_T* physical_device) { 186 return reinterpret_cast<VkInstance_T*>( 187 reinterpret_cast<uintptr_t>(physical_device) - 188 offsetof(VkInstance_T, physical_device)); 189} 190 191uint64_t AllocHandle(VkDevice device, HandleType::Enum type) { 192 const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1; 193 ALOGE_IF(device->next_handle[type] == kHandleMask, 194 "non-dispatchable handles of type=%u are about to overflow", type); 195 return (UINT64_C(1) << 63) | ((uint64_t(type) & 0x7) << 56) | 196 (device->next_handle[type]++ & kHandleMask); 197} 198 199} // 
namespace 200 201namespace null_driver { 202 203#define DEFINE_OBJECT_HANDLE_CONVERSION(T) \ 204 T* Get##T##FromHandle(Vk##T h); \ 205 T* Get##T##FromHandle(Vk##T h) { \ 206 return reinterpret_cast<T*>(uintptr_t(h)); \ 207 } \ 208 Vk##T GetHandleTo##T(const T* obj); \ 209 Vk##T GetHandleTo##T(const T* obj) { \ 210 return Vk##T(reinterpret_cast<uintptr_t>(obj)); \ 211 } 212 213// ----------------------------------------------------------------------------- 214// Global 215 216VkResult EnumerateInstanceExtensionProperties(const char*, 217 uint32_t* count, 218 VkExtensionProperties*) { 219 *count = 0; 220 return VK_SUCCESS; 221} 222 223PFN_vkVoidFunction GetInstanceProcAddr(VkInstance, const char* name) { 224 PFN_vkVoidFunction proc = LookupInstanceProcAddr(name); 225 if (!proc && strcmp(name, "vkGetDeviceProcAddr") == 0) 226 proc = reinterpret_cast<PFN_vkVoidFunction>(GetDeviceProcAddr); 227 return proc; 228} 229 230PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) { 231 PFN_vkVoidFunction proc = LookupDeviceProcAddr(name); 232 if (proc) 233 return proc; 234 if (strcmp(name, "vkGetSwapchainGrallocUsageANDROID") == 0) 235 return reinterpret_cast<PFN_vkVoidFunction>( 236 GetSwapchainGrallocUsageANDROID); 237 if (strcmp(name, "vkAcquireImageANDROID") == 0) 238 return reinterpret_cast<PFN_vkVoidFunction>(AcquireImageANDROID); 239 if (strcmp(name, "vkQueueSignalReleaseImageANDROID") == 0) 240 return reinterpret_cast<PFN_vkVoidFunction>( 241 QueueSignalReleaseImageANDROID); 242 return nullptr; 243} 244 245// ----------------------------------------------------------------------------- 246// Instance 247 248void DestroyInstance(VkInstance instance, 249 const VkAllocationCallbacks* /*allocator*/) { 250 instance->allocator.pfnFree(instance->allocator.pUserData, instance); 251} 252 253// ----------------------------------------------------------------------------- 254// PhysicalDevice 255 256VkResult EnumeratePhysicalDevices(VkInstance instance, 257 uint32_t* 
physical_device_count, 258 VkPhysicalDevice* physical_devices) { 259 if (physical_devices && *physical_device_count >= 1) 260 physical_devices[0] = &instance->physical_device; 261 *physical_device_count = 1; 262 return VK_SUCCESS; 263} 264 265void GetPhysicalDeviceProperties(VkPhysicalDevice, 266 VkPhysicalDeviceProperties* properties) { 267 properties->apiVersion = VK_API_VERSION; 268 properties->driverVersion = VK_MAKE_VERSION(0, 0, 1); 269 properties->vendorId = 0; 270 properties->deviceId = 0; 271 properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER; 272 strcpy(properties->deviceName, "Android Vulkan Null Driver"); 273 memset(properties->pipelineCacheUUID, 0, 274 sizeof(properties->pipelineCacheUUID)); 275} 276 277void GetPhysicalDeviceQueueFamilyProperties( 278 VkPhysicalDevice, 279 uint32_t* count, 280 VkQueueFamilyProperties* properties) { 281 if (properties) { 282 properties->queueFlags = 283 VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_DMA_BIT; 284 properties->queueCount = 1; 285 properties->timestampValidBits = 64; 286 } 287 *count = 1; 288} 289 290void GetPhysicalDeviceMemoryProperties( 291 VkPhysicalDevice, 292 VkPhysicalDeviceMemoryProperties* properties) { 293 properties->memoryTypeCount = 1; 294 properties->memoryTypes[0].propertyFlags = 295 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; 296 properties->memoryTypes[0].heapIndex = 0; 297 properties->memoryHeapCount = 1; 298 properties->memoryHeaps[0].size = kMaxDeviceMemory; 299 properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_HOST_LOCAL_BIT; 300} 301 302// ----------------------------------------------------------------------------- 303// Device 304 305VkResult CreateDevice(VkPhysicalDevice physical_device, 306 const VkDeviceCreateInfo*, 307 const VkAllocationCallbacks* allocator, 308 VkDevice* out_device) { 309 VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device); 310 if (!allocator) 311 allocator = &instance->allocator; 312 VkDevice_T* device = 
static_cast<VkDevice_T*>(allocator->pfnAllocation( 313 allocator->pUserData, sizeof(VkDevice_T), alignof(VkDevice_T), 314 VK_SYSTEM_ALLOCATION_SCOPE_DEVICE)); 315 if (!device) 316 return VK_ERROR_OUT_OF_HOST_MEMORY; 317 318 device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC; 319 device->allocator = *allocator; 320 device->instance = instance; 321 device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC; 322 std::fill(device->next_handle.begin(), device->next_handle.end(), 323 UINT64_C(0)); 324 325 *out_device = device; 326 return VK_SUCCESS; 327} 328 329void DestroyDevice(VkDevice device, 330 const VkAllocationCallbacks* /*allocator*/) { 331 if (!device) 332 return; 333 device->allocator.pfnFree(device->allocator.pUserData, device); 334} 335 336void GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) { 337 *queue = &device->queue; 338} 339 340// ----------------------------------------------------------------------------- 341// CommandPool 342 343struct CommandPool { 344 typedef VkCommandPool HandleType; 345 VkAllocationCallbacks allocator; 346}; 347DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool) 348 349VkResult CreateCommandPool(VkDevice device, 350 const VkCommandPoolCreateInfo* /*create_info*/, 351 const VkAllocationCallbacks* allocator, 352 VkCommandPool* cmd_pool) { 353 if (!allocator) 354 allocator = &device->allocator; 355 CommandPool* pool = static_cast<CommandPool*>(allocator->pfnAllocation( 356 allocator->pUserData, sizeof(CommandPool), alignof(CommandPool), 357 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT)); 358 if (!pool) 359 return VK_ERROR_OUT_OF_HOST_MEMORY; 360 pool->allocator = *allocator; 361 *cmd_pool = GetHandleToCommandPool(pool); 362 return VK_SUCCESS; 363} 364 365void DestroyCommandPool(VkDevice /*device*/, 366 VkCommandPool cmd_pool, 367 const VkAllocationCallbacks* /*allocator*/) { 368 CommandPool* pool = GetCommandPoolFromHandle(cmd_pool); 369 pool->allocator.pfnFree(pool->allocator.pUserData, pool); 370} 371 372// 
----------------------------------------------------------------------------- 373// CmdBuffer 374 375VkResult AllocateCommandBuffers(VkDevice /*device*/, 376 const VkCommandBufferAllocateInfo* alloc_info, 377 VkCommandBuffer* cmdbufs) { 378 VkResult result = VK_SUCCESS; 379 CommandPool& pool = *GetCommandPoolFromHandle(alloc_info->commandPool); 380 std::fill(cmdbufs, cmdbufs + alloc_info->bufferCount, nullptr); 381 for (uint32_t i = 0; i < alloc_info->bufferCount; i++) { 382 cmdbufs[i] = 383 static_cast<VkCommandBuffer_T*>(pool.allocator.pfnAllocation( 384 pool.allocator.pUserData, sizeof(VkCommandBuffer_T), 385 alignof(VkCommandBuffer_T), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT)); 386 if (!cmdbufs[i]) { 387 result = VK_ERROR_OUT_OF_HOST_MEMORY; 388 break; 389 } 390 cmdbufs[i]->dispatch.magic = HWVULKAN_DISPATCH_MAGIC; 391 } 392 if (result != VK_SUCCESS) { 393 for (uint32_t i = 0; i < alloc_info->bufferCount; i++) { 394 if (!cmdbufs[i]) 395 break; 396 pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]); 397 } 398 } 399 return result; 400} 401 402void FreeCommandBuffers(VkDevice /*device*/, 403 VkCommandPool cmd_pool, 404 uint32_t count, 405 const VkCommandBuffer* cmdbufs) { 406 CommandPool& pool = *GetCommandPoolFromHandle(cmd_pool); 407 for (uint32_t i = 0; i < count; i++) 408 pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]); 409} 410 411// ----------------------------------------------------------------------------- 412// DeviceMemory 413 414struct DeviceMemory { 415 typedef VkDeviceMemory HandleType; 416 VkDeviceSize size; 417 alignas(16) uint8_t data[0]; 418}; 419DEFINE_OBJECT_HANDLE_CONVERSION(DeviceMemory) 420 421VkResult AllocateMemory(VkDevice device, 422 const VkMemoryAllocateInfo* alloc_info, 423 const VkAllocationCallbacks* allocator, 424 VkDeviceMemory* mem_handle) { 425 if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize) 426 return VK_ERROR_OUT_OF_HOST_MEMORY; 427 if (!allocator) 428 allocator = &device->allocator; 429 430 
size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize); 431 DeviceMemory* mem = static_cast<DeviceMemory*>(allocator->pfnAllocation( 432 allocator->pUserData, size, alignof(DeviceMemory), 433 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT)); 434 if (!mem) 435 return VK_ERROR_OUT_OF_HOST_MEMORY; 436 mem->size = size; 437 *mem_handle = GetHandleToDeviceMemory(mem); 438 return VK_SUCCESS; 439} 440 441void FreeMemory(VkDevice device, 442 VkDeviceMemory mem_handle, 443 const VkAllocationCallbacks* allocator) { 444 if (!allocator) 445 allocator = &device->allocator; 446 DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle); 447 allocator->pfnFree(allocator->pUserData, mem); 448} 449 450VkResult MapMemory(VkDevice, 451 VkDeviceMemory mem_handle, 452 VkDeviceSize offset, 453 VkDeviceSize, 454 VkMemoryMapFlags, 455 void** out_ptr) { 456 DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle); 457 *out_ptr = &mem->data[0] + offset; 458 return VK_SUCCESS; 459} 460 461// ----------------------------------------------------------------------------- 462// Buffer 463 464struct Buffer { 465 typedef VkBuffer HandleType; 466 VkDeviceSize size; 467}; 468DEFINE_OBJECT_HANDLE_CONVERSION(Buffer) 469 470VkResult CreateBuffer(VkDevice device, 471 const VkBufferCreateInfo* create_info, 472 const VkAllocationCallbacks* allocator, 473 VkBuffer* buffer_handle) { 474 ALOGW_IF(create_info->size > kMaxDeviceMemory, 475 "CreateBuffer: requested size 0x%" PRIx64 476 " exceeds max device memory size 0x%" PRIx64, 477 create_info->size, kMaxDeviceMemory); 478 if (!allocator) 479 allocator = &device->allocator; 480 Buffer* buffer = static_cast<Buffer*>(allocator->pfnAllocation( 481 allocator->pUserData, sizeof(Buffer), alignof(Buffer), 482 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT)); 483 if (!buffer) 484 return VK_ERROR_OUT_OF_HOST_MEMORY; 485 buffer->size = create_info->size; 486 *buffer_handle = GetHandleToBuffer(buffer); 487 return VK_SUCCESS; 488} 489 490void 
GetBufferMemoryRequirements(VkDevice, 491 VkBuffer buffer_handle, 492 VkMemoryRequirements* requirements) { 493 Buffer* buffer = GetBufferFromHandle(buffer_handle); 494 requirements->size = buffer->size; 495 requirements->alignment = 16; // allow fast Neon/SSE memcpy 496 requirements->memoryTypeBits = 0x1; 497} 498 499void DestroyBuffer(VkDevice device, 500 VkBuffer buffer_handle, 501 const VkAllocationCallbacks* allocator) { 502 if (!allocator) 503 allocator = &device->allocator; 504 Buffer* buffer = GetBufferFromHandle(buffer_handle); 505 allocator->pfnFree(allocator->pUserData, buffer); 506} 507 508// ----------------------------------------------------------------------------- 509// Image 510 511struct Image { 512 typedef VkImage HandleType; 513 VkDeviceSize size; 514}; 515DEFINE_OBJECT_HANDLE_CONVERSION(Image) 516 517VkResult CreateImage(VkDevice device, 518 const VkImageCreateInfo* create_info, 519 const VkAllocationCallbacks* allocator, 520 VkImage* image_handle) { 521 if (create_info->imageType != VK_IMAGE_TYPE_2D || 522 create_info->format != VK_FORMAT_R8G8B8A8_UNORM || 523 create_info->mipLevels != 1) { 524 ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u", 525 create_info->imageType, create_info->format, 526 create_info->mipLevels); 527 return VK_ERROR_OUT_OF_HOST_MEMORY; 528 } 529 530 VkDeviceSize size = 531 VkDeviceSize(create_info->extent.width * create_info->extent.height) * 532 create_info->arrayLayers * create_info->samples * 4u; 533 ALOGW_IF(size > kMaxDeviceMemory, 534 "CreateImage: image size 0x%" PRIx64 535 " exceeds max device memory size 0x%" PRIx64, 536 size, kMaxDeviceMemory); 537 538 if (!allocator) 539 allocator = &device->allocator; 540 Image* image = static_cast<Image*>(allocator->pfnAllocation( 541 allocator->pUserData, sizeof(Image), alignof(Image), 542 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT)); 543 if (!image) 544 return VK_ERROR_OUT_OF_HOST_MEMORY; 545 image->size = size; 546 *image_handle = GetHandleToImage(image); 547 
return VK_SUCCESS; 548} 549 550void GetImageMemoryRequirements(VkDevice, 551 VkImage image_handle, 552 VkMemoryRequirements* requirements) { 553 Image* image = GetImageFromHandle(image_handle); 554 requirements->size = image->size; 555 requirements->alignment = 16; // allow fast Neon/SSE memcpy 556 requirements->memoryTypeBits = 0x1; 557} 558 559void DestroyImage(VkDevice device, 560 VkImage image_handle, 561 const VkAllocationCallbacks* allocator) { 562 if (!allocator) 563 allocator = &device->allocator; 564 Image* image = GetImageFromHandle(image_handle); 565 allocator->pfnFree(allocator->pUserData, image); 566} 567 568// ----------------------------------------------------------------------------- 569// No-op types 570 571VkResult CreateBufferView(VkDevice device, 572 const VkBufferViewCreateInfo*, 573 const VkAllocationCallbacks* /*allocator*/, 574 VkBufferView* view) { 575 *view = AllocHandle(device, HandleType::kBufferView); 576 return VK_SUCCESS; 577} 578 579VkResult CreateDescriptorPool(VkDevice device, 580 const VkDescriptorPoolCreateInfo*, 581 const VkAllocationCallbacks* /*allocator*/, 582 VkDescriptorPool* pool) { 583 *pool = AllocHandle(device, HandleType::kDescriptorPool); 584 return VK_SUCCESS; 585} 586 587VkResult AllocateDescriptorSets(VkDevice device, 588 const VkDescriptorSetAllocateInfo* alloc_info, 589 VkDescriptorSet* descriptor_sets) { 590 for (uint32_t i = 0; i < alloc_info->setLayoutCount; i++) 591 descriptor_sets[i] = AllocHandle(device, HandleType::kDescriptorSet); 592 return VK_SUCCESS; 593} 594 595VkResult CreateDescriptorSetLayout(VkDevice device, 596 const VkDescriptorSetLayoutCreateInfo*, 597 const VkAllocationCallbacks* /*allocator*/, 598 VkDescriptorSetLayout* layout) { 599 *layout = AllocHandle(device, HandleType::kDescriptorSetLayout); 600 return VK_SUCCESS; 601} 602 603VkResult CreateEvent(VkDevice device, 604 const VkEventCreateInfo*, 605 const VkAllocationCallbacks* /*allocator*/, 606 VkEvent* event) { 607 *event = 
AllocHandle(device, HandleType::kEvent); 608 return VK_SUCCESS; 609} 610 611VkResult CreateFence(VkDevice device, 612 const VkFenceCreateInfo*, 613 const VkAllocationCallbacks* /*allocator*/, 614 VkFence* fence) { 615 *fence = AllocHandle(device, HandleType::kFence); 616 return VK_SUCCESS; 617} 618 619VkResult CreateFramebuffer(VkDevice device, 620 const VkFramebufferCreateInfo*, 621 const VkAllocationCallbacks* /*allocator*/, 622 VkFramebuffer* framebuffer) { 623 *framebuffer = AllocHandle(device, HandleType::kFramebuffer); 624 return VK_SUCCESS; 625} 626 627VkResult CreateImageView(VkDevice device, 628 const VkImageViewCreateInfo*, 629 const VkAllocationCallbacks* /*allocator*/, 630 VkImageView* view) { 631 *view = AllocHandle(device, HandleType::kImageView); 632 return VK_SUCCESS; 633} 634 635VkResult CreateGraphicsPipelines(VkDevice device, 636 VkPipelineCache, 637 uint32_t count, 638 const VkGraphicsPipelineCreateInfo*, 639 const VkAllocationCallbacks* /*allocator*/, 640 VkPipeline* pipelines) { 641 for (uint32_t i = 0; i < count; i++) 642 pipelines[i] = AllocHandle(device, HandleType::kPipeline); 643 return VK_SUCCESS; 644} 645 646VkResult CreateComputePipelines(VkDevice device, 647 VkPipelineCache, 648 uint32_t count, 649 const VkComputePipelineCreateInfo*, 650 const VkAllocationCallbacks* /*allocator*/, 651 VkPipeline* pipelines) { 652 for (uint32_t i = 0; i < count; i++) 653 pipelines[i] = AllocHandle(device, HandleType::kPipeline); 654 return VK_SUCCESS; 655} 656 657VkResult CreatePipelineCache(VkDevice device, 658 const VkPipelineCacheCreateInfo*, 659 const VkAllocationCallbacks* /*allocator*/, 660 VkPipelineCache* cache) { 661 *cache = AllocHandle(device, HandleType::kPipelineCache); 662 return VK_SUCCESS; 663} 664 665VkResult CreatePipelineLayout(VkDevice device, 666 const VkPipelineLayoutCreateInfo*, 667 const VkAllocationCallbacks* /*allocator*/, 668 VkPipelineLayout* layout) { 669 *layout = AllocHandle(device, HandleType::kPipelineLayout); 670 
return VK_SUCCESS; 671} 672 673VkResult CreateQueryPool(VkDevice device, 674 const VkQueryPoolCreateInfo*, 675 const VkAllocationCallbacks* /*allocator*/, 676 VkQueryPool* pool) { 677 *pool = AllocHandle(device, HandleType::kQueryPool); 678 return VK_SUCCESS; 679} 680 681VkResult CreateRenderPass(VkDevice device, 682 const VkRenderPassCreateInfo*, 683 const VkAllocationCallbacks* /*allocator*/, 684 VkRenderPass* renderpass) { 685 *renderpass = AllocHandle(device, HandleType::kRenderPass); 686 return VK_SUCCESS; 687} 688 689VkResult CreateSampler(VkDevice device, 690 const VkSamplerCreateInfo*, 691 const VkAllocationCallbacks* /*allocator*/, 692 VkSampler* sampler) { 693 *sampler = AllocHandle(device, HandleType::kSampler); 694 return VK_SUCCESS; 695} 696 697VkResult CreateSemaphore(VkDevice device, 698 const VkSemaphoreCreateInfo*, 699 const VkAllocationCallbacks* /*allocator*/, 700 VkSemaphore* semaphore) { 701 *semaphore = AllocHandle(device, HandleType::kSemaphore); 702 return VK_SUCCESS; 703} 704 705VkResult CreateShader(VkDevice device, 706 const VkShaderCreateInfo*, 707 const VkAllocationCallbacks* /*allocator*/, 708 VkShader* shader) { 709 *shader = AllocHandle(device, HandleType::kShader); 710 return VK_SUCCESS; 711} 712 713VkResult CreateShaderModule(VkDevice device, 714 const VkShaderModuleCreateInfo*, 715 const VkAllocationCallbacks* /*allocator*/, 716 VkShaderModule* module) { 717 *module = AllocHandle(device, HandleType::kShaderModule); 718 return VK_SUCCESS; 719} 720 721VkResult GetSwapchainGrallocUsageANDROID(VkDevice, 722 VkFormat, 723 VkImageUsageFlags, 724 int* grallocUsage) { 725 // The null driver never reads or writes the gralloc buffer 726 *grallocUsage = 0; 727 return VK_SUCCESS; 728} 729 730VkResult AcquireImageANDROID(VkDevice, VkImage, int fence, VkSemaphore) { 731 close(fence); 732 return VK_SUCCESS; 733} 734 735VkResult QueueSignalReleaseImageANDROID(VkQueue, VkImage, int* fence) { 736 *fence = -1; 737 return VK_SUCCESS; 738} 739 740// 
----------------------------------------------------------------------------- 741// No-op entrypoints 742 743// clang-format off 744#pragma clang diagnostic push 745#pragma clang diagnostic ignored "-Wunused-parameter" 746 747void GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) { 748 ALOGV("TODO: vk%s", __FUNCTION__); 749} 750 751void GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) { 752 ALOGV("TODO: vk%s", __FUNCTION__); 753} 754 755void GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) { 756 ALOGV("TODO: vk%s", __FUNCTION__); 757} 758 759VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) { 760 ALOGV("TODO: vk%s", __FUNCTION__); 761 return VK_SUCCESS; 762} 763 764VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pCount, VkLayerProperties* pProperties) { 765 ALOGV("TODO: vk%s", __FUNCTION__); 766 return VK_SUCCESS; 767} 768 769VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pCount, VkExtensionProperties* pProperties) { 770 ALOGV("TODO: vk%s", __FUNCTION__); 771 return VK_SUCCESS; 772} 773 774VkResult QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmitInfo, VkFence fence) { 775 return VK_SUCCESS; 776} 777 778VkResult QueueWaitIdle(VkQueue queue) { 779 ALOGV("TODO: vk%s", __FUNCTION__); 780 return VK_SUCCESS; 781} 782 783VkResult DeviceWaitIdle(VkDevice device) { 784 ALOGV("TODO: vk%s", __FUNCTION__); 785 return VK_SUCCESS; 786} 787 788void UnmapMemory(VkDevice device, VkDeviceMemory mem) { 789} 790 791VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* 
pMemRanges) { 792 ALOGV("TODO: vk%s", __FUNCTION__); 793 return VK_SUCCESS; 794} 795 796VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) { 797 ALOGV("TODO: vk%s", __FUNCTION__); 798 return VK_SUCCESS; 799} 800 801void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) { 802 ALOGV("TODO: vk%s", __FUNCTION__); 803} 804 805VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) { 806 return VK_SUCCESS; 807} 808 809VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset) { 810 return VK_SUCCESS; 811} 812 813void GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) { 814 ALOGV("TODO: vk%s", __FUNCTION__); 815} 816 817void GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, uint32_t samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) { 818 ALOGV("TODO: vk%s", __FUNCTION__); 819} 820 821VkResult QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) { 822 ALOGV("TODO: vk%s", __FUNCTION__); 823 return VK_SUCCESS; 824} 825 826void DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* allocator) { 827} 828 829VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) { 830 return VK_SUCCESS; 831} 832 833VkResult GetFenceStatus(VkDevice device, VkFence fence) { 834 ALOGV("TODO: vk%s", __FUNCTION__); 835 return VK_SUCCESS; 836} 837 838VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) { 839 return VK_SUCCESS; 840} 841 842void DestroySemaphore(VkDevice device, VkSemaphore 
// ---------------------------------------------------------------------------
// Remainder of the null driver's device-level entry points. Nearly everything
// below is an intentional no-op: non-dispatchable handles are synthesized
// counter values that are never reclaimed (see the HandleType comment at the
// top of the file), so the Destroy* functions have nothing to free, and
// command-buffer recording simply discards the commands. Functions that still
// log ALOGV("TODO: vk...") are placeholders whose behavior is unimplemented.
// ---------------------------------------------------------------------------

// Tail of DestroySemaphore(); its signature begins above this chunk.
// No-op — semaphore handles carry no driver-side state to release.
semaphore, const VkAllocationCallbacks* allocator) {
}

// --- Events ---------------------------------------------------------------

void DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* allocator) {
}

// NOTE(review): per the Vulkan spec vkGetEventStatus reports VK_EVENT_SET /
// VK_EVENT_RESET; returning VK_SUCCESS here is a placeholder (see ALOGV TODO).
VkResult GetEventStatus(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Unimplemented: no event state is tracked, so set/reset succeed trivially.
VkResult SetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult ResetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// --- Query pools ----------------------------------------------------------

void DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* allocator) {
}

// Unimplemented: claims success without writing any results into pData.
// NOTE(review): callers relying on *pData being filled will read garbage —
// acceptable only because no queries are ever executed by this driver.
VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// --- Buffer/image views, shaders, pipeline caches -------------------------

void DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* allocator) {
}

// Unimplemented: pLayout is left untouched — see NOTE on GetQueryPoolResults.
void GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* allocator) {
}

void DestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* allocator) {
}

void DestroyShader(VkDevice device, VkShader shader, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* allocator) {
}

// Unimplemented: neither *pDataSize nor pData is written.
VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount,
                             const VkPipelineCache* pSrcCaches) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// --- Pipelines, samplers, descriptors -------------------------------------

void DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* allocator) {
}

void DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* allocator) {
}

VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Unimplemented: descriptor writes/copies are accepted and discarded.
void UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// --- Framebuffers and render passes ---------------------------------------

void DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* allocator) {
}

void DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* allocator) {
}

// Unimplemented: pGranularity is not written.
void GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

// --- Command pools and command buffers ------------------------------------

VkResult ResetCommandPool(VkDevice device, VkCommandPool cmdPool, VkCommandPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Recording begin/end always succeed; the commands recorded between them are
// dropped on the floor (every Cmd* below is an empty body).
VkResult BeginCommandBuffer(VkCommandBuffer cmdBuffer, const
                            VkCommandBufferBeginInfo* pBeginInfo) {
    return VK_SUCCESS;
}

VkResult EndCommandBuffer(VkCommandBuffer cmdBuffer) {
    return VK_SUCCESS;
}

VkResult ResetCommandBuffer(VkCommandBuffer cmdBuffer, VkCommandBufferResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// --- Command recording: state binding / dynamic state (all no-ops) --------

void CmdBindPipeline(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
}

void CmdSetViewport(VkCommandBuffer cmdBuffer, uint32_t viewportCount, const VkViewport* pViewports) {
}

void CmdSetScissor(VkCommandBuffer cmdBuffer, uint32_t scissorCount, const VkRect2D* pScissors) {
}

void CmdSetLineWidth(VkCommandBuffer cmdBuffer, float lineWidth) {
}

void CmdSetDepthBias(VkCommandBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
}

void CmdSetBlendConstants(VkCommandBuffer cmdBuffer, const float blendConst[4]) {
}

void CmdSetDepthBounds(VkCommandBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
}

void CmdSetStencilCompareMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
}

void CmdSetStencilWriteMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
}

void CmdSetStencilReference(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
}

void CmdBindDescriptorSets(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
}

void CmdBindIndexBuffer(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
}

void CmdBindVertexBuffers(VkCommandBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const
                          VkDeviceSize* pOffsets) {
}

// --- Command recording: draw / dispatch (all no-ops) ----------------------

void CmdDraw(VkCommandBuffer cmdBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
}

void CmdDrawIndexed(VkCommandBuffer cmdBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
}

void CmdDrawIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDrawIndexedIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDispatch(VkCommandBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
}

void CmdDispatchIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
}

// --- Command recording: transfer / clear / resolve (all no-ops) -----------

void CmdCopyBuffer(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
}

void CmdCopyImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
}

void CmdBlitImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {
}

void CmdCopyBufferToImage(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdCopyImageToBuffer(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdUpdateBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const uint32_t* pData) {
}

void
CmdFillBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
}

void CmdClearColorImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearDepthStencilImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearAttachments(VkCommandBuffer cmdBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {
}

void CmdResolveImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
}

// --- Command recording: synchronization and queries (all no-ops) ----------

void CmdSetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdResetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdWaitEvents(VkCommandBuffer cmdBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags destStageMask, uint32_t memBarrierCount, const void* const* ppMemBarriers) {
}

void CmdPipelineBarrier(VkCommandBuffer cmdBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags destStageMask, VkDependencyFlags dependencyFlags, uint32_t memBarrierCount, const void* const* ppMemBarriers) {
}

void CmdBeginQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
}

void CmdEndQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
}

void CmdResetQueryPool(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t
                       queryCount) {
}

void CmdWriteTimestamp(VkCommandBuffer cmdBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t slot) {
}

void CmdCopyQueryPoolResults(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
}

void CmdPushConstants(VkCommandBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
}

// --- Command recording: render passes (all no-ops) ------------------------

void CmdBeginRenderPass(VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkRenderPassContents contents) {
}

void CmdNextSubpass(VkCommandBuffer cmdBuffer, VkRenderPassContents contents) {
}

void CmdEndRenderPass(VkCommandBuffer cmdBuffer) {
}

void CmdExecuteCommands(VkCommandBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCommandBuffer* pCmdBuffers) {
}

#pragma clang diagnostic pop
// clang-format on

} // namespace null_driver