null_driver.cpp revision 715b86ac7d0853131b375ff786c87d8d87a762a1
/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <hardware/hwvulkan.h>
#include <vulkan/vk_ext_debug_report.h>

#include <algorithm>
#include <array>
#include <inttypes.h>
#include <string.h>

#include <log/log.h>
#include <utils/Errors.h>

#include "null_driver_gen.h"

using namespace null_driver;

// Dispatchable handle types. Each begins with an hwvulkan_dispatch_t whose
// magic field is set to HWVULKAN_DISPATCH_MAGIC at creation time.
struct VkPhysicalDevice_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkInstance_T {
    hwvulkan_dispatch_t dispatch;
    VkAllocationCallbacks allocator;      // copied from vkCreateInstance
    VkPhysicalDevice_T physical_device;   // the single exposed physical device
    uint64_t next_callback_handle;        // counter for instance-level handles
    bool debug_report_enabled;            // VK_EXT_debug_report was requested
};

struct VkQueue_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkCommandBuffer_T {
    hwvulkan_dispatch_t dispatch;
};

namespace {
// Handles for non-dispatchable objects are either pointers, or arbitrary
// 64-bit non-zero values. We only use pointers when we need to keep state for
// the object even in a null driver. For the rest, we form a handle as:
//   [63:63] = 1 to distinguish from pointer handles*
//   [62:56] = non-zero handle type enum value
//   [55: 0] = per-handle-type incrementing counter
// * This works because virtual addresses with the high bit set are reserved
// for kernel data in all ABIs we run on.
//
// We never reclaim handles on vkDestroy*. It's not even necessary for us to
// have distinct handles for live objects, and practically speaking we won't
// ever create 2^56 objects of the same type from a single VkDevice in a null
// driver.
//
// Using a namespace here instead of 'enum class' since we want scoped
// constants but also want implicit conversions to integral types.
namespace HandleType {
enum Enum {
    kBufferView,
    kDebugReportCallbackEXT,
    kDescriptorPool,
    kDescriptorSet,
    kDescriptorSetLayout,
    kEvent,
    kFence,
    kFramebuffer,
    kImageView,
    kPipeline,
    kPipelineCache,
    kPipelineLayout,
    kQueryPool,
    kRenderPass,
    kSampler,
    kSemaphore,
    kShaderModule,

    kNumTypes
};
}  // namespace HandleType

// Largest memory heap size we report; "device" memory is backed by host
// allocations, so cap at the process address-space limit.
const VkDeviceSize kMaxDeviceMemory = VkDeviceSize(INTPTR_MAX) + 1;

}  // anonymous namespace

struct VkDevice_T {
    hwvulkan_dispatch_t dispatch;
    VkAllocationCallbacks allocator;  // effective allocator for this device
    VkInstance_T* instance;
    VkQueue_T queue;                  // single queue, shared by all families
    // Per-handle-type counters feeding AllocHandle.
    std::array<uint64_t, HandleType::kNumTypes> next_handle;
};

// -----------------------------------------------------------------------------
// Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device
// later.
108 109namespace { 110int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device); 111hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice}; 112} // namespace 113 114#pragma clang diagnostic push 115#pragma clang diagnostic ignored "-Wmissing-variable-declarations" 116__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = { 117 .common = 118 { 119 .tag = HARDWARE_MODULE_TAG, 120 .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1, 121 .hal_api_version = HARDWARE_HAL_API_VERSION, 122 .id = HWVULKAN_HARDWARE_MODULE_ID, 123 .name = "Null Vulkan Driver", 124 .author = "The Android Open Source Project", 125 .methods = &nulldrv_module_methods, 126 }, 127}; 128#pragma clang diagnostic pop 129 130// ----------------------------------------------------------------------------- 131 132namespace { 133 134int CloseDevice(struct hw_device_t* /*device*/) { 135 // nothing to do - opening a device doesn't allocate any resources 136 return 0; 137} 138 139hwvulkan_device_t nulldrv_device = { 140 .common = 141 { 142 .tag = HARDWARE_DEVICE_TAG, 143 .version = HWVULKAN_DEVICE_API_VERSION_0_1, 144 .module = &HAL_MODULE_INFO_SYM.common, 145 .close = CloseDevice, 146 }, 147 .EnumerateInstanceExtensionProperties = 148 EnumerateInstanceExtensionProperties, 149 .CreateInstance = CreateInstance, 150 .GetInstanceProcAddr = GetInstanceProcAddr}; 151 152int OpenDevice(const hw_module_t* /*module*/, 153 const char* id, 154 hw_device_t** device) { 155 if (strcmp(id, HWVULKAN_DEVICE_0) == 0) { 156 *device = &nulldrv_device.common; 157 return 0; 158 } 159 return -ENOENT; 160} 161 162VkInstance_T* GetInstanceFromPhysicalDevice( 163 VkPhysicalDevice_T* physical_device) { 164 return reinterpret_cast<VkInstance_T*>( 165 reinterpret_cast<uintptr_t>(physical_device) - 166 offsetof(VkInstance_T, physical_device)); 167} 168 169uint64_t AllocHandle(uint64_t type, uint64_t* next_handle) { 170 const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1; 171 
ALOGE_IF(*next_handle == kHandleMask, 172 "non-dispatchable handles of type=%" PRIu64 173 " are about to overflow", 174 type); 175 return (UINT64_C(1) << 63) | ((type & 0x7) << 56) | 176 ((*next_handle)++ & kHandleMask); 177} 178 179template <class Handle> 180Handle AllocHandle(VkInstance instance, HandleType::Enum type) { 181 return reinterpret_cast<Handle>( 182 AllocHandle(type, &instance->next_callback_handle)); 183} 184 185template <class Handle> 186Handle AllocHandle(VkDevice device, HandleType::Enum type) { 187 return reinterpret_cast<Handle>( 188 AllocHandle(type, &device->next_handle[type])); 189} 190 191} // namespace 192 193namespace null_driver { 194 195#define DEFINE_OBJECT_HANDLE_CONVERSION(T) \ 196 T* Get##T##FromHandle(Vk##T h); \ 197 T* Get##T##FromHandle(Vk##T h) { \ 198 return reinterpret_cast<T*>(uintptr_t(h)); \ 199 } \ 200 Vk##T GetHandleTo##T(const T* obj); \ 201 Vk##T GetHandleTo##T(const T* obj) { \ 202 return Vk##T(reinterpret_cast<uintptr_t>(obj)); \ 203 } 204 205// ----------------------------------------------------------------------------- 206// Global 207 208VKAPI_ATTR 209VkResult EnumerateInstanceExtensionProperties( 210 const char* layer_name, 211 uint32_t* count, 212 VkExtensionProperties* properties) { 213 if (layer_name) { 214 ALOGW( 215 "Driver vkEnumerateInstanceExtensionProperties shouldn't be called " 216 "with a layer name ('%s')", 217 layer_name); 218 *count = 0; 219 return VK_SUCCESS; 220 } 221 222 const VkExtensionProperties kExtensions[] = { 223 {VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}}; 224 const uint32_t kExtensionsCount = 225 sizeof(kExtensions) / sizeof(kExtensions[0]); 226 227 if (!properties || *count > kExtensionsCount) 228 *count = kExtensionsCount; 229 if (properties) 230 std::copy(kExtensions, kExtensions + *count, properties); 231 return *count < kExtensionsCount ? 
VK_INCOMPLETE : VK_SUCCESS; 232} 233 234VKAPI_ATTR 235VkResult CreateInstance(const VkInstanceCreateInfo* create_info, 236 const VkAllocationCallbacks* allocator, 237 VkInstance* out_instance) { 238 // Assume the loader provided alloc callbacks even if the app didn't. 239 ALOG_ASSERT( 240 allocator, 241 "Missing alloc callbacks, loader or app should have provided them"); 242 243 VkInstance_T* instance = 244 static_cast<VkInstance_T*>(allocator->pfnAllocation( 245 allocator->pUserData, sizeof(VkInstance_T), alignof(VkInstance_T), 246 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE)); 247 if (!instance) 248 return VK_ERROR_OUT_OF_HOST_MEMORY; 249 250 instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC; 251 instance->allocator = *allocator; 252 instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC; 253 instance->next_callback_handle = 0; 254 instance->debug_report_enabled = false; 255 256 for (uint32_t i = 0; i < create_info->enabledExtensionCount; i++) { 257 if (strcmp(create_info->ppEnabledExtensionNames[i], 258 VK_EXT_DEBUG_REPORT_EXTENSION_NAME) == 0) { 259 ALOGV("Enabling " VK_EXT_DEBUG_REPORT_EXTENSION_NAME); 260 instance->debug_report_enabled = true; 261 } 262 } 263 264 *out_instance = instance; 265 return VK_SUCCESS; 266} 267 268VKAPI_ATTR 269PFN_vkVoidFunction GetInstanceProcAddr(VkInstance instance, const char* name) { 270 return instance ? 
GetInstanceProcAddr(name) : GetGlobalProcAddr(name); 271} 272 273VKAPI_ATTR 274PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) { 275 return GetInstanceProcAddr(name); 276} 277 278// ----------------------------------------------------------------------------- 279// Instance 280 281void DestroyInstance(VkInstance instance, 282 const VkAllocationCallbacks* /*allocator*/) { 283 instance->allocator.pfnFree(instance->allocator.pUserData, instance); 284} 285 286// ----------------------------------------------------------------------------- 287// PhysicalDevice 288 289VkResult EnumeratePhysicalDevices(VkInstance instance, 290 uint32_t* physical_device_count, 291 VkPhysicalDevice* physical_devices) { 292 if (physical_devices && *physical_device_count >= 1) 293 physical_devices[0] = &instance->physical_device; 294 *physical_device_count = 1; 295 return VK_SUCCESS; 296} 297 298void GetPhysicalDeviceProperties(VkPhysicalDevice, 299 VkPhysicalDeviceProperties* properties) { 300 properties->apiVersion = VK_API_VERSION; 301 properties->driverVersion = VK_MAKE_VERSION(0, 0, 1); 302 properties->vendorID = 0; 303 properties->deviceID = 0; 304 properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER; 305 strcpy(properties->deviceName, "Android Vulkan Null Driver"); 306 memset(properties->pipelineCacheUUID, 0, 307 sizeof(properties->pipelineCacheUUID)); 308} 309 310void GetPhysicalDeviceQueueFamilyProperties( 311 VkPhysicalDevice, 312 uint32_t* count, 313 VkQueueFamilyProperties* properties) { 314 if (!properties || *count > 1) 315 *count = 1; 316 if (properties && *count == 1) { 317 properties->queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | 318 VK_QUEUE_TRANSFER_BIT; 319 properties->queueCount = 1; 320 properties->timestampValidBits = 64; 321 properties->minImageTransferGranularity = VkExtent3D{1, 1, 1}; 322 } 323} 324 325void GetPhysicalDeviceMemoryProperties( 326 VkPhysicalDevice, 327 VkPhysicalDeviceMemoryProperties* properties) { 328 
properties->memoryTypeCount = 1; 329 properties->memoryTypes[0].propertyFlags = 330 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | 331 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | 332 VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | 333 VK_MEMORY_PROPERTY_HOST_CACHED_BIT; 334 properties->memoryTypes[0].heapIndex = 0; 335 properties->memoryHeapCount = 1; 336 properties->memoryHeaps[0].size = kMaxDeviceMemory; 337 properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT; 338} 339 340// ----------------------------------------------------------------------------- 341// Device 342 343VkResult CreateDevice(VkPhysicalDevice physical_device, 344 const VkDeviceCreateInfo*, 345 const VkAllocationCallbacks* allocator, 346 VkDevice* out_device) { 347 VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device); 348 if (!allocator) 349 allocator = &instance->allocator; 350 VkDevice_T* device = static_cast<VkDevice_T*>(allocator->pfnAllocation( 351 allocator->pUserData, sizeof(VkDevice_T), alignof(VkDevice_T), 352 VK_SYSTEM_ALLOCATION_SCOPE_DEVICE)); 353 if (!device) 354 return VK_ERROR_OUT_OF_HOST_MEMORY; 355 356 device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC; 357 device->allocator = *allocator; 358 device->instance = instance; 359 device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC; 360 std::fill(device->next_handle.begin(), device->next_handle.end(), 361 UINT64_C(0)); 362 363 *out_device = device; 364 return VK_SUCCESS; 365} 366 367void DestroyDevice(VkDevice device, 368 const VkAllocationCallbacks* /*allocator*/) { 369 if (!device) 370 return; 371 device->allocator.pfnFree(device->allocator.pUserData, device); 372} 373 374void GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) { 375 *queue = &device->queue; 376} 377 378// ----------------------------------------------------------------------------- 379// CommandPool 380 381struct CommandPool { 382 typedef VkCommandPool HandleType; 383 VkAllocationCallbacks allocator; 384}; 
DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool)

// Allocates a CommandPool object and records the effective allocator so later
// command-buffer allocations and the pool's own destruction can use it.
VkResult CreateCommandPool(VkDevice device,
                           const VkCommandPoolCreateInfo* /*create_info*/,
                           const VkAllocationCallbacks* allocator,
                           VkCommandPool* cmd_pool) {
    if (!allocator)
        allocator = &device->allocator;
    CommandPool* pool = static_cast<CommandPool*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(CommandPool), alignof(CommandPool),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!pool)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    pool->allocator = *allocator;
    *cmd_pool = GetHandleToCommandPool(pool);
    return VK_SUCCESS;
}

void DestroyCommandPool(VkDevice /*device*/,
                        VkCommandPool cmd_pool,
                        const VkAllocationCallbacks* /*allocator*/) {
    // Free with the callbacks captured at creation time, ignoring the ones
    // passed in here.
    CommandPool* pool = GetCommandPoolFromHandle(cmd_pool);
    pool->allocator.pfnFree(pool->allocator.pUserData, pool);
}

// -----------------------------------------------------------------------------
// CmdBuffer

// Allocates dispatchable command-buffer handles from the pool's allocator.
// The output array is null-filled first so that on failure the cleanup loop
// can stop at the first unallocated slot.
VkResult AllocateCommandBuffers(VkDevice /*device*/,
                                const VkCommandBufferAllocateInfo* alloc_info,
                                VkCommandBuffer* cmdbufs) {
    VkResult result = VK_SUCCESS;
    CommandPool& pool = *GetCommandPoolFromHandle(alloc_info->commandPool);
    std::fill(cmdbufs, cmdbufs + alloc_info->commandBufferCount, nullptr);
    for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
        cmdbufs[i] =
            static_cast<VkCommandBuffer_T*>(pool.allocator.pfnAllocation(
                pool.allocator.pUserData, sizeof(VkCommandBuffer_T),
                alignof(VkCommandBuffer_T), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
        if (!cmdbufs[i]) {
            result = VK_ERROR_OUT_OF_HOST_MEMORY;
            break;
        }
        cmdbufs[i]->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    }
    if (result != VK_SUCCESS) {
        // Unwind: buffers were allocated front-to-back, so the first null
        // entry marks the end of what needs freeing.
        for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
            if (!cmdbufs[i])
                break;
            pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
        }
    }
    return result;
}

void FreeCommandBuffers(VkDevice /*device*/,
                        VkCommandPool cmd_pool,
                        uint32_t count,
                        const VkCommandBuffer* cmdbufs) {
    CommandPool& pool = *GetCommandPoolFromHandle(cmd_pool);
    for (uint32_t i = 0; i < count; i++)
        pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
}

// -----------------------------------------------------------------------------
// DeviceMemory

// "Device" memory is a host allocation: a small header followed by the
// mappable payload.
struct DeviceMemory {
    typedef VkDeviceMemory HandleType;
    VkDeviceSize size;            // total allocation size, including header
    alignas(16) uint8_t data[0];  // payload; 16-byte aligned
};
DEFINE_OBJECT_HANDLE_CONVERSION(DeviceMemory)

VkResult AllocateMemory(VkDevice device,
                        const VkMemoryAllocateInfo* alloc_info,
                        const VkAllocationCallbacks* allocator,
                        VkDeviceMemory* mem_handle) {
    // Reject sizes that would overflow size_t once the header is added
    // (allocationSize is 64-bit even on 32-bit builds).
    if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    if (!allocator)
        allocator = &device->allocator;

    size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize);
    DeviceMemory* mem = static_cast<DeviceMemory*>(allocator->pfnAllocation(
        allocator->pUserData, size, alignof(DeviceMemory),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!mem)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    mem->size = size;
    *mem_handle = GetHandleToDeviceMemory(mem);
    return VK_SUCCESS;
}

void FreeMemory(VkDevice device,
                VkDeviceMemory mem_handle,
                const VkAllocationCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    allocator->pfnFree(allocator->pUserData, mem);
}

// Memory is always host-visible here, so mapping is just pointer arithmetic
// into the payload; the requested length and flags are ignored.
VkResult MapMemory(VkDevice,
                   VkDeviceMemory mem_handle,
                   VkDeviceSize offset,
                   VkDeviceSize,
                   VkMemoryMapFlags,
                   void** out_ptr) {
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    *out_ptr = &mem->data[0] + offset;
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
Buffer 501 502struct Buffer { 503 typedef VkBuffer HandleType; 504 VkDeviceSize size; 505}; 506DEFINE_OBJECT_HANDLE_CONVERSION(Buffer) 507 508VkResult CreateBuffer(VkDevice device, 509 const VkBufferCreateInfo* create_info, 510 const VkAllocationCallbacks* allocator, 511 VkBuffer* buffer_handle) { 512 ALOGW_IF(create_info->size > kMaxDeviceMemory, 513 "CreateBuffer: requested size 0x%" PRIx64 514 " exceeds max device memory size 0x%" PRIx64, 515 create_info->size, kMaxDeviceMemory); 516 if (!allocator) 517 allocator = &device->allocator; 518 Buffer* buffer = static_cast<Buffer*>(allocator->pfnAllocation( 519 allocator->pUserData, sizeof(Buffer), alignof(Buffer), 520 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT)); 521 if (!buffer) 522 return VK_ERROR_OUT_OF_HOST_MEMORY; 523 buffer->size = create_info->size; 524 *buffer_handle = GetHandleToBuffer(buffer); 525 return VK_SUCCESS; 526} 527 528void GetBufferMemoryRequirements(VkDevice, 529 VkBuffer buffer_handle, 530 VkMemoryRequirements* requirements) { 531 Buffer* buffer = GetBufferFromHandle(buffer_handle); 532 requirements->size = buffer->size; 533 requirements->alignment = 16; // allow fast Neon/SSE memcpy 534 requirements->memoryTypeBits = 0x1; 535} 536 537void DestroyBuffer(VkDevice device, 538 VkBuffer buffer_handle, 539 const VkAllocationCallbacks* allocator) { 540 if (!allocator) 541 allocator = &device->allocator; 542 Buffer* buffer = GetBufferFromHandle(buffer_handle); 543 allocator->pfnFree(allocator->pUserData, buffer); 544} 545 546// ----------------------------------------------------------------------------- 547// Image 548 549struct Image { 550 typedef VkImage HandleType; 551 VkDeviceSize size; 552}; 553DEFINE_OBJECT_HANDLE_CONVERSION(Image) 554 555VkResult CreateImage(VkDevice device, 556 const VkImageCreateInfo* create_info, 557 const VkAllocationCallbacks* allocator, 558 VkImage* image_handle) { 559 if (create_info->imageType != VK_IMAGE_TYPE_2D || 560 create_info->format != VK_FORMAT_R8G8B8A8_UNORM || 561 
create_info->mipLevels != 1) { 562 ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u", 563 create_info->imageType, create_info->format, 564 create_info->mipLevels); 565 return VK_ERROR_OUT_OF_HOST_MEMORY; 566 } 567 568 VkDeviceSize size = 569 VkDeviceSize(create_info->extent.width * create_info->extent.height) * 570 create_info->arrayLayers * create_info->samples * 4u; 571 ALOGW_IF(size > kMaxDeviceMemory, 572 "CreateImage: image size 0x%" PRIx64 573 " exceeds max device memory size 0x%" PRIx64, 574 size, kMaxDeviceMemory); 575 576 if (!allocator) 577 allocator = &device->allocator; 578 Image* image = static_cast<Image*>(allocator->pfnAllocation( 579 allocator->pUserData, sizeof(Image), alignof(Image), 580 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT)); 581 if (!image) 582 return VK_ERROR_OUT_OF_HOST_MEMORY; 583 image->size = size; 584 *image_handle = GetHandleToImage(image); 585 return VK_SUCCESS; 586} 587 588void GetImageMemoryRequirements(VkDevice, 589 VkImage image_handle, 590 VkMemoryRequirements* requirements) { 591 Image* image = GetImageFromHandle(image_handle); 592 requirements->size = image->size; 593 requirements->alignment = 16; // allow fast Neon/SSE memcpy 594 requirements->memoryTypeBits = 0x1; 595} 596 597void DestroyImage(VkDevice device, 598 VkImage image_handle, 599 const VkAllocationCallbacks* allocator) { 600 if (!allocator) 601 allocator = &device->allocator; 602 Image* image = GetImageFromHandle(image_handle); 603 allocator->pfnFree(allocator->pUserData, image); 604} 605 606// ----------------------------------------------------------------------------- 607// No-op types 608 609VkResult CreateBufferView(VkDevice device, 610 const VkBufferViewCreateInfo*, 611 const VkAllocationCallbacks* /*allocator*/, 612 VkBufferView* view) { 613 *view = AllocHandle<VkBufferView>(device, HandleType::kBufferView); 614 return VK_SUCCESS; 615} 616 617VkResult CreateDescriptorPool(VkDevice device, 618 const VkDescriptorPoolCreateInfo*, 619 const 
VkAllocationCallbacks* /*allocator*/,
                              VkDescriptorPool* pool) {
    *pool = AllocHandle<VkDescriptorPool>(device, HandleType::kDescriptorPool);
    return VK_SUCCESS;
}

// Descriptor sets carry no state; each gets a fresh synthesized handle.
VkResult AllocateDescriptorSets(VkDevice device,
                                const VkDescriptorSetAllocateInfo* alloc_info,
                                VkDescriptorSet* descriptor_sets) {
    for (uint32_t i = 0; i < alloc_info->descriptorSetCount; i++)
        descriptor_sets[i] =
            AllocHandle<VkDescriptorSet>(device, HandleType::kDescriptorSet);
    return VK_SUCCESS;
}

VkResult CreateDescriptorSetLayout(VkDevice device,
                                   const VkDescriptorSetLayoutCreateInfo*,
                                   const VkAllocationCallbacks* /*allocator*/,
                                   VkDescriptorSetLayout* layout) {
    *layout = AllocHandle<VkDescriptorSetLayout>(
        device, HandleType::kDescriptorSetLayout);
    return VK_SUCCESS;
}

VkResult CreateEvent(VkDevice device,
                     const VkEventCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkEvent* event) {
    *event = AllocHandle<VkEvent>(device, HandleType::kEvent);
    return VK_SUCCESS;
}

VkResult CreateFence(VkDevice device,
                     const VkFenceCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkFence* fence) {
    *fence = AllocHandle<VkFence>(device, HandleType::kFence);
    return VK_SUCCESS;
}

VkResult CreateFramebuffer(VkDevice device,
                           const VkFramebufferCreateInfo*,
                           const VkAllocationCallbacks* /*allocator*/,
                           VkFramebuffer* framebuffer) {
    *framebuffer = AllocHandle<VkFramebuffer>(device, HandleType::kFramebuffer);
    return VK_SUCCESS;
}

VkResult CreateImageView(VkDevice device,
                         const VkImageViewCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkImageView* view) {
    *view = AllocHandle<VkImageView>(device, HandleType::kImageView);
    return VK_SUCCESS;
}

VkResult CreateGraphicsPipelines(VkDevice device,
                                 VkPipelineCache,
                                 uint32_t count,
                                 const VkGraphicsPipelineCreateInfo*,
                                 const VkAllocationCallbacks* /*allocator*/,
                                 VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreateComputePipelines(VkDevice device,
                                VkPipelineCache,
                                uint32_t count,
                                const VkComputePipelineCreateInfo*,
                                const VkAllocationCallbacks* /*allocator*/,
                                VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreatePipelineCache(VkDevice device,
                             const VkPipelineCacheCreateInfo*,
                             const VkAllocationCallbacks* /*allocator*/,
                             VkPipelineCache* cache) {
    *cache = AllocHandle<VkPipelineCache>(device, HandleType::kPipelineCache);
    return VK_SUCCESS;
}

VkResult CreatePipelineLayout(VkDevice device,
                              const VkPipelineLayoutCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkPipelineLayout* layout) {
    *layout =
        AllocHandle<VkPipelineLayout>(device, HandleType::kPipelineLayout);
    return VK_SUCCESS;
}

VkResult CreateQueryPool(VkDevice device,
                         const VkQueryPoolCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkQueryPool* pool) {
    *pool = AllocHandle<VkQueryPool>(device, HandleType::kQueryPool);
    return VK_SUCCESS;
}

VkResult CreateRenderPass(VkDevice device,
                          const VkRenderPassCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkRenderPass* renderpass) {
    *renderpass = AllocHandle<VkRenderPass>(device, HandleType::kRenderPass);
    return VK_SUCCESS;
}

VkResult CreateSampler(VkDevice device,
                       const VkSamplerCreateInfo*,
                       const VkAllocationCallbacks* /*allocator*/,
                       VkSampler* sampler) {
    *sampler = AllocHandle<VkSampler>(device, HandleType::kSampler);
    return VK_SUCCESS;
}

VkResult CreateSemaphore(VkDevice device,
                         const VkSemaphoreCreateInfo*,
                         const
VkAllocationCallbacks* /*allocator*/, 741 VkSemaphore* semaphore) { 742 *semaphore = AllocHandle<VkSemaphore>(device, HandleType::kSemaphore); 743 return VK_SUCCESS; 744} 745 746VkResult CreateShaderModule(VkDevice device, 747 const VkShaderModuleCreateInfo*, 748 const VkAllocationCallbacks* /*allocator*/, 749 VkShaderModule* module) { 750 *module = AllocHandle<VkShaderModule>(device, HandleType::kShaderModule); 751 return VK_SUCCESS; 752} 753 754VkResult CreateDebugReportCallbackEXT(VkInstance instance, 755 const VkDebugReportCallbackCreateInfoEXT*, 756 const VkAllocationCallbacks*, 757 VkDebugReportCallbackEXT* callback) { 758 *callback = AllocHandle<VkDebugReportCallbackEXT>( 759 instance, HandleType::kDebugReportCallbackEXT); 760 return VK_SUCCESS; 761} 762 763VkResult GetSwapchainGrallocUsageANDROID(VkDevice, 764 VkFormat, 765 VkImageUsageFlags, 766 int* grallocUsage) { 767 // The null driver never reads or writes the gralloc buffer 768 *grallocUsage = 0; 769 return VK_SUCCESS; 770} 771 772VkResult AcquireImageANDROID(VkDevice, 773 VkImage, 774 int fence, 775 VkSemaphore, 776 VkFence) { 777 close(fence); 778 return VK_SUCCESS; 779} 780 781VkResult QueueSignalReleaseImageANDROID(VkQueue, 782 uint32_t, 783 const VkSemaphore*, 784 VkImage, 785 int* fence) { 786 *fence = -1; 787 return VK_SUCCESS; 788} 789 790// ----------------------------------------------------------------------------- 791// No-op entrypoints 792 793// clang-format off 794#pragma clang diagnostic push 795#pragma clang diagnostic ignored "-Wunused-parameter" 796 797void GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) { 798 ALOGV("TODO: vk%s", __FUNCTION__); 799} 800 801void GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) { 802 ALOGV("TODO: vk%s", __FUNCTION__); 803} 804 805VkResult GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, 
VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) { 806 ALOGV("TODO: vk%s", __FUNCTION__); 807 return VK_SUCCESS; 808} 809 810VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) { 811 ALOGV("TODO: vk%s", __FUNCTION__); 812 return VK_SUCCESS; 813} 814 815VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pCount, VkLayerProperties* pProperties) { 816 ALOGV("TODO: vk%s", __FUNCTION__); 817 return VK_SUCCESS; 818} 819 820VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pCount, VkExtensionProperties* pProperties) { 821 ALOGV("TODO: vk%s", __FUNCTION__); 822 return VK_SUCCESS; 823} 824 825VkResult QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmitInfo, VkFence fence) { 826 return VK_SUCCESS; 827} 828 829VkResult QueueWaitIdle(VkQueue queue) { 830 ALOGV("TODO: vk%s", __FUNCTION__); 831 return VK_SUCCESS; 832} 833 834VkResult DeviceWaitIdle(VkDevice device) { 835 ALOGV("TODO: vk%s", __FUNCTION__); 836 return VK_SUCCESS; 837} 838 839void UnmapMemory(VkDevice device, VkDeviceMemory mem) { 840} 841 842VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) { 843 ALOGV("TODO: vk%s", __FUNCTION__); 844 return VK_SUCCESS; 845} 846 847VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) { 848 ALOGV("TODO: vk%s", __FUNCTION__); 849 return VK_SUCCESS; 850} 851 852void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) { 853 ALOGV("TODO: vk%s", __FUNCTION__); 854} 855 856VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) { 857 return VK_SUCCESS; 858} 859 860VkResult BindImageMemory(VkDevice device, VkImage image, 
VkDeviceMemory mem, VkDeviceSize memOffset) { 861 return VK_SUCCESS; 862} 863 864void GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) { 865 ALOGV("TODO: vk%s", __FUNCTION__); 866} 867 868void GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) { 869 ALOGV("TODO: vk%s", __FUNCTION__); 870} 871 872VkResult QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) { 873 ALOGV("TODO: vk%s", __FUNCTION__); 874 return VK_SUCCESS; 875} 876 877void DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* allocator) { 878} 879 880VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) { 881 return VK_SUCCESS; 882} 883 884VkResult GetFenceStatus(VkDevice device, VkFence fence) { 885 ALOGV("TODO: vk%s", __FUNCTION__); 886 return VK_SUCCESS; 887} 888 889VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) { 890 return VK_SUCCESS; 891} 892 893void DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* allocator) { 894} 895 896void DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* allocator) { 897} 898 899VkResult GetEventStatus(VkDevice device, VkEvent event) { 900 ALOGV("TODO: vk%s", __FUNCTION__); 901 return VK_SUCCESS; 902} 903 904VkResult SetEvent(VkDevice device, VkEvent event) { 905 ALOGV("TODO: vk%s", __FUNCTION__); 906 return VK_SUCCESS; 907} 908 909VkResult ResetEvent(VkDevice device, VkEvent event) { 910 ALOGV("TODO: vk%s", __FUNCTION__); 911 return VK_SUCCESS; 912} 913 914void DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const 
VkAllocationCallbacks* allocator) {
}

// Unimplemented: logs at verbose level and reports success; pData is left
// unwritten.
VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// The Destroy* entry points below are intentional no-ops: these handle types
// are bare counter values with no backing state, and handles are never
// reclaimed (see the HandleType comment at the top of this file).
void DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* allocator) {
}

// Unimplemented: *pLayout is left unwritten.
void GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* allocator) {
}

void DestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* allocator) {
}

// Unimplemented: reports success without writing *pDataSize or pData.
VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Unimplemented: logs and reports success.
VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* allocator) {
}

void DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* allocator) {
}

// Unimplemented: there are no live descriptor sets to reset; logs and reports
// success.
VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Unimplemented: descriptor writes/copies are ignored.
void UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

// Unimplemented: logs and reports success (nothing to free — see HandleType).
VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* allocator) {
}

void DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* allocator) {
}

// Unimplemented: *pGranularity is left unwritten.
void GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

// Unimplemented: logs and reports success.
VkResult ResetCommandPool(VkDevice device, VkCommandPool cmdPool, VkCommandPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Command buffers record nothing in this driver (VkCommandBuffer_T holds only
// the dispatch magic), so begin/end trivially succeed without logging.
VkResult BeginCommandBuffer(VkCommandBuffer cmdBuffer, const VkCommandBufferBeginInfo* pBeginInfo) {
    return VK_SUCCESS;
}

VkResult EndCommandBuffer(VkCommandBuffer cmdBuffer) {
    return VK_SUCCESS;
}

// Unimplemented: logs and reports success.
VkResult ResetCommandBuffer(VkCommandBuffer cmdBuffer, VkCommandBufferResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// vkCmd* recording entry points below are silent no-ops: nothing is recorded
// into command buffers, so there is no state to update.
void CmdBindPipeline(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
}

void CmdSetViewport(VkCommandBuffer cmdBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) {
}

void CmdSetScissor(VkCommandBuffer cmdBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) {
}
// Dynamic-state and draw recording commands: all silent no-ops, since this
// driver records nothing into command buffers.
void CmdSetLineWidth(VkCommandBuffer cmdBuffer, float lineWidth) {
}

void CmdSetDepthBias(VkCommandBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
}

void CmdSetBlendConstants(VkCommandBuffer cmdBuffer, const float blendConst[4]) {
}

void CmdSetDepthBounds(VkCommandBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
}

void CmdSetStencilCompareMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
}

void CmdSetStencilWriteMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
}

void CmdSetStencilReference(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
}

void CmdBindDescriptorSets(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
}

void CmdBindIndexBuffer(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
}

void CmdBindVertexBuffers(VkCommandBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
}

void CmdDraw(VkCommandBuffer cmdBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
}

void CmdDrawIndexed(VkCommandBuffer cmdBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
}

void CmdDrawIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDrawIndexedIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

// Dispatch, transfer, synchronization, query, and render-pass recording
// commands: all silent no-ops — no work is recorded or executed.
void CmdDispatch(VkCommandBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
}

void CmdDispatchIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
}

void CmdCopyBuffer(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
}

void CmdCopyImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
}

void CmdBlitImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {
}

void CmdCopyBufferToImage(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdCopyImageToBuffer(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdUpdateBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const uint32_t* pData) {
}

void CmdFillBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
}

void CmdClearColorImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearDepthStencilImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearAttachments(VkCommandBuffer cmdBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {
}

void CmdResolveImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
}

void CmdSetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdResetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}

void CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}

void CmdBeginQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
}

void CmdEndQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
}

void CmdResetQueryPool(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount) {
}

void CmdWriteTimestamp(VkCommandBuffer cmdBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t slot) {
}

void CmdCopyQueryPoolResults(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
}

void CmdPushConstants(VkCommandBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
}

void CmdBeginRenderPass(VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {
}

void CmdNextSubpass(VkCommandBuffer cmdBuffer, VkSubpassContents contents) {
}

void CmdEndRenderPass(VkCommandBuffer cmdBuffer) {
}

void CmdExecuteCommands(VkCommandBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCommandBuffer* pCmdBuffers) {
}

// VK_EXT_debug_report: callback destruction is a no-op (handles are never
// reclaimed) and messages injected by the app are dropped.
void DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) {
}

void DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) {
}

// NOTE(review): this pop presumably pairs with a diagnostic push earlier in
// the file (outside this excerpt) that silences warnings for the stub section.
#pragma clang diagnostic pop
// clang-format on

}  // namespace null_driver