null_driver.cpp revision dc6d36cb7f1d35059ae8153168e379fc925cb468
/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <hardware/hwvulkan.h>

#include <inttypes.h>
#include <string.h>
#include <unistd.h>  // for close()

#include <algorithm>
#include <array>

// #define LOG_NDEBUG 0
#include <log/log.h>
#include <utils/Errors.h>

#include "null_driver.h"

using namespace null_driver;

struct VkPhysicalDevice_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkInstance_T {
    hwvulkan_dispatch_t dispatch;
    VkAllocCallbacks allocator;
    VkPhysicalDevice_T physical_device;
};

struct VkQueue_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkCmdBuffer_T {
    hwvulkan_dispatch_t dispatch;
};

namespace {
// Handles for non-dispatchable objects are either pointers, or arbitrary
// 64-bit non-zero values. We only use pointers when we need to keep state for
// the object even in a null driver. For the rest, we form a handle as:
//   [63:63] = 1 to distinguish from pointer handles*
//   [62:56] = handle type enum value
//   [55: 0] = per-handle-type incrementing counter
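// For example (illustrative), the third fence allocated from a device would
// be encoded as
//   (UINT64_C(1) << 63) | (uint64_t(HandleType::kFence) << 56) | 2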
// * This works because virtual addresses with the high bit set are reserved
// for kernel data in all ABIs we run on.
//
// We never reclaim handles on vkDestroy*. It's not even necessary for us to
// have distinct handles for live objects, and practically speaking we won't
// ever create 2^56 objects of the same type from a single VkDevice in a null
// driver.
//
// Using a namespace here instead of 'enum class' since we want scoped
// constants but also want implicit conversions to integral types.
namespace HandleType {
enum Enum {
    kBufferView,
    kDescriptorPool,
    kDescriptorSet,
    kDescriptorSetLayout,
    kEvent,
    kFence,
    kFramebuffer,
    kImageView,
    kPipeline,
    kPipelineCache,
    kPipelineLayout,
    kQueryPool,
    kRenderPass,
    kSampler,
    kSemaphore,
    kShader,
    kShaderModule,

    kNumTypes
};
}  // namespace HandleType
uint64_t AllocHandle(VkDevice device, HandleType::Enum type);

const VkDeviceSize kMaxDeviceMemory = VkDeviceSize(INTPTR_MAX) + 1;

}  // anonymous namespace

struct VkDevice_T {
    hwvulkan_dispatch_t dispatch;
    VkAllocCallbacks allocator;
    VkInstance_T* instance;
    VkQueue_T queue;
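    // Per-type counters consumed by AllocHandle() when minting handles for
    // stateless objects.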
    std::array<uint64_t, HandleType::kNumTypes> next_handle;
};

// -----------------------------------------------------------------------------
// Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device
// later.

namespace {
int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device);
hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice};
}  // namespace

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = {
    .common =
        {
            .tag = HARDWARE_MODULE_TAG,
            .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
            .hal_api_version = HARDWARE_HAL_API_VERSION,
            .id = HWVULKAN_HARDWARE_MODULE_ID,
            .name = "Null Vulkan Driver",
            .author = "The Android Open Source Project",
            .methods = &nulldrv_module_methods,
        },
};
#pragma clang diagnostic pop

// -----------------------------------------------------------------------------

namespace {

VkResult CreateInstance(const VkInstanceCreateInfo* /*create_info*/,
                        const VkAllocCallbacks* allocator,
                        VkInstance* out_instance) {
    // Assume the loader provided alloc callbacks even if the app didn't.
    ALOG_ASSERT(
        allocator,
        "Missing alloc callbacks, loader or app should have provided them");

    VkInstance_T* instance = static_cast<VkInstance_T*>(allocator->pfnAlloc(
        allocator->pUserData, sizeof(VkInstance_T), alignof(VkInstance_T),
        VK_SYSTEM_ALLOC_SCOPE_INSTANCE));
    if (!instance)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    instance->allocator = *allocator;
    instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;

    *out_instance = instance;
    return VK_SUCCESS;
}

int CloseDevice(struct hw_device_t* /*device*/) {
    // nothing to do - opening a device doesn't allocate any resources
    return 0;
}

hwvulkan_device_t nulldrv_device = {
    .common =
        {
            .tag = HARDWARE_DEVICE_TAG,
            .version = HWVULKAN_DEVICE_API_VERSION_0_1,
            .module = &HAL_MODULE_INFO_SYM.common,
            .close = CloseDevice,
        },
    .EnumerateInstanceExtensionProperties =
        EnumerateInstanceExtensionProperties,
    .CreateInstance = CreateInstance,
    .GetInstanceProcAddr = GetInstanceProcAddr};

int OpenDevice(const hw_module_t* /*module*/,
               const char* id,
               hw_device_t** device) {
    if (strcmp(id, HWVULKAN_DEVICE_0) == 0) {
        *device = &nulldrv_device.common;
        return 0;
    }
    return -ENOENT;
}

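// VkPhysicalDevice_T is embedded by value in VkInstance_T, so the owning
// instance can be recovered with pointer arithmetic instead of a separate
// lookup table.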
VkInstance_T* GetInstanceFromPhysicalDevice(
    VkPhysicalDevice_T* physical_device) {
    return reinterpret_cast<VkInstance_T*>(
        reinterpret_cast<uintptr_t>(physical_device) -
        offsetof(VkInstance_T, physical_device));
}

uint64_t AllocHandle(VkDevice device, HandleType::Enum type) {
    const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1;
    ALOGE_IF(device->next_handle[type] == kHandleMask,
             "non-dispatchable handles of type=%u are about to overflow", type);
    return (UINT64_C(1) << 63) | ((uint64_t(type) & 0x7F) << 56) |
           (device->next_handle[type]++ & kHandleMask);
}

}  // namespace

namespace null_driver {

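// Objects that carry real driver-side state (CmdPool, DeviceMemory, Buffer,
// Image) use their heap pointer as the non-dispatchable handle; these helpers
// convert between the two representations.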
#define DEFINE_OBJECT_HANDLE_CONVERSION(T)              \
    T* Get##T##FromHandle(Vk##T h);                     \
    T* Get##T##FromHandle(Vk##T h) {                    \
        return reinterpret_cast<T*>(uintptr_t(h));      \
    }                                                   \
    Vk##T GetHandleTo##T(const T* obj);                 \
    Vk##T GetHandleTo##T(const T* obj) {                \
        return Vk##T(reinterpret_cast<uintptr_t>(obj)); \
    }

// -----------------------------------------------------------------------------
// Global

VkResult EnumerateInstanceExtensionProperties(const char*,
                                              uint32_t* count,
                                              VkExtensionProperties*) {
    *count = 0;
    return VK_SUCCESS;
}

PFN_vkVoidFunction GetInstanceProcAddr(VkInstance, const char* name) {
    PFN_vkVoidFunction proc = LookupInstanceProcAddr(name);
    if (!proc && strcmp(name, "vkGetDeviceProcAddr") == 0)
        proc = reinterpret_cast<PFN_vkVoidFunction>(GetDeviceProcAddr);
    return proc;
}

PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) {
    PFN_vkVoidFunction proc = LookupDeviceProcAddr(name);
    if (proc)
        return proc;
    if (strcmp(name, "vkGetSwapchainGrallocUsageANDROID") == 0)
        return reinterpret_cast<PFN_vkVoidFunction>(
            GetSwapchainGrallocUsageANDROID);
    if (strcmp(name, "vkAcquireImageANDROID") == 0)
        return reinterpret_cast<PFN_vkVoidFunction>(AcquireImageANDROID);
    if (strcmp(name, "vkQueueSignalReleaseImageANDROID") == 0)
        return reinterpret_cast<PFN_vkVoidFunction>(
            QueueSignalReleaseImageANDROID);
    return nullptr;
}

// -----------------------------------------------------------------------------
// Instance

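// Free the instance with the callbacks captured at CreateInstance time; the
// allocator parameter passed here is ignored.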
void DestroyInstance(VkInstance instance,
                     const VkAllocCallbacks* /*allocator*/) {
    instance->allocator.pfnFree(instance->allocator.pUserData, instance);
}

// -----------------------------------------------------------------------------
// PhysicalDevice

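// The null driver exposes exactly one physical device: the one embedded in
// VkInstance_T.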
VkResult EnumeratePhysicalDevices(VkInstance instance,
                                  uint32_t* physical_device_count,
                                  VkPhysicalDevice* physical_devices) {
    if (physical_devices && *physical_device_count >= 1)
        physical_devices[0] = &instance->physical_device;
    *physical_device_count = 1;
    return VK_SUCCESS;
}

void GetPhysicalDeviceProperties(VkPhysicalDevice,
                                 VkPhysicalDeviceProperties* properties) {
    properties->apiVersion = VK_API_VERSION;
    properties->driverVersion = VK_MAKE_VERSION(0, 0, 1);
    properties->vendorId = 0;
    properties->deviceId = 0;
    properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
    strcpy(properties->deviceName, "Android Vulkan Null Driver");
    memset(properties->pipelineCacheUUID, 0,
           sizeof(properties->pipelineCacheUUID));
}

void GetPhysicalDeviceQueueFamilyProperties(
    VkPhysicalDevice,
    uint32_t* count,
    VkQueueFamilyProperties* properties) {
    if (properties) {
        properties->queueFlags =
            VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_DMA_BIT;
        properties->queueCount = 1;
        properties->timestampValidBits = 64;
    }
    *count = 1;
}

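// Advertise a single host-visible memory type backed by one heap of
// kMaxDeviceMemory bytes; AllocMemory below services it from the host
// allocator.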
void GetPhysicalDeviceMemoryProperties(
    VkPhysicalDevice,
    VkPhysicalDeviceMemoryProperties* properties) {
    properties->memoryTypeCount = 1;
    properties->memoryTypes[0].propertyFlags =
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    properties->memoryTypes[0].heapIndex = 0;
    properties->memoryHeapCount = 1;
    properties->memoryHeaps[0].size = kMaxDeviceMemory;
    properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_HOST_LOCAL_BIT;
}

// -----------------------------------------------------------------------------
// Device

VkResult CreateDevice(VkPhysicalDevice physical_device,
                      const VkDeviceCreateInfo*,
                      const VkAllocCallbacks* allocator,
                      VkDevice* out_device) {
    VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device);
    if (!allocator)
        allocator = &instance->allocator;
    VkDevice_T* device = static_cast<VkDevice_T*>(
        allocator->pfnAlloc(allocator->pUserData, sizeof(VkDevice_T),
                            alignof(VkDevice_T), VK_SYSTEM_ALLOC_SCOPE_DEVICE));
    if (!device)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    device->allocator = *allocator;
    device->instance = instance;
    device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    std::fill(device->next_handle.begin(), device->next_handle.end(),
              UINT64_C(0));

    *out_device = device;
    return VK_SUCCESS;
}

void DestroyDevice(VkDevice device, const VkAllocCallbacks* /*allocator*/) {
    if (!device)
        return;
    device->allocator.pfnFree(device->allocator.pUserData, device);
}

void GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) {
    *queue = &device->queue;
}

// -----------------------------------------------------------------------------
// CmdPool

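// A command pool only needs to remember which allocation callbacks to use for
// the command buffers allocated from it.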
struct CmdPool {
    typedef VkCmdPool HandleType;
    VkAllocCallbacks allocator;
};
DEFINE_OBJECT_HANDLE_CONVERSION(CmdPool)

VkResult CreateCommandPool(VkDevice device,
                           const VkCmdPoolCreateInfo* /*create_info*/,
                           const VkAllocCallbacks* allocator,
                           VkCmdPool* cmd_pool) {
    if (!allocator)
        allocator = &device->allocator;
    CmdPool* pool = static_cast<CmdPool*>(
        allocator->pfnAlloc(allocator->pUserData, sizeof(CmdPool),
                            alignof(CmdPool), VK_SYSTEM_ALLOC_SCOPE_OBJECT));
    if (!pool)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    pool->allocator = *allocator;
    *cmd_pool = GetHandleToCmdPool(pool);
    return VK_SUCCESS;
}

void DestroyCommandPool(VkDevice /*device*/,
                        VkCmdPool cmd_pool,
                        const VkAllocCallbacks* /*allocator*/) {
    CmdPool* pool = GetCmdPoolFromHandle(cmd_pool);
    pool->allocator.pfnFree(pool->allocator.pUserData, pool);
}

// -----------------------------------------------------------------------------
// CmdBuffer

VkResult AllocCommandBuffers(VkDevice /*device*/,
                             const VkCmdBufferAllocInfo* alloc_info,
                             VkCmdBuffer* cmdbufs) {
    VkResult result = VK_SUCCESS;
    CmdPool& pool = *GetCmdPoolFromHandle(alloc_info->cmdPool);
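    // Pre-fill with nullptr so the failure path below can free exactly the
    // buffers that were successfully allocated.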
    std::fill(cmdbufs, cmdbufs + alloc_info->bufferCount, nullptr);
    for (uint32_t i = 0; i < alloc_info->bufferCount; i++) {
        cmdbufs[i] = static_cast<VkCmdBuffer_T*>(pool.allocator.pfnAlloc(
            pool.allocator.pUserData, sizeof(VkCmdBuffer_T),
            alignof(VkCmdBuffer_T), VK_SYSTEM_ALLOC_SCOPE_OBJECT));
        if (!cmdbufs[i]) {
            result = VK_ERROR_OUT_OF_HOST_MEMORY;
            break;
        }
        cmdbufs[i]->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    }
    if (result != VK_SUCCESS) {
        for (uint32_t i = 0; i < alloc_info->bufferCount; i++) {
            if (!cmdbufs[i])
                break;
            pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
        }
    }
    return result;
}

void FreeCommandBuffers(VkDevice /*device*/,
                        VkCmdPool cmd_pool,
                        uint32_t count,
                        const VkCmdBuffer* cmdbufs) {
    CmdPool& pool = *GetCmdPoolFromHandle(cmd_pool);
    for (uint32_t i = 0; i < count; i++)
        pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
}

// -----------------------------------------------------------------------------
// DeviceMemory

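// A device memory allocation is a host allocation with a small header
// followed by the mappable payload; MapMemory returns a pointer into data[].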
struct DeviceMemory {
    typedef VkDeviceMemory HandleType;
    VkDeviceSize size;
    alignas(16) uint8_t data[0];
};
DEFINE_OBJECT_HANDLE_CONVERSION(DeviceMemory)

VkResult AllocMemory(VkDevice device,
                     const VkMemoryAllocInfo* alloc_info,
                     const VkAllocCallbacks* allocator,
                     VkDeviceMemory* mem_handle) {
    if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    if (!allocator)
        allocator = &device->allocator;

    size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize);
    DeviceMemory* mem = static_cast<DeviceMemory*>(
        allocator->pfnAlloc(allocator->pUserData, size, alignof(DeviceMemory),
                            VK_SYSTEM_ALLOC_SCOPE_OBJECT));
    if (!mem)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    mem->size = size;
    *mem_handle = GetHandleToDeviceMemory(mem);
    return VK_SUCCESS;
}

void FreeMemory(VkDevice device,
                VkDeviceMemory mem_handle,
                const VkAllocCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    allocator->pfnFree(allocator->pUserData, mem);
}

VkResult MapMemory(VkDevice,
                   VkDeviceMemory mem_handle,
                   VkDeviceSize offset,
                   VkDeviceSize,
                   VkMemoryMapFlags,
                   void** out_ptr) {
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    *out_ptr = &mem->data[0] + offset;
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// Buffer

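// Buffers only record their requested size, which is echoed back from
// GetBufferMemoryRequirements.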
struct Buffer {
    typedef VkBuffer HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Buffer)

VkResult CreateBuffer(VkDevice device,
                      const VkBufferCreateInfo* create_info,
                      const VkAllocCallbacks* allocator,
                      VkBuffer* buffer_handle) {
    ALOGW_IF(create_info->size > kMaxDeviceMemory,
             "CreateBuffer: requested size 0x%" PRIx64
             " exceeds max device memory size 0x%" PRIx64,
             create_info->size, kMaxDeviceMemory);
    if (!allocator)
        allocator = &device->allocator;
    Buffer* buffer = static_cast<Buffer*>(
        allocator->pfnAlloc(allocator->pUserData, sizeof(Buffer),
                            alignof(Buffer), VK_SYSTEM_ALLOC_SCOPE_OBJECT));
    if (!buffer)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    buffer->size = create_info->size;
    *buffer_handle = GetHandleToBuffer(buffer);
    return VK_SUCCESS;
}

void GetBufferMemoryRequirements(VkDevice,
                                 VkBuffer buffer_handle,
                                 VkMemoryRequirements* requirements) {
    Buffer* buffer = GetBufferFromHandle(buffer_handle);
    requirements->size = buffer->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
}

void DestroyBuffer(VkDevice device,
                   VkBuffer buffer_handle,
                   const VkAllocCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    Buffer* buffer = GetBufferFromHandle(buffer_handle);
    allocator->pfnFree(allocator->pUserData, buffer);
}

// -----------------------------------------------------------------------------
// Image

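// Images are restricted to single-mip 2D RGBA8; CreateImage computes a
// tightly packed 4-bytes-per-texel size for them.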
struct Image {
    typedef VkImage HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Image)

VkResult CreateImage(VkDevice device,
                     const VkImageCreateInfo* create_info,
                     const VkAllocCallbacks* allocator,
                     VkImage* image_handle) {
    if (create_info->imageType != VK_IMAGE_TYPE_2D ||
        create_info->format != VK_FORMAT_R8G8B8A8_UNORM ||
        create_info->mipLevels != 1) {
        ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u",
              create_info->imageType, create_info->format,
              create_info->mipLevels);
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }

    // Widen before multiplying so a large width*height can't overflow uint32_t.
    VkDeviceSize size =
        VkDeviceSize(create_info->extent.width) * create_info->extent.height *
        create_info->arrayLayers * create_info->samples * 4u;
    ALOGW_IF(size > kMaxDeviceMemory,
             "CreateImage: image size 0x%" PRIx64
             " exceeds max device memory size 0x%" PRIx64,
             size, kMaxDeviceMemory);

    if (!allocator)
        allocator = &device->allocator;
    Image* image = static_cast<Image*>(
        allocator->pfnAlloc(allocator->pUserData, sizeof(Image), alignof(Image),
                            VK_SYSTEM_ALLOC_SCOPE_OBJECT));
    if (!image)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    image->size = size;
    *image_handle = GetHandleToImage(image);
    return VK_SUCCESS;
}

void GetImageMemoryRequirements(VkDevice,
                                VkImage image_handle,
                                VkMemoryRequirements* requirements) {
    Image* image = GetImageFromHandle(image_handle);
    requirements->size = image->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
}

void DestroyImage(VkDevice device,
                  VkImage image_handle,
                  const VkAllocCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    Image* image = GetImageFromHandle(image_handle);
    allocator->pfnFree(allocator->pUserData, image);
}

// -----------------------------------------------------------------------------
// No-op types

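// The object types below keep no driver-side state: creation just mints a
// fresh handle via AllocHandle, and the corresponding Destroy* entrypoints
// (further down) are no-ops.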
VkResult CreateBufferView(VkDevice device,
                          const VkBufferViewCreateInfo*,
                          const VkAllocCallbacks* /*allocator*/,
                          VkBufferView* view) {
    *view = AllocHandle(device, HandleType::kBufferView);
    return VK_SUCCESS;
}

VkResult CreateDescriptorPool(VkDevice device,
                              const VkDescriptorPoolCreateInfo*,
                              const VkAllocCallbacks* /*allocator*/,
                              VkDescriptorPool* pool) {
    *pool = AllocHandle(device, HandleType::kDescriptorPool);
    return VK_SUCCESS;
}

VkResult AllocDescriptorSets(VkDevice device,
                             const VkDescriptorSetAllocInfo* alloc_info,
                             VkDescriptorSet* descriptor_sets) {
    for (uint32_t i = 0; i < alloc_info->setLayoutCount; i++)
        descriptor_sets[i] = AllocHandle(device, HandleType::kDescriptorSet);
    return VK_SUCCESS;
}

VkResult CreateDescriptorSetLayout(VkDevice device,
                                   const VkDescriptorSetLayoutCreateInfo*,
                                   const VkAllocCallbacks* /*allocator*/,
                                   VkDescriptorSetLayout* layout) {
    *layout = AllocHandle(device, HandleType::kDescriptorSetLayout);
    return VK_SUCCESS;
}

VkResult CreateEvent(VkDevice device,
                     const VkEventCreateInfo*,
                     const VkAllocCallbacks* /*allocator*/,
                     VkEvent* event) {
    *event = AllocHandle(device, HandleType::kEvent);
    return VK_SUCCESS;
}

VkResult CreateFence(VkDevice device,
                     const VkFenceCreateInfo*,
                     const VkAllocCallbacks* /*allocator*/,
                     VkFence* fence) {
    *fence = AllocHandle(device, HandleType::kFence);
    return VK_SUCCESS;
}

VkResult CreateFramebuffer(VkDevice device,
                           const VkFramebufferCreateInfo*,
                           const VkAllocCallbacks* /*allocator*/,
                           VkFramebuffer* framebuffer) {
    *framebuffer = AllocHandle(device, HandleType::kFramebuffer);
    return VK_SUCCESS;
}

VkResult CreateImageView(VkDevice device,
                         const VkImageViewCreateInfo*,
                         const VkAllocCallbacks* /*allocator*/,
                         VkImageView* view) {
    *view = AllocHandle(device, HandleType::kImageView);
    return VK_SUCCESS;
}

VkResult CreateGraphicsPipelines(VkDevice device,
                                 VkPipelineCache,
                                 uint32_t count,
                                 const VkGraphicsPipelineCreateInfo*,
                                 const VkAllocCallbacks* /*allocator*/,
                                 VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreateComputePipelines(VkDevice device,
                                VkPipelineCache,
                                uint32_t count,
                                const VkComputePipelineCreateInfo*,
                                const VkAllocCallbacks* /*allocator*/,
                                VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreatePipelineCache(VkDevice device,
                             const VkPipelineCacheCreateInfo*,
                             const VkAllocCallbacks* /*allocator*/,
                             VkPipelineCache* cache) {
    *cache = AllocHandle(device, HandleType::kPipelineCache);
    return VK_SUCCESS;
}

VkResult CreatePipelineLayout(VkDevice device,
                              const VkPipelineLayoutCreateInfo*,
                              const VkAllocCallbacks* /*allocator*/,
                              VkPipelineLayout* layout) {
    *layout = AllocHandle(device, HandleType::kPipelineLayout);
    return VK_SUCCESS;
}

VkResult CreateQueryPool(VkDevice device,
                         const VkQueryPoolCreateInfo*,
                         const VkAllocCallbacks* /*allocator*/,
                         VkQueryPool* pool) {
    *pool = AllocHandle(device, HandleType::kQueryPool);
    return VK_SUCCESS;
}

VkResult CreateRenderPass(VkDevice device,
                          const VkRenderPassCreateInfo*,
                          const VkAllocCallbacks* /*allocator*/,
                          VkRenderPass* renderpass) {
    *renderpass = AllocHandle(device, HandleType::kRenderPass);
    return VK_SUCCESS;
}

VkResult CreateSampler(VkDevice device,
                       const VkSamplerCreateInfo*,
                       const VkAllocCallbacks* /*allocator*/,
                       VkSampler* sampler) {
    *sampler = AllocHandle(device, HandleType::kSampler);
    return VK_SUCCESS;
}

VkResult CreateSemaphore(VkDevice device,
                         const VkSemaphoreCreateInfo*,
                         const VkAllocCallbacks* /*allocator*/,
                         VkSemaphore* semaphore) {
    *semaphore = AllocHandle(device, HandleType::kSemaphore);
    return VK_SUCCESS;
}

VkResult CreateShader(VkDevice device,
                      const VkShaderCreateInfo*,
                      const VkAllocCallbacks* /*allocator*/,
                      VkShader* shader) {
    *shader = AllocHandle(device, HandleType::kShader);
    return VK_SUCCESS;
}

VkResult CreateShaderModule(VkDevice device,
                            const VkShaderModuleCreateInfo*,
                            const VkAllocCallbacks* /*allocator*/,
                            VkShaderModule* module) {
    *module = AllocHandle(device, HandleType::kShaderModule);
    return VK_SUCCESS;
}

VkResult GetSwapchainGrallocUsageANDROID(VkDevice,
                                         VkFormat,
                                         VkImageUsageFlags,
                                         int* grallocUsage) {
    // The null driver never reads or writes the gralloc buffer
    *grallocUsage = 0;
    return VK_SUCCESS;
}

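// The acquire fence fd is owned by the driver; since the null driver queues
// no GPU work, it can be closed immediately. The release path reports that no
// fence is needed by returning -1.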
VkResult AcquireImageANDROID(VkDevice, VkImage, int fence, VkSemaphore) {
    if (fence != -1)
        close(fence);
    return VK_SUCCESS;
}

VkResult QueueSignalReleaseImageANDROID(VkQueue, VkImage, int* fence) {
    *fence = -1;
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// No-op entrypoints

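// Everything below either silently succeeds or logs a TODO; command recording
// and submission perform no work in the null driver.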
// clang-format off
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunused-parameter"

void GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pCount, VkLayerProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pCount, VkExtensionProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmitInfo, VkFence fence) {
    return VK_SUCCESS;
}

VkResult QueueWaitIdle(VkQueue queue) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DeviceWaitIdle(VkDevice device) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void UnmapMemory(VkDevice device, VkDeviceMemory mem) {
}

VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

void GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, uint32_t samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyFence(VkDevice device, VkFence fence, const VkAllocCallbacks* allocator) {
}

VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {
    return VK_SUCCESS;
}

VkResult GetFenceStatus(VkDevice device, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {
    return VK_SUCCESS;
}

void DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocCallbacks* allocator) {
}

void DestroyEvent(VkDevice device, VkEvent event, const VkAllocCallbacks* allocator) {
}

VkResult GetEventStatus(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult SetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult ResetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocCallbacks* allocator) {
}

VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocCallbacks* allocator) {
}

void GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocCallbacks* allocator) {
}

void DestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocCallbacks* allocator) {
}

void DestroyShader(VkDevice device, VkShader shader, const VkAllocCallbacks* allocator) {
}

void DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocCallbacks* allocator) {
}

VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocCallbacks* allocator) {
}

void DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocCallbacks* allocator) {
}

void DestroySampler(VkDevice device, VkSampler sampler, const VkAllocCallbacks* allocator) {
}

void DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocCallbacks* allocator) {
}

void DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocCallbacks* allocator) {
}

VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocCallbacks* allocator) {
}

void DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocCallbacks* allocator) {
}

void GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult ResetCommandPool(VkDevice device, VkCmdPool cmdPool, VkCmdPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult BeginCommandBuffer(VkCmdBuffer cmdBuffer, const VkCmdBufferBeginInfo* pBeginInfo) {
    return VK_SUCCESS;
}

VkResult EndCommandBuffer(VkCmdBuffer cmdBuffer) {
    return VK_SUCCESS;
}

VkResult ResetCommandBuffer(VkCmdBuffer cmdBuffer, VkCmdBufferResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void CmdBindPipeline(VkCmdBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
}

void CmdSetViewport(VkCmdBuffer cmdBuffer, uint32_t viewportCount, const VkViewport* pViewports) {
}

void CmdSetScissor(VkCmdBuffer cmdBuffer, uint32_t scissorCount, const VkRect2D* pScissors) {
}

void CmdSetLineWidth(VkCmdBuffer cmdBuffer, float lineWidth) {
}

void CmdSetDepthBias(VkCmdBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
}

void CmdSetBlendConstants(VkCmdBuffer cmdBuffer, const float blendConst[4]) {
}

void CmdSetDepthBounds(VkCmdBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
}

void CmdSetStencilCompareMask(VkCmdBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
}

void CmdSetStencilWriteMask(VkCmdBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
}

void CmdSetStencilReference(VkCmdBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
}

void CmdBindDescriptorSets(VkCmdBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
}

void CmdBindIndexBuffer(VkCmdBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
}

void CmdBindVertexBuffers(VkCmdBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
}

void CmdDraw(VkCmdBuffer cmdBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
}

void CmdDrawIndexed(VkCmdBuffer cmdBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
}

void CmdDrawIndirect(VkCmdBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDrawIndexedIndirect(VkCmdBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDispatch(VkCmdBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
}

void CmdDispatchIndirect(VkCmdBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
}

void CmdCopyBuffer(VkCmdBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
}

void CmdCopyImage(VkCmdBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
}

void CmdBlitImage(VkCmdBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {
}

void CmdCopyBufferToImage(VkCmdBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdCopyImageToBuffer(VkCmdBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdUpdateBuffer(VkCmdBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const uint32_t* pData) {
}

void CmdFillBuffer(VkCmdBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
}

void CmdClearColorImage(VkCmdBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearDepthStencilImage(VkCmdBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearAttachments(VkCmdBuffer cmdBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {
}

void CmdResolveImage(VkCmdBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
}

void CmdSetEvent(VkCmdBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdResetEvent(VkCmdBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdWaitEvents(VkCmdBuffer cmdBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags destStageMask, uint32_t memBarrierCount, const void* const* ppMemBarriers) {
}

void CmdPipelineBarrier(VkCmdBuffer cmdBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags destStageMask, VkDependencyFlags dependencyFlags, uint32_t memBarrierCount, const void* const* ppMemBarriers) {
}

void CmdBeginQuery(VkCmdBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
}

void CmdEndQuery(VkCmdBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
}

void CmdResetQueryPool(VkCmdBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount) {
}

void CmdWriteTimestamp(VkCmdBuffer cmdBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t slot) {
}

void CmdCopyQueryPoolResults(VkCmdBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
}

void CmdPushConstants(VkCmdBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
}

void CmdBeginRenderPass(VkCmdBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkRenderPassContents contents) {
}

void CmdNextSubpass(VkCmdBuffer cmdBuffer, VkRenderPassContents contents) {
}

void CmdEndRenderPass(VkCmdBuffer cmdBuffer) {
}

void CmdExecuteCommands(VkCmdBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCmdBuffer* pCmdBuffers) {
}

#pragma clang diagnostic pop
// clang-format on

}  // namespace null_driver