null_driver.cpp revision a9e5703e380d9d7f096d177adb792621a1e8d4ba
1/*
2 * Copyright 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <hardware/hwvulkan.h>
18
19#include <array>
20#include <algorithm>
21#include <inttypes.h>
22#include <string.h>
23
24// #define LOG_NDEBUG 0
25#include <log/log.h>
26#include <utils/Errors.h>
27
28#include "null_driver.h"
29
30using namespace null_driver;
31
// Dispatchable Vulkan object types. The Vulkan loader requires that every
// dispatchable object begin with the hwvulkan dispatch slot, whose magic
// value is checked/overwritten by the loader.
struct VkPhysicalDevice_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkInstance_T {
    hwvulkan_dispatch_t dispatch;
    // Allocation callbacks captured at vkCreateInstance time; used to free
    // the instance in DestroyInstance.
    VkAllocationCallbacks allocator;
    // The single physical device, embedded directly in the instance.
    // GetInstanceFromPhysicalDevice relies on this being a direct member.
    VkPhysicalDevice_T physical_device;
};

struct VkQueue_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkCommandBuffer_T {
    hwvulkan_dispatch_t dispatch;
};
49
namespace {
// Handles for non-dispatchable objects are either pointers, or arbitrary
// 64-bit non-zero values. We only use pointers when we need to keep state for
// the object even in a null driver. For the rest, we form a handle as:
//   [63:63] = 1 to distinguish from pointer handles*
//   [62:56] = non-zero handle type enum value
//   [55: 0] = per-handle-type incrementing counter
// * This works because virtual addresses with the high bit set are reserved
// for kernel data in all ABIs we run on.
//
// We never reclaim handles on vkDestroy*. It's not even necessary for us to
// have distinct handles for live objects, and practically speaking we won't
// ever create 2^56 objects of the same type from a single VkDevice in a null
// driver.
//
// Using a namespace here instead of 'enum class' since we want scoped
// constants but also want implicit conversions to integral types.
namespace HandleType {
enum Enum {
    kBufferView,
    kDescriptorPool,
    kDescriptorSet,
    kDescriptorSetLayout,
    kEvent,
    kFence,
    kFramebuffer,
    kImageView,
    kPipeline,
    kPipelineCache,
    kPipelineLayout,
    kQueryPool,
    kRenderPass,
    kSampler,
    kSemaphore,
    kShaderModule,

    kNumTypes
};
}  // namespace HandleType
// Mints a fresh handle of the given type using the scheme described above.
// Declared here, defined after VkDevice_T since it uses the device's
// per-type counters.
uint64_t AllocHandle(VkDevice device, HandleType::Enum type);

// Size advertised for the single fake memory heap: the whole user half of
// the address space.
const VkDeviceSize kMaxDeviceMemory = VkDeviceSize(INTPTR_MAX) + 1;

}  // anonymous namespace
94
struct VkDevice_T {
    hwvulkan_dispatch_t dispatch;
    // Allocator captured at vkCreateDevice time (the app's, or the
    // instance's if the app passed none).
    VkAllocationCallbacks allocator;
    VkInstance_T* instance;
    // The one and only queue; handed out by reference from GetDeviceQueue.
    VkQueue_T queue;
    // Per-type counters consumed by AllocHandle to mint handle values.
    std::array<uint64_t, HandleType::kNumTypes> next_handle;
};
102
103// -----------------------------------------------------------------------------
104// Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device
105// later.
106
namespace {
// Defined after nulldrv_device below, since it hands out a pointer to it.
int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device);
hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice};
}  // namespace

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
// HAL module descriptor: the well-known symbol through which the Android HAL
// machinery discovers and opens this driver.
__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = {
    .common =
        {
            .tag = HARDWARE_MODULE_TAG,
            .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
            .hal_api_version = HARDWARE_HAL_API_VERSION,
            .id = HWVULKAN_HARDWARE_MODULE_ID,
            .name = "Null Vulkan Driver",
            .author = "The Android Open Source Project",
            .methods = &nulldrv_module_methods,
        },
};
#pragma clang diagnostic pop
127
128// -----------------------------------------------------------------------------
129
130namespace {
131
132VkResult CreateInstance(const VkInstanceCreateInfo* /*create_info*/,
133                        const VkAllocationCallbacks* allocator,
134                        VkInstance* out_instance) {
135    // Assume the loader provided alloc callbacks even if the app didn't.
136    ALOG_ASSERT(
137        allocator,
138        "Missing alloc callbacks, loader or app should have provided them");
139
140    VkInstance_T* instance =
141        static_cast<VkInstance_T*>(allocator->pfnAllocation(
142            allocator->pUserData, sizeof(VkInstance_T), alignof(VkInstance_T),
143            VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE));
144    if (!instance)
145        return VK_ERROR_OUT_OF_HOST_MEMORY;
146
147    instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
148    instance->allocator = *allocator;
149    instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
150
151    *out_instance = instance;
152    return VK_SUCCESS;
153}
154
// hw_device_t::close() hook for the HAL device below.
int CloseDevice(struct hw_device_t* /*device*/) {
    // nothing to do - opening a device doesn't allocate any resources
    return 0;
}
159
// The single hwvulkan device exposed by this module; OpenDevice hands out a
// pointer to its common header. Statically allocated, so close is a no-op.
hwvulkan_device_t nulldrv_device = {
    .common =
        {
            .tag = HARDWARE_DEVICE_TAG,
            .version = HWVULKAN_DEVICE_API_VERSION_0_1,
            .module = &HAL_MODULE_INFO_SYM.common,
            .close = CloseDevice,
        },
    .EnumerateInstanceExtensionProperties =
        EnumerateInstanceExtensionProperties,
    .CreateInstance = CreateInstance,
    .GetInstanceProcAddr = GetInstanceProcAddr};
172
173int OpenDevice(const hw_module_t* /*module*/,
174               const char* id,
175               hw_device_t** device) {
176    if (strcmp(id, HWVULKAN_DEVICE_0) == 0) {
177        *device = &nulldrv_device.common;
178        return 0;
179    }
180    return -ENOENT;
181}
182
// Recovers the owning VkInstance_T from its embedded physical_device member
// by subtracting the member's offset. Valid only because physical_device is
// a direct (non-pointer) member of VkInstance_T.
VkInstance_T* GetInstanceFromPhysicalDevice(
    VkPhysicalDevice_T* physical_device) {
    return reinterpret_cast<VkInstance_T*>(
        reinterpret_cast<uintptr_t>(physical_device) -
        offsetof(VkInstance_T, physical_device));
}
189
190uint64_t AllocHandle(VkDevice device, HandleType::Enum type) {
191    const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1;
192    ALOGE_IF(device->next_handle[type] == kHandleMask,
193             "non-dispatchable handles of type=%u are about to overflow", type);
194    return (UINT64_C(1) << 63) | ((uint64_t(type) & 0x7) << 56) |
195           (device->next_handle[type]++ & kHandleMask);
196}
197
198}  // namespace
199
200namespace null_driver {
201
// Defines pointer<->handle conversions for a pointer-backed non-dispatchable
// object type T (Vulkan handle type Vk##T). The handle value is simply the
// object's address, so it can never collide with AllocHandle values, which
// always have the high bit set. The extra prototypes silence
// -Wmissing-prototypes for these non-static definitions.
#define DEFINE_OBJECT_HANDLE_CONVERSION(T)              \
    T* Get##T##FromHandle(Vk##T h);                     \
    T* Get##T##FromHandle(Vk##T h) {                    \
        return reinterpret_cast<T*>(uintptr_t(h));      \
    }                                                   \
    Vk##T GetHandleTo##T(const T* obj);                 \
    Vk##T GetHandleTo##T(const T* obj) {                \
        return Vk##T(reinterpret_cast<uintptr_t>(obj)); \
    }
211
212// -----------------------------------------------------------------------------
213// Global
214
215VkResult EnumerateInstanceExtensionProperties(const char*,
216                                              uint32_t* count,
217                                              VkExtensionProperties*) {
218    *count = 0;
219    return VK_SUCCESS;
220}
221
222PFN_vkVoidFunction GetInstanceProcAddr(VkInstance, const char* name) {
223    PFN_vkVoidFunction proc = LookupInstanceProcAddr(name);
224    if (!proc && strcmp(name, "vkGetDeviceProcAddr") == 0)
225        proc = reinterpret_cast<PFN_vkVoidFunction>(GetDeviceProcAddr);
226    return proc;
227}
228
// Resolves device-level entry points. Core functions come from the generated
// lookup table; the Android swapchain-integration entry points are resolved
// explicitly because they are not part of the generated table.
PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) {
    PFN_vkVoidFunction proc = LookupDeviceProcAddr(name);
    if (proc)
        return proc;
    if (strcmp(name, "vkGetSwapchainGrallocUsageANDROID") == 0)
        return reinterpret_cast<PFN_vkVoidFunction>(
            GetSwapchainGrallocUsageANDROID);
    if (strcmp(name, "vkAcquireImageANDROID") == 0)
        return reinterpret_cast<PFN_vkVoidFunction>(AcquireImageANDROID);
    if (strcmp(name, "vkQueueSignalReleaseImageANDROID") == 0)
        return reinterpret_cast<PFN_vkVoidFunction>(
            QueueSignalReleaseImageANDROID);
    return nullptr;
}
243
244// -----------------------------------------------------------------------------
245// Instance
246
247void DestroyInstance(VkInstance instance,
248                     const VkAllocationCallbacks* /*allocator*/) {
249    instance->allocator.pfnFree(instance->allocator.pUserData, instance);
250}
251
252// -----------------------------------------------------------------------------
253// PhysicalDevice
254
255VkResult EnumeratePhysicalDevices(VkInstance instance,
256                                  uint32_t* physical_device_count,
257                                  VkPhysicalDevice* physical_devices) {
258    if (physical_devices && *physical_device_count >= 1)
259        physical_devices[0] = &instance->physical_device;
260    *physical_device_count = 1;
261    return VK_SUCCESS;
262}
263
264void GetPhysicalDeviceProperties(VkPhysicalDevice,
265                                 VkPhysicalDeviceProperties* properties) {
266    properties->apiVersion = VK_API_VERSION;
267    properties->driverVersion = VK_MAKE_VERSION(0, 0, 1);
268    properties->vendorID = 0;
269    properties->deviceID = 0;
270    properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
271    strcpy(properties->deviceName, "Android Vulkan Null Driver");
272    memset(properties->pipelineCacheUUID, 0,
273           sizeof(properties->pipelineCacheUUID));
274}
275
276void GetPhysicalDeviceQueueFamilyProperties(
277    VkPhysicalDevice,
278    uint32_t* count,
279    VkQueueFamilyProperties* properties) {
280    if (properties) {
281        properties->queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT |
282                                 VK_QUEUE_TRANSFER_BIT;
283        properties->queueCount = 1;
284        properties->timestampValidBits = 64;
285    }
286    *count = 1;
287}
288
289void GetPhysicalDeviceMemoryProperties(
290    VkPhysicalDevice,
291    VkPhysicalDeviceMemoryProperties* properties) {
292    properties->memoryTypeCount = 1;
293    properties->memoryTypes[0].propertyFlags =
294        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
295        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
296        VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
297        VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
298    properties->memoryTypes[0].heapIndex = 0;
299    properties->memoryHeapCount = 1;
300    properties->memoryHeaps[0].size = kMaxDeviceMemory;
301    properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
302}
303
304// -----------------------------------------------------------------------------
305// Device
306
307VkResult CreateDevice(VkPhysicalDevice physical_device,
308                      const VkDeviceCreateInfo*,
309                      const VkAllocationCallbacks* allocator,
310                      VkDevice* out_device) {
311    VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device);
312    if (!allocator)
313        allocator = &instance->allocator;
314    VkDevice_T* device = static_cast<VkDevice_T*>(allocator->pfnAllocation(
315        allocator->pUserData, sizeof(VkDevice_T), alignof(VkDevice_T),
316        VK_SYSTEM_ALLOCATION_SCOPE_DEVICE));
317    if (!device)
318        return VK_ERROR_OUT_OF_HOST_MEMORY;
319
320    device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
321    device->allocator = *allocator;
322    device->instance = instance;
323    device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
324    std::fill(device->next_handle.begin(), device->next_handle.end(),
325              UINT64_C(0));
326
327    *out_device = device;
328    return VK_SUCCESS;
329}
330
331void DestroyDevice(VkDevice device,
332                   const VkAllocationCallbacks* /*allocator*/) {
333    if (!device)
334        return;
335    device->allocator.pfnFree(device->allocator.pUserData, device);
336}
337
// Returns the device's single queue regardless of family or queue index.
void GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) {
    *queue = &device->queue;
}
341
342// -----------------------------------------------------------------------------
343// CommandPool
344
// Pointer-backed command pool: only exists to remember which allocation
// callbacks to use for the command buffers carved out of it.
struct CommandPool {
    typedef VkCommandPool HandleType;
    VkAllocationCallbacks allocator;
};
DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool)
350
351VkResult CreateCommandPool(VkDevice device,
352                           const VkCommandPoolCreateInfo* /*create_info*/,
353                           const VkAllocationCallbacks* allocator,
354                           VkCommandPool* cmd_pool) {
355    if (!allocator)
356        allocator = &device->allocator;
357    CommandPool* pool = static_cast<CommandPool*>(allocator->pfnAllocation(
358        allocator->pUserData, sizeof(CommandPool), alignof(CommandPool),
359        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
360    if (!pool)
361        return VK_ERROR_OUT_OF_HOST_MEMORY;
362    pool->allocator = *allocator;
363    *cmd_pool = GetHandleToCommandPool(pool);
364    return VK_SUCCESS;
365}
366
367void DestroyCommandPool(VkDevice /*device*/,
368                        VkCommandPool cmd_pool,
369                        const VkAllocationCallbacks* /*allocator*/) {
370    CommandPool* pool = GetCommandPoolFromHandle(cmd_pool);
371    pool->allocator.pfnFree(pool->allocator.pUserData, pool);
372}
373
374// -----------------------------------------------------------------------------
375// CmdBuffer
376
377VkResult AllocateCommandBuffers(VkDevice /*device*/,
378                                const VkCommandBufferAllocateInfo* alloc_info,
379                                VkCommandBuffer* cmdbufs) {
380    VkResult result = VK_SUCCESS;
381    CommandPool& pool = *GetCommandPoolFromHandle(alloc_info->commandPool);
382    std::fill(cmdbufs, cmdbufs + alloc_info->bufferCount, nullptr);
383    for (uint32_t i = 0; i < alloc_info->bufferCount; i++) {
384        cmdbufs[i] =
385            static_cast<VkCommandBuffer_T*>(pool.allocator.pfnAllocation(
386                pool.allocator.pUserData, sizeof(VkCommandBuffer_T),
387                alignof(VkCommandBuffer_T), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
388        if (!cmdbufs[i]) {
389            result = VK_ERROR_OUT_OF_HOST_MEMORY;
390            break;
391        }
392        cmdbufs[i]->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
393    }
394    if (result != VK_SUCCESS) {
395        for (uint32_t i = 0; i < alloc_info->bufferCount; i++) {
396            if (!cmdbufs[i])
397                break;
398            pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
399        }
400    }
401    return result;
402}
403
404void FreeCommandBuffers(VkDevice /*device*/,
405                        VkCommandPool cmd_pool,
406                        uint32_t count,
407                        const VkCommandBuffer* cmdbufs) {
408    CommandPool& pool = *GetCommandPoolFromHandle(cmd_pool);
409    for (uint32_t i = 0; i < count; i++)
410        pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
411}
412
413// -----------------------------------------------------------------------------
414// DeviceMemory
415
// Pointer-backed memory object. The payload immediately follows this header
// in the same allocation.
struct DeviceMemory {
    typedef VkDeviceMemory HandleType;
    // Total allocation size in bytes, including this header.
    VkDeviceSize size;
    // Flexible payload; 16-byte aligned so mapped pointers permit fast
    // Neon/SSE copies. (Zero-length arrays are a compiler extension.)
    alignas(16) uint8_t data[0];
};
DEFINE_OBJECT_HANDLE_CONVERSION(DeviceMemory)
422
423VkResult AllocateMemory(VkDevice device,
424                        const VkMemoryAllocateInfo* alloc_info,
425                        const VkAllocationCallbacks* allocator,
426                        VkDeviceMemory* mem_handle) {
427    if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize)
428        return VK_ERROR_OUT_OF_HOST_MEMORY;
429    if (!allocator)
430        allocator = &device->allocator;
431
432    size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize);
433    DeviceMemory* mem = static_cast<DeviceMemory*>(allocator->pfnAllocation(
434        allocator->pUserData, size, alignof(DeviceMemory),
435        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
436    if (!mem)
437        return VK_ERROR_OUT_OF_HOST_MEMORY;
438    mem->size = size;
439    *mem_handle = GetHandleToDeviceMemory(mem);
440    return VK_SUCCESS;
441}
442
443void FreeMemory(VkDevice device,
444                VkDeviceMemory mem_handle,
445                const VkAllocationCallbacks* allocator) {
446    if (!allocator)
447        allocator = &device->allocator;
448    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
449    allocator->pfnFree(allocator->pUserData, mem);
450}
451
452VkResult MapMemory(VkDevice,
453                   VkDeviceMemory mem_handle,
454                   VkDeviceSize offset,
455                   VkDeviceSize,
456                   VkMemoryMapFlags,
457                   void** out_ptr) {
458    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
459    *out_ptr = &mem->data[0] + offset;
460    return VK_SUCCESS;
461}
462
463// -----------------------------------------------------------------------------
464// Buffer
465
// Pointer-backed buffer object: only the requested size is retained, for
// GetBufferMemoryRequirements.
struct Buffer {
    typedef VkBuffer HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Buffer)
471
472VkResult CreateBuffer(VkDevice device,
473                      const VkBufferCreateInfo* create_info,
474                      const VkAllocationCallbacks* allocator,
475                      VkBuffer* buffer_handle) {
476    ALOGW_IF(create_info->size > kMaxDeviceMemory,
477             "CreateBuffer: requested size 0x%" PRIx64
478             " exceeds max device memory size 0x%" PRIx64,
479             create_info->size, kMaxDeviceMemory);
480    if (!allocator)
481        allocator = &device->allocator;
482    Buffer* buffer = static_cast<Buffer*>(allocator->pfnAllocation(
483        allocator->pUserData, sizeof(Buffer), alignof(Buffer),
484        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
485    if (!buffer)
486        return VK_ERROR_OUT_OF_HOST_MEMORY;
487    buffer->size = create_info->size;
488    *buffer_handle = GetHandleToBuffer(buffer);
489    return VK_SUCCESS;
490}
491
492void GetBufferMemoryRequirements(VkDevice,
493                                 VkBuffer buffer_handle,
494                                 VkMemoryRequirements* requirements) {
495    Buffer* buffer = GetBufferFromHandle(buffer_handle);
496    requirements->size = buffer->size;
497    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
498    requirements->memoryTypeBits = 0x1;
499}
500
501void DestroyBuffer(VkDevice device,
502                   VkBuffer buffer_handle,
503                   const VkAllocationCallbacks* allocator) {
504    if (!allocator)
505        allocator = &device->allocator;
506    Buffer* buffer = GetBufferFromHandle(buffer_handle);
507    allocator->pfnFree(allocator->pUserData, buffer);
508}
509
510// -----------------------------------------------------------------------------
511// Image
512
// Pointer-backed image object: only the computed byte size is retained, for
// GetImageMemoryRequirements.
struct Image {
    typedef VkImage HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Image)
518
519VkResult CreateImage(VkDevice device,
520                     const VkImageCreateInfo* create_info,
521                     const VkAllocationCallbacks* allocator,
522                     VkImage* image_handle) {
523    if (create_info->imageType != VK_IMAGE_TYPE_2D ||
524        create_info->format != VK_FORMAT_R8G8B8A8_UNORM ||
525        create_info->mipLevels != 1) {
526        ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u",
527              create_info->imageType, create_info->format,
528              create_info->mipLevels);
529        return VK_ERROR_OUT_OF_HOST_MEMORY;
530    }
531
532    VkDeviceSize size =
533        VkDeviceSize(create_info->extent.width * create_info->extent.height) *
534        create_info->arrayLayers * create_info->samples * 4u;
535    ALOGW_IF(size > kMaxDeviceMemory,
536             "CreateImage: image size 0x%" PRIx64
537             " exceeds max device memory size 0x%" PRIx64,
538             size, kMaxDeviceMemory);
539
540    if (!allocator)
541        allocator = &device->allocator;
542    Image* image = static_cast<Image*>(allocator->pfnAllocation(
543        allocator->pUserData, sizeof(Image), alignof(Image),
544        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
545    if (!image)
546        return VK_ERROR_OUT_OF_HOST_MEMORY;
547    image->size = size;
548    *image_handle = GetHandleToImage(image);
549    return VK_SUCCESS;
550}
551
552void GetImageMemoryRequirements(VkDevice,
553                                VkImage image_handle,
554                                VkMemoryRequirements* requirements) {
555    Image* image = GetImageFromHandle(image_handle);
556    requirements->size = image->size;
557    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
558    requirements->memoryTypeBits = 0x1;
559}
560
561void DestroyImage(VkDevice device,
562                  VkImage image_handle,
563                  const VkAllocationCallbacks* allocator) {
564    if (!allocator)
565        allocator = &device->allocator;
566    Image* image = GetImageFromHandle(image_handle);
567    allocator->pfnFree(allocator->pUserData, image);
568}
569
570// -----------------------------------------------------------------------------
571// No-op types
572
// The objects below carry no driver-side state, so "creating" one just
// mints a unique integer via AllocHandle. The matching Destroy* entry
// points (further below) are no-ops, and handle values are never reclaimed.
VkResult CreateBufferView(VkDevice device,
                          const VkBufferViewCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkBufferView* view) {
    *view = AllocHandle(device, HandleType::kBufferView);
    return VK_SUCCESS;
}

VkResult CreateDescriptorPool(VkDevice device,
                              const VkDescriptorPoolCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkDescriptorPool* pool) {
    *pool = AllocHandle(device, HandleType::kDescriptorPool);
    return VK_SUCCESS;
}

VkResult AllocateDescriptorSets(VkDevice device,
                                const VkDescriptorSetAllocateInfo* alloc_info,
                                VkDescriptorSet* descriptor_sets) {
    // One fresh handle per requested set layout.
    for (uint32_t i = 0; i < alloc_info->setLayoutCount; i++)
        descriptor_sets[i] = AllocHandle(device, HandleType::kDescriptorSet);
    return VK_SUCCESS;
}

VkResult CreateDescriptorSetLayout(VkDevice device,
                                   const VkDescriptorSetLayoutCreateInfo*,
                                   const VkAllocationCallbacks* /*allocator*/,
                                   VkDescriptorSetLayout* layout) {
    *layout = AllocHandle(device, HandleType::kDescriptorSetLayout);
    return VK_SUCCESS;
}

VkResult CreateEvent(VkDevice device,
                     const VkEventCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkEvent* event) {
    *event = AllocHandle(device, HandleType::kEvent);
    return VK_SUCCESS;
}

VkResult CreateFence(VkDevice device,
                     const VkFenceCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkFence* fence) {
    *fence = AllocHandle(device, HandleType::kFence);
    return VK_SUCCESS;
}

VkResult CreateFramebuffer(VkDevice device,
                           const VkFramebufferCreateInfo*,
                           const VkAllocationCallbacks* /*allocator*/,
                           VkFramebuffer* framebuffer) {
    *framebuffer = AllocHandle(device, HandleType::kFramebuffer);
    return VK_SUCCESS;
}

VkResult CreateImageView(VkDevice device,
                         const VkImageViewCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkImageView* view) {
    *view = AllocHandle(device, HandleType::kImageView);
    return VK_SUCCESS;
}

VkResult CreateGraphicsPipelines(VkDevice device,
                                 VkPipelineCache,
                                 uint32_t count,
                                 const VkGraphicsPipelineCreateInfo*,
                                 const VkAllocationCallbacks* /*allocator*/,
                                 VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreateComputePipelines(VkDevice device,
                                VkPipelineCache,
                                uint32_t count,
                                const VkComputePipelineCreateInfo*,
                                const VkAllocationCallbacks* /*allocator*/,
                                VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreatePipelineCache(VkDevice device,
                             const VkPipelineCacheCreateInfo*,
                             const VkAllocationCallbacks* /*allocator*/,
                             VkPipelineCache* cache) {
    *cache = AllocHandle(device, HandleType::kPipelineCache);
    return VK_SUCCESS;
}

VkResult CreatePipelineLayout(VkDevice device,
                              const VkPipelineLayoutCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkPipelineLayout* layout) {
    *layout = AllocHandle(device, HandleType::kPipelineLayout);
    return VK_SUCCESS;
}

VkResult CreateQueryPool(VkDevice device,
                         const VkQueryPoolCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkQueryPool* pool) {
    *pool = AllocHandle(device, HandleType::kQueryPool);
    return VK_SUCCESS;
}

VkResult CreateRenderPass(VkDevice device,
                          const VkRenderPassCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkRenderPass* renderpass) {
    *renderpass = AllocHandle(device, HandleType::kRenderPass);
    return VK_SUCCESS;
}

VkResult CreateSampler(VkDevice device,
                       const VkSamplerCreateInfo*,
                       const VkAllocationCallbacks* /*allocator*/,
                       VkSampler* sampler) {
    *sampler = AllocHandle(device, HandleType::kSampler);
    return VK_SUCCESS;
}

VkResult CreateSemaphore(VkDevice device,
                         const VkSemaphoreCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkSemaphore* semaphore) {
    *semaphore = AllocHandle(device, HandleType::kSemaphore);
    return VK_SUCCESS;
}

VkResult CreateShaderModule(VkDevice device,
                            const VkShaderModuleCreateInfo*,
                            const VkAllocationCallbacks* /*allocator*/,
                            VkShaderModule* module) {
    *module = AllocHandle(device, HandleType::kShaderModule);
    return VK_SUCCESS;
}
714
// Android swapchain integration: report the gralloc usage bits this driver
// needs for swapchain images of the given format/usage.
VkResult GetSwapchainGrallocUsageANDROID(VkDevice,
                                         VkFormat,
                                         VkImageUsageFlags,
                                         int* grallocUsage) {
    // The null driver never reads or writes the gralloc buffer
    *grallocUsage = 0;
    return VK_SUCCESS;
}
723
724VkResult AcquireImageANDROID(VkDevice, VkImage, int fence, VkSemaphore) {
725    close(fence);
726    return VK_SUCCESS;
727}
728
// Android swapchain integration: return a release fence for the image.
// -1 means "no fence / already signalled", which is correct here because the
// null driver completes all work immediately.
VkResult QueueSignalReleaseImageANDROID(VkQueue, VkImage, int* fence) {
    *fence = -1;
    return VK_SUCCESS;
}
733
734// -----------------------------------------------------------------------------
735// No-op entrypoints
736
737// clang-format off
738#pragma clang diagnostic push
739#pragma clang diagnostic ignored "-Wunused-parameter"
740
// Unimplemented entry points: each logs at verbose level (when LOG_NDEBUG=0)
// and reports success so applications can run against the null driver.
void GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pCount, VkLayerProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pCount, VkExtensionProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Submission succeeds silently: there is no hardware to execute anything.
VkResult QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmitInfo, VkFence fence) {
    return VK_SUCCESS;
}

VkResult QueueWaitIdle(VkQueue queue) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DeviceWaitIdle(VkDevice device) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Mapping is plain pointer arithmetic (see MapMemory), so unmapping and
// flush/invalidate have nothing to do.
void UnmapMemory(VkDevice device, VkDeviceMemory mem) {
}

VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

// Binding succeeds trivially; buffers and images never touch real memory.
VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

void GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}
811
812void GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) {
813    ALOGV("TODO: vk%s", __FUNCTION__);
814}
815
816VkResult QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) {
817    ALOGV("TODO: vk%s", __FUNCTION__);
818    return VK_SUCCESS;
819}
820
// Fence objects carry no state in the null driver (handles are never
// reclaimed — see the handle-scheme comment at the top of the file),
// so destruction and reset are no-ops and waits complete immediately.
void DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* allocator) {
}

VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {
    return VK_SUCCESS;
}

// VK_SUCCESS here means "signaled", consistent with WaitForFences
// below returning immediately.
VkResult GetFenceStatus(VkDevice device, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// No work is ever pending, so every fence is treated as signaled and
// the timeout is ignored.
VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {
    return VK_SUCCESS;
}
836
// Semaphores and events are stateless handles here; nothing to free.
void DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* allocator) {
}

void DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* allocator) {
}
842
843VkResult GetEventStatus(VkDevice device, VkEvent event) {
844    ALOGV("TODO: vk%s", __FUNCTION__);
845    return VK_SUCCESS;
846}
847
// Event set/reset keep no state; both simply report success.
VkResult SetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult ResetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
857
// Query pools hold no state; destruction is a no-op.
void DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* allocator) {
}

// NOTE(review): returns VK_SUCCESS without writing pData — callers
// that trust the result will read uninitialized memory. Left as a TODO
// because the correct values depend on query execution the null driver
// doesn't perform.
VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
865
// The following Destroy* entry points are intentional no-ops: these
// objects carry no driver-side state and handles are never reclaimed
// (see the handle-scheme comment at the top of the file).
void DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* allocator) {
}

// NOTE(review): leaves *pLayout unwritten — callers read uninitialized
// memory. A correct fill needs the image's format/extent, which this
// chunk doesn't show; flagged as TODO rather than guessed at.
void GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* allocator) {
}

void DestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* allocator) {
}
881
882VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {
883    ALOGV("TODO: vk%s", __FUNCTION__);
884    return VK_SUCCESS;
885}
886
// Caches hold no data, so merging them is a successful no-op.
VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
891
// Pipeline/layout/sampler/descriptor-layout/descriptor-pool handles
// carry no state in the null driver; destruction is a no-op for all.
void DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* allocator) {
}

void DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* allocator) {
}
906
// Descriptor pools/sets are stateless: reset, update, and free all
// succeed without doing anything.
VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
920
// Framebuffers and render passes carry no state; destruction is a no-op.
void DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* allocator) {
}

void DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* allocator) {
}
926
927void GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
928    ALOGV("TODO: vk%s", __FUNCTION__);
929}
930
// Command pools/buffers record nothing (see the vkCmd* no-ops below),
// so reset/begin/end are all trivially successful.
VkResult ResetCommandPool(VkDevice device, VkCommandPool cmdPool, VkCommandPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult BeginCommandBuffer(VkCommandBuffer cmdBuffer, const VkCommandBufferBeginInfo* pBeginInfo) {
    return VK_SUCCESS;
}

VkResult EndCommandBuffer(VkCommandBuffer cmdBuffer) {
    return VK_SUCCESS;
}

VkResult ResetCommandBuffer(VkCommandBuffer cmdBuffer, VkCommandBufferResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
948
// ----------------------------------------------------------------------------
// Command-buffer recording entry points. The null driver executes no GPU
// work, so recording any command is intentionally a no-op: every vkCmd*
// below has an empty body and ignores its arguments.
// ----------------------------------------------------------------------------
void CmdBindPipeline(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
}

void CmdSetViewport(VkCommandBuffer cmdBuffer, uint32_t viewportCount, const VkViewport* pViewports) {
}

void CmdSetScissor(VkCommandBuffer cmdBuffer, uint32_t scissorCount, const VkRect2D* pScissors) {
}

void CmdSetLineWidth(VkCommandBuffer cmdBuffer, float lineWidth) {
}

void CmdSetDepthBias(VkCommandBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
}

void CmdSetBlendConstants(VkCommandBuffer cmdBuffer, const float blendConst[4]) {
}

void CmdSetDepthBounds(VkCommandBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
}

void CmdSetStencilCompareMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
}

void CmdSetStencilWriteMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
}

void CmdSetStencilReference(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
}

void CmdBindDescriptorSets(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
}

void CmdBindIndexBuffer(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
}

void CmdBindVertexBuffers(VkCommandBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
}

void CmdDraw(VkCommandBuffer cmdBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
}

void CmdDrawIndexed(VkCommandBuffer cmdBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
}

void CmdDrawIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDrawIndexedIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDispatch(VkCommandBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
}

void CmdDispatchIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
}

void CmdCopyBuffer(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
}

void CmdCopyImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
}

void CmdBlitImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {
}

void CmdCopyBufferToImage(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdCopyImageToBuffer(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdUpdateBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const uint32_t* pData) {
}

void CmdFillBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
}

void CmdClearColorImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearDepthStencilImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearAttachments(VkCommandBuffer cmdBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {
}

void CmdResolveImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
}

void CmdSetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdResetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdWaitEvents(VkCommandBuffer cmdBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags destStageMask, uint32_t memBarrierCount, const void* const* ppMemBarriers) {
}

void CmdPipelineBarrier(VkCommandBuffer cmdBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags destStageMask, VkDependencyFlags dependencyFlags, uint32_t memBarrierCount, const void* const* ppMemBarriers) {
}

void CmdBeginQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
}

void CmdEndQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
}

void CmdResetQueryPool(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount) {
}

void CmdWriteTimestamp(VkCommandBuffer cmdBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t slot) {
}

void CmdCopyQueryPoolResults(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
}

void CmdPushConstants(VkCommandBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
}

void CmdBeginRenderPass(VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {
}

void CmdNextSubpass(VkCommandBuffer cmdBuffer, VkSubpassContents contents) {
}

void CmdEndRenderPass(VkCommandBuffer cmdBuffer) {
}

void CmdExecuteCommands(VkCommandBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCommandBuffer* pCmdBuffers) {
}
1080
1081#pragma clang diagnostic pop
1082// clang-format on
1083
1084}  // namespace null_driver
1085