null_driver.cpp revision 65ab552c18df3e94c5d275294a20dd4d20dda119
/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <hardware/hwvulkan.h>

#include <algorithm>
#include <array>
#include <inttypes.h>
#include <string.h>
#include <unistd.h>

// #define LOG_NDEBUG 0
#include <log/log.h>
#include <utils/Errors.h>

#include "null_driver.h"

using namespace null_driver;

struct VkPhysicalDevice_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkInstance_T {
    hwvulkan_dispatch_t dispatch;
    VkAllocationCallbacks allocator;
    VkPhysicalDevice_T physical_device;
};

struct VkQueue_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkCommandBuffer_T {
    hwvulkan_dispatch_t dispatch;
};

namespace {
// Handles for non-dispatchable objects are either pointers, or arbitrary
// 64-bit non-zero values. We only use pointers when we need to keep state for
// the object even in a null driver. For the rest, we form a handle as:
//   [63:63] = 1 to distinguish from pointer handles*
//   [62:56] = handle type enum value
//   [55: 0] = per-handle-type incrementing counter
// * This works because virtual addresses with the high bit set are reserved
// for kernel data in all ABIs we run on.
//
// We never reclaim handles on vkDestroy*. It's not even necessary for us to
// have distinct handles for live objects, and practically speaking we won't
// ever create 2^56 objects of the same type from a single VkDevice in a null
// driver.
//
// Using a namespace here instead of 'enum class' since we want scoped
// constants but also want implicit conversions to integral types.
namespace HandleType {
enum Enum {
    kBufferView,
    kDescriptorPool,
    kDescriptorSet,
    kDescriptorSetLayout,
    kEvent,
    kFence,
    kFramebuffer,
    kImageView,
    kPipeline,
    kPipelineCache,
    kPipelineLayout,
    kQueryPool,
    kRenderPass,
    kSampler,
    kSemaphore,
    kShader,
    kShaderModule,

    kNumTypes
};
}  // namespace HandleType
uint64_t AllocHandle(VkDevice device, HandleType::Enum type);
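
// Illustration only (not used by the driver): a handle produced by
// AllocHandle (declared above, defined later in this file) decomposes
// according to the layout documented above. 'device' stands for any live
// VkDevice; the field widths come from that comment, not from any spec.
//
//   uint64_t h = AllocHandle(device, HandleType::kFence);
//   bool is_counter_handle = (h >> 63) != 0;            // always true here
//   auto type = HandleType::Enum((h >> 56) & 0x7F);     // bits [62:56]
//   uint64_t counter = h & ((UINT64_C(1) << 56) - 1);   // bits [55:0]
//
// The type field is 7 bits wide, so the enum must stay below 128 entries.
static_assert(HandleType::kNumTypes <= 0x80,
              "handle type must fit in bits [62:56]");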

// Size of the single memory heap we advertise: half the virtual address
// space, since all "device" memory in this driver is backed by host memory.
const VkDeviceSize kMaxDeviceMemory = VkDeviceSize(INTPTR_MAX) + 1;

}  // anonymous namespace

struct VkDevice_T {
    hwvulkan_dispatch_t dispatch;
    VkAllocationCallbacks allocator;
    VkInstance_T* instance;
    VkQueue_T queue;
    std::array<uint64_t, HandleType::kNumTypes> next_handle;
};

// -----------------------------------------------------------------------------
// Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device
// later.

namespace {
int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device);
hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice};
}  // namespace

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = {
    .common =
        {
            .tag = HARDWARE_MODULE_TAG,
            .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
            .hal_api_version = HARDWARE_HAL_API_VERSION,
            .id = HWVULKAN_HARDWARE_MODULE_ID,
            .name = "Null Vulkan Driver",
            .author = "The Android Open Source Project",
            .methods = &nulldrv_module_methods,
        },
};
#pragma clang diagnostic pop

// -----------------------------------------------------------------------------

namespace {

VkResult CreateInstance(const VkInstanceCreateInfo* /*create_info*/,
                        const VkAllocationCallbacks* allocator,
                        VkInstance* out_instance) {
    // Assume the loader provided alloc callbacks even if the app didn't.
    ALOG_ASSERT(
        allocator,
        "Missing alloc callbacks, loader or app should have provided them");

    VkInstance_T* instance =
        static_cast<VkInstance_T*>(allocator->pfnAllocation(
            allocator->pUserData, sizeof(VkInstance_T), alignof(VkInstance_T),
            VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE));
    if (!instance)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    instance->allocator = *allocator;
    instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;

    *out_instance = instance;
    return VK_SUCCESS;
}

int CloseDevice(struct hw_device_t* /*device*/) {
    // nothing to do - opening a device doesn't allocate any resources
    return 0;
}

hwvulkan_device_t nulldrv_device = {
    .common =
        {
            .tag = HARDWARE_DEVICE_TAG,
            .version = HWVULKAN_DEVICE_API_VERSION_0_1,
            .module = &HAL_MODULE_INFO_SYM.common,
            .close = CloseDevice,
        },
    .EnumerateInstanceExtensionProperties =
        EnumerateInstanceExtensionProperties,
    .CreateInstance = CreateInstance,
    .GetInstanceProcAddr = GetInstanceProcAddr};

int OpenDevice(const hw_module_t* /*module*/,
               const char* id,
               hw_device_t** device) {
    if (strcmp(id, HWVULKAN_DEVICE_0) == 0) {
        *device = &nulldrv_device.common;
        return 0;
    }
    return -ENOENT;
}
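
// Sketch of how a client (e.g. the Vulkan loader) is expected to reach
// OpenDevice above through the HAL module machinery. This is an assumption
// about the caller, not something this file defines; error handling omitted.
//
//   const hw_module_t* module;
//   hw_device_t* device;
//   hw_get_module(HWVULKAN_HARDWARE_MODULE_ID, &module);
//   module->methods->open(module, HWVULKAN_DEVICE_0, &device);
//   hwvulkan_device_t* vkdev = reinterpret_cast<hwvulkan_device_t*>(device);
//   // ...use vkdev->CreateInstance, vkdev->GetInstanceProcAddr, etc...
//   vkdev->common.close(&vkdev->common);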

// Recover the VkInstance_T that embeds the given physical device by walking
// back from its physical_device member to the enclosing instance.
VkInstance_T* GetInstanceFromPhysicalDevice(
    VkPhysicalDevice_T* physical_device) {
    return reinterpret_cast<VkInstance_T*>(
        reinterpret_cast<uintptr_t>(physical_device) -
        offsetof(VkInstance_T, physical_device));
}

uint64_t AllocHandle(VkDevice device, HandleType::Enum type) {
    const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1;
    ALOGE_IF(device->next_handle[type] == kHandleMask,
             "non-dispatchable handles of type=%u are about to overflow", type);
    return (UINT64_C(1) << 63) | ((uint64_t(type) & 0x7F) << 56) |
           (device->next_handle[type]++ & kHandleMask);
}

}  // namespace

namespace null_driver {

#define DEFINE_OBJECT_HANDLE_CONVERSION(T)              \
    T* Get##T##FromHandle(Vk##T h);                     \
    T* Get##T##FromHandle(Vk##T h) {                    \
        return reinterpret_cast<T*>(uintptr_t(h));      \
    }                                                   \
    Vk##T GetHandleTo##T(const T* obj);                 \
    Vk##T GetHandleTo##T(const T* obj) {                \
        return Vk##T(reinterpret_cast<uintptr_t>(obj)); \
    }
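
// For reference, DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool) expands to
// (modulo whitespace):
//
//   CommandPool* GetCommandPoolFromHandle(VkCommandPool h);
//   CommandPool* GetCommandPoolFromHandle(VkCommandPool h) {
//       return reinterpret_cast<CommandPool*>(uintptr_t(h));
//   }
//   VkCommandPool GetHandleToCommandPool(const CommandPool* obj);
//   VkCommandPool GetHandleToCommandPool(const CommandPool* obj) {
//       return VkCommandPool(reinterpret_cast<uintptr_t>(obj));
//   }
//
// i.e. the non-dispatchable handle for these stateful objects is simply the
// object's address cast to the handle type.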

// -----------------------------------------------------------------------------
// Global

VkResult EnumerateInstanceExtensionProperties(const char*,
                                              uint32_t* count,
                                              VkExtensionProperties*) {
    *count = 0;
    return VK_SUCCESS;
}

PFN_vkVoidFunction GetInstanceProcAddr(VkInstance, const char* name) {
    PFN_vkVoidFunction proc = LookupInstanceProcAddr(name);
    if (!proc && strcmp(name, "vkGetDeviceProcAddr") == 0)
        proc = reinterpret_cast<PFN_vkVoidFunction>(GetDeviceProcAddr);
    return proc;
}

PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) {
    PFN_vkVoidFunction proc = LookupDeviceProcAddr(name);
    if (proc)
        return proc;
    if (strcmp(name, "vkGetSwapchainGrallocUsageANDROID") == 0)
        return reinterpret_cast<PFN_vkVoidFunction>(
            GetSwapchainGrallocUsageANDROID);
    if (strcmp(name, "vkAcquireImageANDROID") == 0)
        return reinterpret_cast<PFN_vkVoidFunction>(AcquireImageANDROID);
    if (strcmp(name, "vkQueueSignalReleaseImageANDROID") == 0)
        return reinterpret_cast<PFN_vkVoidFunction>(
            QueueSignalReleaseImageANDROID);
    return nullptr;
}

// -----------------------------------------------------------------------------
// Instance

void DestroyInstance(VkInstance instance,
                     const VkAllocationCallbacks* /*allocator*/) {
    instance->allocator.pfnFree(instance->allocator.pUserData, instance);
}

// -----------------------------------------------------------------------------
// PhysicalDevice

VkResult EnumeratePhysicalDevices(VkInstance instance,
                                  uint32_t* physical_device_count,
                                  VkPhysicalDevice* physical_devices) {
    if (physical_devices && *physical_device_count >= 1)
        physical_devices[0] = &instance->physical_device;
    *physical_device_count = 1;
    return VK_SUCCESS;
}

void GetPhysicalDeviceProperties(VkPhysicalDevice,
                                 VkPhysicalDeviceProperties* properties) {
    properties->apiVersion = VK_API_VERSION;
    properties->driverVersion = VK_MAKE_VERSION(0, 0, 1);
    properties->vendorID = 0;
    properties->deviceID = 0;
    properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
    strcpy(properties->deviceName, "Android Vulkan Null Driver");
    memset(properties->pipelineCacheUUID, 0,
           sizeof(properties->pipelineCacheUUID));
}

void GetPhysicalDeviceQueueFamilyProperties(
    VkPhysicalDevice,
    uint32_t* count,
    VkQueueFamilyProperties* properties) {
    if (properties) {
        properties->queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT |
                                 VK_QUEUE_TRANSFER_BIT;
        properties->queueCount = 1;
        properties->timestampValidBits = 64;
    }
    *count = 1;
}

void GetPhysicalDeviceMemoryProperties(
    VkPhysicalDevice,
    VkPhysicalDeviceMemoryProperties* properties) {
    properties->memoryTypeCount = 1;
    properties->memoryTypes[0].propertyFlags =
        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
        VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
        VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    properties->memoryTypes[0].heapIndex = 0;
    properties->memoryHeapCount = 1;
    properties->memoryHeaps[0].size = kMaxDeviceMemory;
    properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
}

// -----------------------------------------------------------------------------
// Device

VkResult CreateDevice(VkPhysicalDevice physical_device,
                      const VkDeviceCreateInfo*,
                      const VkAllocationCallbacks* allocator,
                      VkDevice* out_device) {
    VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device);
    if (!allocator)
        allocator = &instance->allocator;
    VkDevice_T* device = static_cast<VkDevice_T*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(VkDevice_T), alignof(VkDevice_T),
        VK_SYSTEM_ALLOCATION_SCOPE_DEVICE));
    if (!device)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    device->allocator = *allocator;
    device->instance = instance;
    device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    std::fill(device->next_handle.begin(), device->next_handle.end(),
              UINT64_C(0));

    *out_device = device;
    return VK_SUCCESS;
}

void DestroyDevice(VkDevice device,
                   const VkAllocationCallbacks* /*allocator*/) {
    if (!device)
        return;
    device->allocator.pfnFree(device->allocator.pUserData, device);
}

void GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) {
    *queue = &device->queue;
}

// -----------------------------------------------------------------------------
// CommandPool

struct CommandPool {
    typedef VkCommandPool HandleType;
    VkAllocationCallbacks allocator;
};
DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool)

VkResult CreateCommandPool(VkDevice device,
                           const VkCommandPoolCreateInfo* /*create_info*/,
                           const VkAllocationCallbacks* allocator,
                           VkCommandPool* cmd_pool) {
    if (!allocator)
        allocator = &device->allocator;
    CommandPool* pool = static_cast<CommandPool*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(CommandPool), alignof(CommandPool),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!pool)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    pool->allocator = *allocator;
    *cmd_pool = GetHandleToCommandPool(pool);
    return VK_SUCCESS;
}

void DestroyCommandPool(VkDevice /*device*/,
                        VkCommandPool cmd_pool,
                        const VkAllocationCallbacks* /*allocator*/) {
    CommandPool* pool = GetCommandPoolFromHandle(cmd_pool);
    pool->allocator.pfnFree(pool->allocator.pUserData, pool);
}

// -----------------------------------------------------------------------------
// CmdBuffer

VkResult AllocateCommandBuffers(VkDevice /*device*/,
                                const VkCommandBufferAllocateInfo* alloc_info,
                                VkCommandBuffer* cmdbufs) {
    VkResult result = VK_SUCCESS;
    CommandPool& pool = *GetCommandPoolFromHandle(alloc_info->commandPool);
    std::fill(cmdbufs, cmdbufs + alloc_info->bufferCount, nullptr);
    for (uint32_t i = 0; i < alloc_info->bufferCount; i++) {
        cmdbufs[i] =
            static_cast<VkCommandBuffer_T*>(pool.allocator.pfnAllocation(
                pool.allocator.pUserData, sizeof(VkCommandBuffer_T),
                alignof(VkCommandBuffer_T), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
        if (!cmdbufs[i]) {
            result = VK_ERROR_OUT_OF_HOST_MEMORY;
            break;
        }
        cmdbufs[i]->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    }
    if (result != VK_SUCCESS) {
        for (uint32_t i = 0; i < alloc_info->bufferCount; i++) {
            if (!cmdbufs[i])
                break;
            pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
        }
    }
    return result;
}

void FreeCommandBuffers(VkDevice /*device*/,
                        VkCommandPool cmd_pool,
                        uint32_t count,
                        const VkCommandBuffer* cmdbufs) {
    CommandPool& pool = *GetCommandPoolFromHandle(cmd_pool);
    for (uint32_t i = 0; i < count; i++)
        pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
}

// -----------------------------------------------------------------------------
// DeviceMemory

struct DeviceMemory {
    typedef VkDeviceMemory HandleType;
    VkDeviceSize size;
    // Allocation contents are stored inline, immediately after this header.
    alignas(16) uint8_t data[0];
};
DEFINE_OBJECT_HANDLE_CONVERSION(DeviceMemory)

VkResult AllocateMemory(VkDevice device,
                        const VkMemoryAllocateInfo* alloc_info,
                        const VkAllocationCallbacks* allocator,
                        VkDeviceMemory* mem_handle) {
    if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    if (!allocator)
        allocator = &device->allocator;

    size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize);
    DeviceMemory* mem = static_cast<DeviceMemory*>(allocator->pfnAllocation(
        allocator->pUserData, size, alignof(DeviceMemory),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!mem)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    mem->size = size;
    *mem_handle = GetHandleToDeviceMemory(mem);
    return VK_SUCCESS;
}

void FreeMemory(VkDevice device,
                VkDeviceMemory mem_handle,
                const VkAllocationCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    allocator->pfnFree(allocator->pUserData, mem);
}

VkResult MapMemory(VkDevice,
                   VkDeviceMemory mem_handle,
                   VkDeviceSize offset,
                   VkDeviceSize,
                   VkMemoryMapFlags,
                   void** out_ptr) {
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    *out_ptr = &mem->data[0] + offset;
    return VK_SUCCESS;
}
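
// Sketch of the intended use of the two functions above (illustrative only;
// sType/pNext handling and error checks omitted, variable names are
// placeholders):
//
//   VkMemoryAllocateInfo info = {};
//   info.allocationSize = 4096;
//   info.memoryTypeIndex = 0;  // the single advertised memory type
//   VkDeviceMemory mem;
//   AllocateMemory(device, &info, nullptr, &mem);
//   void* ptr;
//   MapMemory(device, mem, 0, 4096, 0, &ptr);
//   // 'ptr' points directly at the storage trailing the DeviceMemory header.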

// -----------------------------------------------------------------------------
// Buffer

struct Buffer {
    typedef VkBuffer HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Buffer)

VkResult CreateBuffer(VkDevice device,
                      const VkBufferCreateInfo* create_info,
                      const VkAllocationCallbacks* allocator,
                      VkBuffer* buffer_handle) {
    ALOGW_IF(create_info->size > kMaxDeviceMemory,
             "CreateBuffer: requested size 0x%" PRIx64
             " exceeds max device memory size 0x%" PRIx64,
             create_info->size, kMaxDeviceMemory);
    if (!allocator)
        allocator = &device->allocator;
    Buffer* buffer = static_cast<Buffer*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(Buffer), alignof(Buffer),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!buffer)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    buffer->size = create_info->size;
    *buffer_handle = GetHandleToBuffer(buffer);
    return VK_SUCCESS;
}

void GetBufferMemoryRequirements(VkDevice,
                                 VkBuffer buffer_handle,
                                 VkMemoryRequirements* requirements) {
    Buffer* buffer = GetBufferFromHandle(buffer_handle);
    requirements->size = buffer->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
}

void DestroyBuffer(VkDevice device,
                   VkBuffer buffer_handle,
                   const VkAllocationCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    Buffer* buffer = GetBufferFromHandle(buffer_handle);
    allocator->pfnFree(allocator->pUserData, buffer);
}

// -----------------------------------------------------------------------------
// Image

struct Image {
    typedef VkImage HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Image)

VkResult CreateImage(VkDevice device,
                     const VkImageCreateInfo* create_info,
                     const VkAllocationCallbacks* allocator,
                     VkImage* image_handle) {
    if (create_info->imageType != VK_IMAGE_TYPE_2D ||
        create_info->format != VK_FORMAT_R8G8B8A8_UNORM ||
        create_info->mipLevels != 1) {
        ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u",
              create_info->imageType, create_info->format,
              create_info->mipLevels);
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }

    VkDeviceSize size =
        VkDeviceSize(create_info->extent.width) * create_info->extent.height *
        create_info->arrayLayers * create_info->samples * 4u;
    ALOGW_IF(size > kMaxDeviceMemory,
             "CreateImage: image size 0x%" PRIx64
             " exceeds max device memory size 0x%" PRIx64,
             size, kMaxDeviceMemory);

    if (!allocator)
        allocator = &device->allocator;
    Image* image = static_cast<Image*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(Image), alignof(Image),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!image)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    image->size = size;
    *image_handle = GetHandleToImage(image);
    return VK_SUCCESS;
}

void GetImageMemoryRequirements(VkDevice,
                                VkImage image_handle,
                                VkMemoryRequirements* requirements) {
    Image* image = GetImageFromHandle(image_handle);
    requirements->size = image->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
}

void DestroyImage(VkDevice device,
                  VkImage image_handle,
                  const VkAllocationCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    Image* image = GetImageFromHandle(image_handle);
    allocator->pfnFree(allocator->pUserData, image);
}

// -----------------------------------------------------------------------------
// No-op types

VkResult CreateBufferView(VkDevice device,
                          const VkBufferViewCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkBufferView* view) {
    *view = AllocHandle(device, HandleType::kBufferView);
    return VK_SUCCESS;
}

VkResult CreateDescriptorPool(VkDevice device,
                              const VkDescriptorPoolCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkDescriptorPool* pool) {
    *pool = AllocHandle(device, HandleType::kDescriptorPool);
    return VK_SUCCESS;
}

VkResult AllocateDescriptorSets(VkDevice device,
                                const VkDescriptorSetAllocateInfo* alloc_info,
                                VkDescriptorSet* descriptor_sets) {
    for (uint32_t i = 0; i < alloc_info->setLayoutCount; i++)
        descriptor_sets[i] = AllocHandle(device, HandleType::kDescriptorSet);
    return VK_SUCCESS;
}

VkResult CreateDescriptorSetLayout(VkDevice device,
                                   const VkDescriptorSetLayoutCreateInfo*,
                                   const VkAllocationCallbacks* /*allocator*/,
                                   VkDescriptorSetLayout* layout) {
    *layout = AllocHandle(device, HandleType::kDescriptorSetLayout);
    return VK_SUCCESS;
}

VkResult CreateEvent(VkDevice device,
                     const VkEventCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkEvent* event) {
    *event = AllocHandle(device, HandleType::kEvent);
    return VK_SUCCESS;
}

VkResult CreateFence(VkDevice device,
                     const VkFenceCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkFence* fence) {
    *fence = AllocHandle(device, HandleType::kFence);
    return VK_SUCCESS;
}

VkResult CreateFramebuffer(VkDevice device,
                           const VkFramebufferCreateInfo*,
                           const VkAllocationCallbacks* /*allocator*/,
                           VkFramebuffer* framebuffer) {
    *framebuffer = AllocHandle(device, HandleType::kFramebuffer);
    return VK_SUCCESS;
}

VkResult CreateImageView(VkDevice device,
                         const VkImageViewCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkImageView* view) {
    *view = AllocHandle(device, HandleType::kImageView);
    return VK_SUCCESS;
}

VkResult CreateGraphicsPipelines(VkDevice device,
                                 VkPipelineCache,
                                 uint32_t count,
                                 const VkGraphicsPipelineCreateInfo*,
                                 const VkAllocationCallbacks* /*allocator*/,
                                 VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreateComputePipelines(VkDevice device,
                                VkPipelineCache,
                                uint32_t count,
                                const VkComputePipelineCreateInfo*,
                                const VkAllocationCallbacks* /*allocator*/,
                                VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreatePipelineCache(VkDevice device,
                             const VkPipelineCacheCreateInfo*,
                             const VkAllocationCallbacks* /*allocator*/,
                             VkPipelineCache* cache) {
    *cache = AllocHandle(device, HandleType::kPipelineCache);
    return VK_SUCCESS;
}

VkResult CreatePipelineLayout(VkDevice device,
                              const VkPipelineLayoutCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkPipelineLayout* layout) {
    *layout = AllocHandle(device, HandleType::kPipelineLayout);
    return VK_SUCCESS;
}

VkResult CreateQueryPool(VkDevice device,
                         const VkQueryPoolCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkQueryPool* pool) {
    *pool = AllocHandle(device, HandleType::kQueryPool);
    return VK_SUCCESS;
}

VkResult CreateRenderPass(VkDevice device,
                          const VkRenderPassCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkRenderPass* renderpass) {
    *renderpass = AllocHandle(device, HandleType::kRenderPass);
    return VK_SUCCESS;
}

VkResult CreateSampler(VkDevice device,
                       const VkSamplerCreateInfo*,
                       const VkAllocationCallbacks* /*allocator*/,
                       VkSampler* sampler) {
    *sampler = AllocHandle(device, HandleType::kSampler);
    return VK_SUCCESS;
}

VkResult CreateSemaphore(VkDevice device,
                         const VkSemaphoreCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkSemaphore* semaphore) {
    *semaphore = AllocHandle(device, HandleType::kSemaphore);
    return VK_SUCCESS;
}

VkResult CreateShader(VkDevice device,
                      const VkShaderCreateInfo*,
                      const VkAllocationCallbacks* /*allocator*/,
                      VkShader* shader) {
    *shader = AllocHandle(device, HandleType::kShader);
    return VK_SUCCESS;
}

VkResult CreateShaderModule(VkDevice device,
                            const VkShaderModuleCreateInfo*,
                            const VkAllocationCallbacks* /*allocator*/,
                            VkShaderModule* module) {
    *module = AllocHandle(device, HandleType::kShaderModule);
    return VK_SUCCESS;
}

VkResult GetSwapchainGrallocUsageANDROID(VkDevice,
                                         VkFormat,
                                         VkImageUsageFlags,
                                         int* grallocUsage) {
    // The null driver never reads or writes the gralloc buffer
    *grallocUsage = 0;
    return VK_SUCCESS;
}

VkResult AcquireImageANDROID(VkDevice, VkImage, int fence, VkSemaphore) {
    // Nothing to wait on in a null driver; just take ownership of the fence
    // fd and close it (if one was provided).
    if (fence >= 0)
        close(fence);
    return VK_SUCCESS;
}

VkResult QueueSignalReleaseImageANDROID(VkQueue, VkImage, int* fence) {
    *fence = -1;
    return VK_SUCCESS;
}
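
// Rough sketch of how the Android swapchain machinery is expected to use the
// three entry points above (an assumption about the caller, not something
// this file defines or enforces; variable names are placeholders):
//
//   int usage;
//   GetSwapchainGrallocUsageANDROID(device, format, image_usage, &usage);
//   // ...allocate gralloc buffers with 'usage', create a VkImage for each...
//   AcquireImageANDROID(device, image, dequeue_fence_fd, semaphore);
//   // ...record and submit work targeting 'image'...
//   int release_fence_fd;
//   QueueSignalReleaseImageANDROID(queue, image, &release_fence_fd);
//   // ...hand release_fence_fd back to the window system with the buffer...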

// -----------------------------------------------------------------------------
// No-op entrypoints

// clang-format off
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunused-parameter"

void GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pCount, VkLayerProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pCount, VkExtensionProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmitInfo, VkFence fence) {
    return VK_SUCCESS;
}

VkResult QueueWaitIdle(VkQueue queue) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DeviceWaitIdle(VkDevice device) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void UnmapMemory(VkDevice device, VkDeviceMemory mem) {
}

VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

void GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, uint32_t samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* allocator) {
}

VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {
    return VK_SUCCESS;
}

VkResult GetFenceStatus(VkDevice device, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {
    return VK_SUCCESS;
}

void DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* allocator) {
}

void DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* allocator) {
}

VkResult GetEventStatus(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult SetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult ResetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* allocator) {
}

VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* allocator) {
}

void GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* allocator) {
}

void DestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* allocator) {
}

void DestroyShader(VkDevice device, VkShader shader, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* allocator) {
}

VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* allocator) {
}

void DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* allocator) {
}

VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* allocator) {
}

void DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* allocator) {
}

void GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult ResetCommandPool(VkDevice device, VkCommandPool cmdPool, VkCommandPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult BeginCommandBuffer(VkCommandBuffer cmdBuffer, const VkCommandBufferBeginInfo* pBeginInfo) {
    return VK_SUCCESS;
}

VkResult EndCommandBuffer(VkCommandBuffer cmdBuffer) {
    return VK_SUCCESS;
}

VkResult ResetCommandBuffer(VkCommandBuffer cmdBuffer, VkCommandBufferResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void CmdBindPipeline(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
}

void CmdSetViewport(VkCommandBuffer cmdBuffer, uint32_t viewportCount, const VkViewport* pViewports) {
}

void CmdSetScissor(VkCommandBuffer cmdBuffer, uint32_t scissorCount, const VkRect2D* pScissors) {
}

void CmdSetLineWidth(VkCommandBuffer cmdBuffer, float lineWidth) {
}

void CmdSetDepthBias(VkCommandBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
}

void CmdSetBlendConstants(VkCommandBuffer cmdBuffer, const float blendConst[4]) {
}

void CmdSetDepthBounds(VkCommandBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
}

void CmdSetStencilCompareMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
}

void CmdSetStencilWriteMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
}

void CmdSetStencilReference(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
}

void CmdBindDescriptorSets(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
}

void CmdBindIndexBuffer(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
}

void CmdBindVertexBuffers(VkCommandBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
}

void CmdDraw(VkCommandBuffer cmdBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
}

void CmdDrawIndexed(VkCommandBuffer cmdBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
}

void CmdDrawIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDrawIndexedIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDispatch(VkCommandBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
}

void CmdDispatchIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
}

void CmdCopyBuffer(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
}

void CmdCopyImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
}

void CmdBlitImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {
}

void CmdCopyBufferToImage(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdCopyImageToBuffer(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdUpdateBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const uint32_t* pData) {
}

void CmdFillBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
}

void CmdClearColorImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearDepthStencilImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearAttachments(VkCommandBuffer cmdBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {
}

void CmdResolveImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
}

void CmdSetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdResetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdWaitEvents(VkCommandBuffer cmdBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags destStageMask, uint32_t memBarrierCount, const void* const* ppMemBarriers) {
}

void CmdPipelineBarrier(VkCommandBuffer cmdBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags destStageMask, VkDependencyFlags dependencyFlags, uint32_t memBarrierCount, const void* const* ppMemBarriers) {
}

void CmdBeginQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
}

void CmdEndQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
}

void CmdResetQueryPool(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount) {
}

void CmdWriteTimestamp(VkCommandBuffer cmdBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t slot) {
}

void CmdCopyQueryPoolResults(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
}

void CmdPushConstants(VkCommandBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
}

void CmdBeginRenderPass(VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {
}

void CmdNextSubpass(VkCommandBuffer cmdBuffer, VkSubpassContents contents) {
}

void CmdEndRenderPass(VkCommandBuffer cmdBuffer) {
}

void CmdExecuteCommands(VkCommandBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCommandBuffer* pCmdBuffers) {
}

#pragma clang diagnostic pop
// clang-format on

}  // namespace null_driver