// null_driver.cpp revision a3a7a1d37bdfad56245b75edac49f8aceded321d
1/*
2 * Copyright 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
#include <hardware/hwvulkan.h>

#include <inttypes.h>
#include <string.h>
#include <unistd.h>

#include <algorithm>
#include <array>

// #define LOG_NDEBUG 0
#include <log/log.h>
#include <utils/Errors.h>

#include "null_driver.h"
29
30using namespace null_driver;
31
// Dispatchable object types. The Vulkan loader requires every dispatchable
// handle to begin with an hwvulkan_dispatch_t so it can install its dispatch
// table; each struct's magic field is set to HWVULKAN_DISPATCH_MAGIC at
// creation time.
struct VkPhysicalDevice_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkInstance_T {
    hwvulkan_dispatch_t dispatch;
    const VkAllocCallbacks* alloc;  // callbacks used for all host allocations
    VkPhysicalDevice_T physical_device;  // the single device, embedded inline
};

struct VkQueue_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkCmdBuffer_T {
    hwvulkan_dispatch_t dispatch;
};
49
namespace {
// Handles for non-dispatchable objects are either pointers, or arbitrary
// 64-bit non-zero values. We only use pointers when we need to keep state for
// the object even in a null driver. For the rest, we form a handle as:
//   [63:63] = 1 to distinguish from pointer handles*
//   [62:56] = handle type enum value (7-bit field; note kBufferView is 0)
//   [55: 0] = per-handle-type incrementing counter
// * This works because virtual addresses with the high bit set are reserved
// for kernel data in all ABIs we run on.
//
// We never reclaim handles on vkDestroy*. It's not even necessary for us to
// have distinct handles for live objects, and practically speaking we won't
// ever create 2^56 objects of the same type from a single VkDevice in a null
// driver.
//
// Using a namespace here instead of 'enum class' since we want scoped
// constants but also want implicit conversions to integral types.
namespace HandleType {
enum Enum {
    kBufferView,
    kCmdPool,
    kDescriptorPool,
    kDescriptorSet,
    kDescriptorSetLayout,
    kEvent,
    kFence,
    kFramebuffer,
    kImageView,
    kPipeline,
    kPipelineCache,
    kPipelineLayout,
    kQueryPool,
    kRenderPass,
    kSampler,
    kSemaphore,
    kShader,
    kShaderModule,

    kNumTypes
};
}  // namespace HandleType

// Returns a fresh handle for a stateless object of the given type, using the
// per-type counters stored on the device. Defined below.
uint64_t AllocHandle(VkDevice device, HandleType::Enum type);

// Size of the single advertised memory heap: the largest allocation that can
// still be addressed through a pointer on this platform.
const VkDeviceSize kMaxDeviceMemory = VkDeviceSize(INTPTR_MAX) + 1;

}  // anonymous namespace
96
struct VkDevice_T {
    hwvulkan_dispatch_t dispatch;
    VkInstance_T* instance;  // owning instance; provides the alloc callbacks
    VkQueue_T queue;         // the single queue returned by GetDeviceQueue
    // Per-HandleType counters consumed by AllocHandle.
    std::array<uint64_t, HandleType::kNumTypes> next_handle;
};
103
104// -----------------------------------------------------------------------------
105// Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device
106// later.
107
namespace {
// Forward declaration so the module descriptor below can reference it; the
// definition follows nulldrv_device further down.
int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device);
hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice};
}  // namespace
112
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
// HAL module descriptor. The Android hardware module loader locates this
// exported symbol by name, so it must have default visibility.
__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = {
    .common =
        {
            .tag = HARDWARE_MODULE_TAG,
            .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
            .hal_api_version = HARDWARE_HAL_API_VERSION,
            .id = HWVULKAN_HARDWARE_MODULE_ID,
            .name = "Null Vulkan Driver",
            .author = "The Android Open Source Project",
            .methods = &nulldrv_module_methods,
        },
};
#pragma clang diagnostic pop
128
129// -----------------------------------------------------------------------------
130
131namespace {
132
133VkResult CreateInstance(const VkInstanceCreateInfo* create_info,
134                        VkInstance* out_instance) {
135    // Assume the loader provided alloc callbacks even if the app didn't.
136    ALOG_ASSERT(
137        create_info->pAllocCb,
138        "Missing alloc callbacks, loader or app should have provided them");
139
140    VkInstance_T* instance =
141        static_cast<VkInstance_T*>(create_info->pAllocCb->pfnAlloc(
142            create_info->pAllocCb->pUserData, sizeof(VkInstance_T),
143            alignof(VkInstance_T), VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
144    if (!instance)
145        return VK_ERROR_OUT_OF_HOST_MEMORY;
146
147    instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
148    instance->alloc = create_info->pAllocCb;
149    instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
150
151    *out_instance = instance;
152    return VK_SUCCESS;
153}
154
// hw_device_t::close hook for the hwvulkan device.
int CloseDevice(struct hw_device_t* /*device*/) {
    // nothing to do - opening a device doesn't allocate any resources
    return 0;
}
159
// The single hwvulkan device exposed by this module. Only the bootstrap
// entrypoints are listed here; everything else is resolved dynamically via
// GetInstanceProcAddr / GetDeviceProcAddr.
hwvulkan_device_t nulldrv_device = {
    .common =
        {
            .tag = HARDWARE_DEVICE_TAG,
            .version = HWVULKAN_DEVICE_API_VERSION_0_1,
            .module = &HAL_MODULE_INFO_SYM.common,
            .close = CloseDevice,
        },
    .EnumerateInstanceExtensionProperties =
        EnumerateInstanceExtensionProperties,
    .CreateInstance = CreateInstance,
    .GetInstanceProcAddr = GetInstanceProcAddr};
172
173int OpenDevice(const hw_module_t* /*module*/,
174               const char* id,
175               hw_device_t** device) {
176    if (strcmp(id, HWVULKAN_DEVICE_0) == 0) {
177        *device = &nulldrv_device.common;
178        return 0;
179    }
180    return -ENOENT;
181}
182
// Recovers the owning VkInstance_T from the VkPhysicalDevice_T embedded
// inside it (container_of-style pointer arithmetic using offsetof).
VkInstance_T* GetInstanceFromPhysicalDevice(
    VkPhysicalDevice_T* physical_device) {
    return reinterpret_cast<VkInstance_T*>(
        reinterpret_cast<uintptr_t>(physical_device) -
        offsetof(VkInstance_T, physical_device));
}
189
190uint64_t AllocHandle(VkDevice device, HandleType::Enum type) {
191    const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1;
192    ALOGE_IF(device->next_handle[type] == kHandleMask,
193             "non-dispatchable handles of type=%u are about to overflow", type);
194    return (UINT64_C(1) << 63) | ((uint64_t(type) & 0x7) << 56) |
195           (device->next_handle[type]++ & kHandleMask);
196}
197
198}  // namespace
199
200namespace null_driver {
201
// Defines the pointer<->handle conversion helpers for a pointer-backed object
// type T, e.g. GetBufferFromHandle(VkBuffer) and GetHandleToBuffer(Buffer*).
// The extra leading declarations presumably exist to satisfy
// -Wmissing-prototypes — TODO confirm against the build flags.
#define DEFINE_OBJECT_HANDLE_CONVERSION(T)              \
    T* Get##T##FromHandle(Vk##T h);                     \
    T* Get##T##FromHandle(Vk##T h) {                    \
        return reinterpret_cast<T*>(uintptr_t(h));      \
    }                                                   \
    Vk##T GetHandleTo##T(const T* obj);                 \
    Vk##T GetHandleTo##T(const T* obj) {                \
        return Vk##T(reinterpret_cast<uintptr_t>(obj)); \
    }
211
212// -----------------------------------------------------------------------------
213// Global
214
// No instance extensions are supported; reports zero regardless of the
// (ignored) layer name.
VkResult EnumerateInstanceExtensionProperties(const char*,
                                              uint32_t* count,
                                              VkExtensionProperties*) {
    *count = 0;
    return VK_SUCCESS;
}
221
222PFN_vkVoidFunction GetInstanceProcAddr(VkInstance, const char* name) {
223    PFN_vkVoidFunction proc = LookupInstanceProcAddr(name);
224    if (!proc && strcmp(name, "vkGetDeviceProcAddr") == 0)
225        proc = reinterpret_cast<PFN_vkVoidFunction>(GetDeviceProcAddr);
226    return proc;
227}
228
229PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) {
230    PFN_vkVoidFunction proc = LookupDeviceProcAddr(name);
231    if (proc)
232        return proc;
233    if (strcmp(name, "vkGetSwapchainGrallocUsageANDROID") == 0)
234        return reinterpret_cast<PFN_vkVoidFunction>(
235            GetSwapchainGrallocUsageANDROID);
236    if (strcmp(name, "vkAcquireImageANDROID") == 0)
237        return reinterpret_cast<PFN_vkVoidFunction>(AcquireImageANDROID);
238    if (strcmp(name, "vkQueueSignalReleaseImageANDROID") == 0)
239        return reinterpret_cast<PFN_vkVoidFunction>(
240            QueueSignalReleaseImageANDROID);
241    return nullptr;
242}
243
244// -----------------------------------------------------------------------------
245// Instance
246
// Frees the instance through the same callbacks it was allocated with.
void DestroyInstance(VkInstance instance) {
    instance->alloc->pfnFree(instance->alloc->pUserData, instance);
}
250
251// -----------------------------------------------------------------------------
252// PhysicalDevice
253
254VkResult EnumeratePhysicalDevices(VkInstance instance,
255                                  uint32_t* physical_device_count,
256                                  VkPhysicalDevice* physical_devices) {
257    if (physical_devices && *physical_device_count >= 1)
258        physical_devices[0] = &instance->physical_device;
259    *physical_device_count = 1;
260    return VK_SUCCESS;
261}
262
// Fills in a minimal, non-committal identity for the null device. The
// deviceName literal is shorter than the fixed-size deviceName array, so the
// strcpy is bounded.
void GetPhysicalDeviceProperties(VkPhysicalDevice,
                                 VkPhysicalDeviceProperties* properties) {
    properties->apiVersion = VK_API_VERSION;
    properties->driverVersion = VK_MAKE_VERSION(0, 0, 1);
    properties->vendorId = 0;
    properties->deviceId = 0;
    properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
    strcpy(properties->deviceName, "Android Vulkan Null Driver");
    // All-zero pipeline cache UUID; this driver keeps no real cache state.
    memset(properties->pipelineCacheUUID, 0,
           sizeof(properties->pipelineCacheUUID));
}
274
// Advertises a single universal queue family (graphics + compute + DMA).
// NOTE(review): unlike EnumeratePhysicalDevices, the incoming *count is not
// checked before writing the output struct — this assumes callers pass
// *count >= 1 whenever properties != nullptr; confirm against the loader.
void GetPhysicalDeviceQueueFamilyProperties(
    VkPhysicalDevice,
    uint32_t* count,
    VkQueueFamilyProperties* properties) {
    if (properties) {
        properties->queueFlags =
            VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_DMA_BIT;
        properties->queueCount = 1;
        properties->timestampValidBits = 64;
    }
    *count = 1;
}
287
// Advertises one host-visible memory type backed by one host-local heap of
// kMaxDeviceMemory bytes (all "device" memory is really host memory here).
void GetPhysicalDeviceMemoryProperties(
    VkPhysicalDevice,
    VkPhysicalDeviceMemoryProperties* properties) {
    properties->memoryTypeCount = 1;
    properties->memoryTypes[0].propertyFlags =
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    properties->memoryTypes[0].heapIndex = 0;
    properties->memoryHeapCount = 1;
    properties->memoryHeaps[0].size = kMaxDeviceMemory;
    properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_HOST_LOCAL_BIT;
}
299
300// -----------------------------------------------------------------------------
301// Device
302
303VkResult CreateDevice(VkPhysicalDevice physical_device,
304                      const VkDeviceCreateInfo*,
305                      VkDevice* out_device) {
306    VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device);
307    VkDevice_T* device = static_cast<VkDevice_T*>(instance->alloc->pfnAlloc(
308        instance->alloc->pUserData, sizeof(VkDevice_T), alignof(VkDevice_T),
309        VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
310    if (!device)
311        return VK_ERROR_OUT_OF_HOST_MEMORY;
312
313    device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
314    device->instance = instance;
315    device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
316    std::fill(device->next_handle.begin(), device->next_handle.end(),
317              UINT64_C(0));
318
319    *out_device = device;
320    return VK_SUCCESS;
321}
322
// Frees the device via the owning instance's allocator. A null handle is a
// no-op.
void DestroyDevice(VkDevice device) {
    if (!device)
        return;
    const VkAllocCallbacks* alloc = device->instance->alloc;
    alloc->pfnFree(alloc->pUserData, device);
}
329
// Returns the single embedded queue; the queue family and index arguments
// are ignored.
void GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) {
    *queue = &device->queue;
}
333
334// -----------------------------------------------------------------------------
335// CmdBuffer
336
337VkResult AllocCommandBuffers(VkDevice device,
338                             const VkCmdBufferAllocInfo* alloc_info,
339                             VkCmdBuffer* cmdbufs) {
340    VkResult result = VK_SUCCESS;
341    const VkAllocCallbacks* alloc = device->instance->alloc;
342
343    std::fill(cmdbufs, cmdbufs + alloc_info->count, nullptr);
344    for (uint32_t i = 0; i < alloc_info->count; i++) {
345        cmdbufs[i] = static_cast<VkCmdBuffer_T*>(alloc->pfnAlloc(
346            alloc->pUserData, sizeof(VkCmdBuffer_T), alignof(VkCmdBuffer_T),
347            VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
348        if (!cmdbufs[i]) {
349            result = VK_ERROR_OUT_OF_HOST_MEMORY;
350            break;
351        }
352        cmdbufs[i]->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
353    }
354    if (result != VK_SUCCESS) {
355        for (uint32_t i = 0; i < alloc_info->count; i++) {
356            if (!cmdbufs[i])
357                break;
358            alloc->pfnFree(alloc->pUserData, cmdbufs[i]);
359        }
360    }
361
362    return result;
363}
364
// Frees each command buffer through the instance's allocator; the pool
// argument is ignored (pools are stateless counter handles here).
void FreeCommandBuffers(VkDevice device,
                        VkCmdPool,
                        uint32_t count,
                        const VkCmdBuffer* cmdbufs) {
    const VkAllocCallbacks* alloc = device->instance->alloc;
    for (uint32_t i = 0; i < count; i++)
        alloc->pfnFree(alloc->pUserData, cmdbufs[i]);
}
373
374// -----------------------------------------------------------------------------
375// DeviceMemory
376
// Header for a "device" memory allocation; the payload is stored inline
// immediately after it.
struct DeviceMemory {
    typedef VkDeviceMemory HandleType;
    // Total allocation size in bytes, including this header (see AllocMemory).
    VkDeviceSize size;
    // Inline payload, 16-byte aligned for fast Neon/SSE copies. Note:
    // zero-length arrays are a GNU/Clang extension, not standard C++.
    alignas(16) uint8_t data[0];
};
DEFINE_OBJECT_HANDLE_CONVERSION(DeviceMemory)
383
384VkResult AllocMemory(VkDevice device,
385                     const VkMemoryAllocInfo* alloc_info,
386                     VkDeviceMemory* mem_handle) {
387    if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize)
388        return VK_ERROR_OUT_OF_HOST_MEMORY;
389
390    const VkAllocCallbacks* alloc = device->instance->alloc;
391    size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize);
392    DeviceMemory* mem = static_cast<DeviceMemory*>(
393        alloc->pfnAlloc(alloc->pUserData, size, alignof(DeviceMemory),
394                        VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
395    if (!mem)
396        return VK_ERROR_OUT_OF_HOST_MEMORY;
397    mem->size = size;
398    *mem_handle = GetHandleToDeviceMemory(mem);
399    return VK_SUCCESS;
400}
401
// Releases a DeviceMemory allocation made by AllocMemory.
void FreeMemory(VkDevice device, VkDeviceMemory mem_handle) {
    const VkAllocCallbacks* alloc = device->instance->alloc;
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    alloc->pfnFree(alloc->pUserData, mem);
}
407
// Returns a pointer into the allocation's inline payload at the requested
// offset. The size and flags arguments are ignored; mapping never fails.
VkResult MapMemory(VkDevice,
                   VkDeviceMemory mem_handle,
                   VkDeviceSize offset,
                   VkDeviceSize,
                   VkMemoryMapFlags,
                   void** out_ptr) {
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    *out_ptr = &mem->data[0] + offset;
    return VK_SUCCESS;
}
418
419// -----------------------------------------------------------------------------
420// Buffer
421
// Pointer-backed buffer object; only the requested size is tracked.
struct Buffer {
    typedef VkBuffer HandleType;
    VkDeviceSize size;  // size requested at creation, reported by
                        // GetBufferMemoryRequirements
};
DEFINE_OBJECT_HANDLE_CONVERSION(Buffer)
427
428VkResult CreateBuffer(VkDevice device,
429                      const VkBufferCreateInfo* create_info,
430                      VkBuffer* buffer_handle) {
431    ALOGW_IF(create_info->size > kMaxDeviceMemory,
432             "CreateBuffer: requested size 0x%" PRIx64
433             " exceeds max device memory size 0x%" PRIx64,
434             create_info->size, kMaxDeviceMemory);
435
436    const VkAllocCallbacks* alloc = device->instance->alloc;
437    Buffer* buffer = static_cast<Buffer*>(
438        alloc->pfnAlloc(alloc->pUserData, sizeof(Buffer), alignof(Buffer),
439                        VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
440    if (!buffer)
441        return VK_ERROR_OUT_OF_HOST_MEMORY;
442    buffer->size = create_info->size;
443    *buffer_handle = GetHandleToBuffer(buffer);
444    return VK_SUCCESS;
445}
446
// Reports the creation-time size, a fixed 16-byte alignment, and the single
// supported memory type (bit 0).
void GetBufferMemoryRequirements(VkDevice,
                                 VkBuffer buffer_handle,
                                 VkMemoryRequirements* requirements) {
    Buffer* buffer = GetBufferFromHandle(buffer_handle);
    requirements->size = buffer->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
}
455
// Frees the pointer-backed Buffer object behind the handle.
void DestroyBuffer(VkDevice device, VkBuffer buffer_handle) {
    const VkAllocCallbacks* alloc = device->instance->alloc;
    Buffer* buffer = GetBufferFromHandle(buffer_handle);
    alloc->pfnFree(alloc->pUserData, buffer);
}
461
462// -----------------------------------------------------------------------------
463// Image
464
// Pointer-backed image object; only the computed byte size is tracked.
struct Image {
    typedef VkImage HandleType;
    VkDeviceSize size;  // computed in CreateImage, reported by
                        // GetImageMemoryRequirements
};
DEFINE_OBJECT_HANDLE_CONVERSION(Image)
470
471VkResult CreateImage(VkDevice device,
472                     const VkImageCreateInfo* create_info,
473                     VkImage* image_handle) {
474    if (create_info->imageType != VK_IMAGE_TYPE_2D ||
475        create_info->format != VK_FORMAT_R8G8B8A8_UNORM ||
476        create_info->mipLevels != 1) {
477        ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u",
478              create_info->imageType, create_info->format,
479              create_info->mipLevels);
480        return VK_ERROR_OUT_OF_HOST_MEMORY;
481    }
482
483    VkDeviceSize size =
484        VkDeviceSize(create_info->extent.width * create_info->extent.height) *
485        create_info->arrayLayers * create_info->samples * 4u;
486    ALOGW_IF(size > kMaxDeviceMemory,
487             "CreateImage: image size 0x%" PRIx64
488             " exceeds max device memory size 0x%" PRIx64,
489             size, kMaxDeviceMemory);
490
491    const VkAllocCallbacks* alloc = device->instance->alloc;
492    Image* image = static_cast<Image*>(
493        alloc->pfnAlloc(alloc->pUserData, sizeof(Image), alignof(Image),
494                        VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
495    if (!image)
496        return VK_ERROR_OUT_OF_HOST_MEMORY;
497    image->size = size;
498    *image_handle = GetHandleToImage(image);
499    return VK_SUCCESS;
500}
501
// Reports the creation-time size, a fixed 16-byte alignment, and the single
// supported memory type (bit 0).
void GetImageMemoryRequirements(VkDevice,
                                VkImage image_handle,
                                VkMemoryRequirements* requirements) {
    Image* image = GetImageFromHandle(image_handle);
    requirements->size = image->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
}
510
// Frees the pointer-backed Image object behind the handle.
void DestroyImage(VkDevice device, VkImage image_handle) {
    const VkAllocCallbacks* alloc = device->instance->alloc;
    Image* image = GetImageFromHandle(image_handle);
    alloc->pfnFree(alloc->pUserData, image);
}
516
517// -----------------------------------------------------------------------------
518// No-op types
519
// Creators for stateless object types: each simply hands back a unique
// counter-based handle from AllocHandle and keeps no per-object storage, so
// the matching Destroy* functions (below) are empty.
VkResult CreateBufferView(VkDevice device,
                          const VkBufferViewCreateInfo*,
                          VkBufferView* view) {
    *view = AllocHandle(device, HandleType::kBufferView);
    return VK_SUCCESS;
}

VkResult CreateCommandPool(VkDevice device,
                           const VkCmdPoolCreateInfo*,
                           VkCmdPool* pool) {
    *pool = AllocHandle(device, HandleType::kCmdPool);
    return VK_SUCCESS;
}

VkResult CreateDescriptorPool(VkDevice device,
                              const VkDescriptorPoolCreateInfo*,
                              VkDescriptorPool* pool) {
    *pool = AllocHandle(device, HandleType::kDescriptorPool);
    return VK_SUCCESS;
}

VkResult AllocDescriptorSets(VkDevice device,
                             const VkDescriptorSetAllocInfo* alloc_info,
                             VkDescriptorSet* descriptor_sets) {
    for (uint32_t i = 0; i < alloc_info->count; i++)
        descriptor_sets[i] = AllocHandle(device, HandleType::kDescriptorSet);
    return VK_SUCCESS;
}

VkResult CreateDescriptorSetLayout(VkDevice device,
                                   const VkDescriptorSetLayoutCreateInfo*,
                                   VkDescriptorSetLayout* layout) {
    *layout = AllocHandle(device, HandleType::kDescriptorSetLayout);
    return VK_SUCCESS;
}

VkResult CreateEvent(VkDevice device,
                     const VkEventCreateInfo*,
                     VkEvent* event) {
    *event = AllocHandle(device, HandleType::kEvent);
    return VK_SUCCESS;
}

VkResult CreateFence(VkDevice device,
                     const VkFenceCreateInfo*,
                     VkFence* fence) {
    *fence = AllocHandle(device, HandleType::kFence);
    return VK_SUCCESS;
}

VkResult CreateFramebuffer(VkDevice device,
                           const VkFramebufferCreateInfo*,
                           VkFramebuffer* framebuffer) {
    *framebuffer = AllocHandle(device, HandleType::kFramebuffer);
    return VK_SUCCESS;
}

VkResult CreateImageView(VkDevice device,
                         const VkImageViewCreateInfo*,
                         VkImageView* view) {
    *view = AllocHandle(device, HandleType::kImageView);
    return VK_SUCCESS;
}

VkResult CreateGraphicsPipelines(VkDevice device,
                                 VkPipelineCache,
                                 uint32_t count,
                                 const VkGraphicsPipelineCreateInfo*,
                                 VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreateComputePipelines(VkDevice device,
                                VkPipelineCache,
                                uint32_t count,
                                const VkComputePipelineCreateInfo*,
                                VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreatePipelineCache(VkDevice device,
                             const VkPipelineCacheCreateInfo*,
                             VkPipelineCache* cache) {
    *cache = AllocHandle(device, HandleType::kPipelineCache);
    return VK_SUCCESS;
}

VkResult CreatePipelineLayout(VkDevice device,
                              const VkPipelineLayoutCreateInfo*,
                              VkPipelineLayout* layout) {
    *layout = AllocHandle(device, HandleType::kPipelineLayout);
    return VK_SUCCESS;
}

VkResult CreateQueryPool(VkDevice device,
                         const VkQueryPoolCreateInfo*,
                         VkQueryPool* pool) {
    *pool = AllocHandle(device, HandleType::kQueryPool);
    return VK_SUCCESS;
}

VkResult CreateRenderPass(VkDevice device,
                          const VkRenderPassCreateInfo*,
                          VkRenderPass* renderpass) {
    *renderpass = AllocHandle(device, HandleType::kRenderPass);
    return VK_SUCCESS;
}

VkResult CreateSampler(VkDevice device,
                       const VkSamplerCreateInfo*,
                       VkSampler* sampler) {
    *sampler = AllocHandle(device, HandleType::kSampler);
    return VK_SUCCESS;
}

VkResult CreateSemaphore(VkDevice device,
                         const VkSemaphoreCreateInfo*,
                         VkSemaphore* semaphore) {
    *semaphore = AllocHandle(device, HandleType::kSemaphore);
    return VK_SUCCESS;
}

VkResult CreateShader(VkDevice device,
                      const VkShaderCreateInfo*,
                      VkShader* shader) {
    *shader = AllocHandle(device, HandleType::kShader);
    return VK_SUCCESS;
}

VkResult CreateShaderModule(VkDevice device,
                            const VkShaderModuleCreateInfo*,
                            VkShaderModule* module) {
    *module = AllocHandle(device, HandleType::kShaderModule);
    return VK_SUCCESS;
}
659
// Reports the gralloc usage bits the driver needs for swapchain buffers of
// the given format/usage.
VkResult GetSwapchainGrallocUsageANDROID(VkDevice,
                                         VkFormat,
                                         VkImageUsageFlags,
                                         int* grallocUsage) {
    // The null driver never reads or writes the gralloc buffer
    *grallocUsage = 0;
    return VK_SUCCESS;
}
668
669VkResult AcquireImageANDROID(VkDevice, VkImage, int fence, VkSemaphore) {
670    close(fence);
671    return VK_SUCCESS;
672}
673
// Produces the release fence for a presented image. There is no GPU work to
// wait on, so return -1, the Android convention for "no fence / already
// signalled".
VkResult QueueSignalReleaseImageANDROID(VkQueue, VkImage, int* fence) {
    *fence = -1;
    return VK_SUCCESS;
}
678
679// -----------------------------------------------------------------------------
680// No-op entrypoints
681
682// clang-format off
683#pragma clang diagnostic push
684#pragma clang diagnostic ignored "-Wunused-parameter"
685
686void GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) {
687    ALOGV("TODO: vk%s", __FUNCTION__);
688}
689
690void GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {
691    ALOGV("TODO: vk%s", __FUNCTION__);
692}
693
694void GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) {
695    ALOGV("TODO: vk%s", __FUNCTION__);
696}
697
698VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) {
699    ALOGV("TODO: vk%s", __FUNCTION__);
700    return VK_SUCCESS;
701}
702
703VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pCount, VkLayerProperties* pProperties) {
704    ALOGV("TODO: vk%s", __FUNCTION__);
705    return VK_SUCCESS;
706}
707
708VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pCount, VkExtensionProperties* pProperties) {
709    ALOGV("TODO: vk%s", __FUNCTION__);
710    return VK_SUCCESS;
711}
712
713VkResult QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmitInfo, VkFence fence) {
714    return VK_SUCCESS;
715}
716
717VkResult QueueWaitIdle(VkQueue queue) {
718    ALOGV("TODO: vk%s", __FUNCTION__);
719    return VK_SUCCESS;
720}
721
722VkResult DeviceWaitIdle(VkDevice device) {
723    ALOGV("TODO: vk%s", __FUNCTION__);
724    return VK_SUCCESS;
725}
726
727void UnmapMemory(VkDevice device, VkDeviceMemory mem) {
728}
729
730VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
731    ALOGV("TODO: vk%s", __FUNCTION__);
732    return VK_SUCCESS;
733}
734
735VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
736    ALOGV("TODO: vk%s", __FUNCTION__);
737    return VK_SUCCESS;
738}
739
740void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {
741    ALOGV("TODO: vk%s", __FUNCTION__);
742}
743
744VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) {
745    return VK_SUCCESS;
746}
747
748VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset) {
749    return VK_SUCCESS;
750}
751
752void GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
753    ALOGV("TODO: vk%s", __FUNCTION__);
754}
755
756void GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, uint32_t samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) {
757    ALOGV("TODO: vk%s", __FUNCTION__);
758}
759
760VkResult QueueBindSparseBufferMemory(VkQueue queue, VkBuffer buffer, uint32_t numBindings, const VkSparseMemoryBindInfo* pBindInfo) {
761    ALOGV("TODO: vk%s", __FUNCTION__);
762    return VK_SUCCESS;
763}
764
765VkResult QueueBindSparseImageOpaqueMemory(VkQueue queue, VkImage image, uint32_t numBindings, const VkSparseMemoryBindInfo* pBindInfo) {
766    ALOGV("TODO: vk%s", __FUNCTION__);
767    return VK_SUCCESS;
768}
769
770VkResult QueueBindSparseImageMemory(VkQueue queue, VkImage image, uint32_t numBindings, const VkSparseImageMemoryBindInfo* pBindInfo) {
771    ALOGV("TODO: vk%s", __FUNCTION__);
772    return VK_SUCCESS;
773}
774
// The Destroy* functions below are empty on purpose: their objects are
// counter-based handles from AllocHandle with no backing storage to free.
void DestroyFence(VkDevice device, VkFence fence) {
}

VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {
    return VK_SUCCESS;
}

VkResult GetFenceStatus(VkDevice device, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {
    return VK_SUCCESS;
}

void DestroySemaphore(VkDevice device, VkSemaphore semaphore) {
}

void DestroyEvent(VkDevice device, VkEvent event) {
}

VkResult GetEventStatus(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult SetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult ResetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyQueryPool(VkDevice device, VkQueryPool queryPool) {
}

VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyBufferView(VkDevice device, VkBufferView bufferView) {
}

void GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void DestroyImageView(VkDevice device, VkImageView imageView) {
}

void DestroyShaderModule(VkDevice device, VkShaderModule shaderModule) {
}

void DestroyShader(VkDevice device, VkShader shader) {
}

void DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache) {
}

VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
848
849void DestroyPipeline(VkDevice device, VkPipeline pipeline) {
850}
851
852void DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout) {
853}
854
855void DestroySampler(VkDevice device, VkSampler sampler) {
856}
857
858void DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout) {
859}
860
861void DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool) {
862}
863
864VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
865    ALOGV("TODO: vk%s", __FUNCTION__);
866    return VK_SUCCESS;
867}
868
869void UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
870    ALOGV("TODO: vk%s", __FUNCTION__);
871}
872
873VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
874    ALOGV("TODO: vk%s", __FUNCTION__);
875    return VK_SUCCESS;
876}
877
878void DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer) {
879}
880
881void DestroyRenderPass(VkDevice device, VkRenderPass renderPass) {
882}
883
884void GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
885    ALOGV("TODO: vk%s", __FUNCTION__);
886}
887
// No-op: command pools own no resources in this driver (see the handle-scheme
// comment at the top of the file — handles are never reclaimed).
void DestroyCommandPool(VkDevice device, VkCmdPool cmdPool) {
}

// Nothing is ever recorded into pool-allocated command buffers, so a pool
// reset has nothing to recycle.
VkResult ResetCommandPool(VkDevice device, VkCmdPool cmdPool, VkCmdPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Begin/End are complete implementations for this driver, not TODOs: a
// VkCmdBuffer_T holds only its dispatch pointer and records no commands, so
// there is no state to set up or finalize (hence no TODO log here).
VkResult BeginCommandBuffer(VkCmdBuffer cmdBuffer, const VkCmdBufferBeginInfo* pBeginInfo) {
    return VK_SUCCESS;
}

VkResult EndCommandBuffer(VkCmdBuffer cmdBuffer) {
    return VK_SUCCESS;
}

VkResult ResetCommandBuffer(VkCmdBuffer cmdBuffer, VkCmdBufferResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
908
// ----------------------------------------------------------------------------
// Command-recording entry points. All vkCmd* functions below are intentional
// no-ops: a null-driver command buffer (VkCmdBuffer_T, defined at the top of
// this file) carries only its dispatch pointer and records nothing, so every
// recorded command is simply discarded.
// ----------------------------------------------------------------------------

void CmdBindPipeline(VkCmdBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
}

void CmdSetViewport(VkCmdBuffer cmdBuffer, uint32_t viewportCount, const VkViewport* pViewports) {
}

void CmdSetScissor(VkCmdBuffer cmdBuffer, uint32_t scissorCount, const VkRect2D* pScissors) {
}

void CmdSetLineWidth(VkCmdBuffer cmdBuffer, float lineWidth) {
}

void CmdSetDepthBias(VkCmdBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
}

void CmdSetBlendConstants(VkCmdBuffer cmdBuffer, const float blendConst[4]) {
}

void CmdSetDepthBounds(VkCmdBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
}

void CmdSetStencilCompareMask(VkCmdBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
}

void CmdSetStencilWriteMask(VkCmdBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
}

void CmdSetStencilReference(VkCmdBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
}

void CmdBindDescriptorSets(VkCmdBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
}

void CmdBindIndexBuffer(VkCmdBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
}

void CmdBindVertexBuffers(VkCmdBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
}

void CmdDraw(VkCmdBuffer cmdBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
}

void CmdDrawIndexed(VkCmdBuffer cmdBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
}

void CmdDrawIndirect(VkCmdBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDrawIndexedIndirect(VkCmdBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDispatch(VkCmdBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
}

void CmdDispatchIndirect(VkCmdBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
}

void CmdCopyBuffer(VkCmdBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
}

void CmdCopyImage(VkCmdBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
}

void CmdBlitImage(VkCmdBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkTexFilter filter) {
}

void CmdCopyBufferToImage(VkCmdBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdCopyImageToBuffer(VkCmdBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdUpdateBuffer(VkCmdBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const uint32_t* pData) {
}

void CmdFillBuffer(VkCmdBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
}

void CmdClearColorImage(VkCmdBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearDepthStencilImage(VkCmdBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearAttachments(VkCmdBuffer cmdBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {
}

void CmdResolveImage(VkCmdBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
}

void CmdSetEvent(VkCmdBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdResetEvent(VkCmdBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdWaitEvents(VkCmdBuffer cmdBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags destStageMask, uint32_t memBarrierCount, const void* const* ppMemBarriers) {
}

void CmdPipelineBarrier(VkCmdBuffer cmdBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags destStageMask, VkBool32 byRegion, uint32_t memBarrierCount, const void* const* ppMemBarriers) {
}

void CmdBeginQuery(VkCmdBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
}

void CmdEndQuery(VkCmdBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
}

void CmdResetQueryPool(VkCmdBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount) {
}

void CmdWriteTimestamp(VkCmdBuffer cmdBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t slot) {
}

void CmdCopyQueryPoolResults(VkCmdBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
}

void CmdPushConstants(VkCmdBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
}

void CmdBeginRenderPass(VkCmdBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkRenderPassContents contents) {
}

void CmdNextSubpass(VkCmdBuffer cmdBuffer, VkRenderPassContents contents) {
}

void CmdEndRenderPass(VkCmdBuffer cmdBuffer) {
}

void CmdExecuteCommands(VkCmdBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCmdBuffer* pCmdBuffers) {
}
1040
1041#pragma clang diagnostic pop
1042// clang-format on
1043
1044}  // namespace null_driver
1045