null_driver.cpp revision ae38f7355fd03b777313e13586585a74e0dee05b
/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <hardware/hwvulkan.h>

#include <array>
#include <algorithm>
#include <errno.h>
#include <inttypes.h>
#include <string.h>
#include <unistd.h>

// #define LOG_NDEBUG 0
#include <log/log.h>
#include <utils/Errors.h>

#include "null_driver.h"

using namespace null_driver;

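// Dispatchable object types begin with hwvulkan_dispatch_t. The driver fills
// in HWVULKAN_DISPATCH_MAGIC so the Android Vulkan loader can validate the
// object and install its own dispatch pointer in that slot.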
struct VkPhysicalDevice_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkInstance_T {
    hwvulkan_dispatch_t dispatch;
    const VkAllocCallbacks* alloc;
    VkPhysicalDevice_T physical_device;
};

struct VkQueue_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkCmdBuffer_T {
    hwvulkan_dispatch_t dispatch;
};

namespace {
// Handles for non-dispatchable objects are either pointers, or arbitrary
// 64-bit non-zero values. We only use pointers when we need to keep state for
// the object even in a null driver. For the rest, we form a handle as:
//   [63:63] = 1 to distinguish from pointer handles*
//   [62:56] = non-zero handle type enum value
//   [55: 0] = per-handle-type incrementing counter
// * This works because virtual addresses with the high bit set are reserved
// for kernel data in all ABIs we run on.
//
// We never reclaim handles on vkDestroy*. It's not even necessary for us to
// have distinct handles for live objects, and practically speaking we won't
// ever create 2^56 objects of the same type from a single VkDevice in a null
// driver.
//
// Using a namespace here instead of 'enum class' since we want scoped
// constants but also want implicit conversions to integral types.
namespace HandleType {
enum Enum {
    kBufferView,
    kCmdPool,
    kDescriptorPool,
    kDescriptorSet,
    kDescriptorSetLayout,
    kEvent,
    kFence,
    kFramebuffer,
    kImageView,
    kPipeline,
    kPipelineCache,
    kPipelineLayout,
    kQueryPool,
    kRenderPass,
    kSampler,
    kSemaphore,
    kShader,
    kShaderModule,

    kNumTypes
};
}  // namespace HandleType
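// With the layout above, e.g., the third VkFence allocated from a device gets
// handle 0x8600000000000002: bit 63 set, kFence (6) in the type field, and a
// counter value of 2.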
uint64_t AllocHandle(VkDevice device, HandleType::Enum type);

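// Advertised as the size of the single memory heap, and used as a sanity
// bound when creating buffers and images.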
const VkDeviceSize kMaxDeviceMemory = VkDeviceSize(INTPTR_MAX) + 1;

}  // anonymous namespace

struct VkDevice_T {
    hwvulkan_dispatch_t dispatch;
    VkInstance_T* instance;
    VkQueue_T queue;
    std::array<uint64_t, HandleType::kNumTypes> next_handle;
};

// -----------------------------------------------------------------------------
// Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device
// later.

namespace {
int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device);
hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice};
}  // namespace

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = {
    .common =
        {
            .tag = HARDWARE_MODULE_TAG,
            .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
            .hal_api_version = HARDWARE_HAL_API_VERSION,
            .id = HWVULKAN_HARDWARE_MODULE_ID,
            .name = "Null Vulkan Driver",
            .author = "The Android Open Source Project",
            .methods = &nulldrv_module_methods,
        },
};
#pragma clang diagnostic pop

// -----------------------------------------------------------------------------

namespace {

VkResult CreateInstance(const VkInstanceCreateInfo* create_info,
                        VkInstance* out_instance) {
    // Assume the loader provided alloc callbacks even if the app didn't.
    ALOG_ASSERT(
        create_info->pAllocCb,
        "Missing alloc callbacks, loader or app should have provided them");

    VkInstance_T* instance =
        static_cast<VkInstance_T*>(create_info->pAllocCb->pfnAlloc(
            create_info->pAllocCb->pUserData, sizeof(VkInstance_T),
            alignof(VkInstance_T), VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
    if (!instance)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    instance->alloc = create_info->pAllocCb;
    instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;

    *out_instance = instance;
    return VK_SUCCESS;
}

int CloseDevice(struct hw_device_t* /*device*/) {
    // nothing to do - opening a device doesn't allocate any resources
    return 0;
}

hwvulkan_device_t nulldrv_device = {
    .common =
        {
            .tag = HARDWARE_DEVICE_TAG,
            .version = HWVULKAN_DEVICE_API_VERSION_0_1,
            .module = &HAL_MODULE_INFO_SYM.common,
            .close = CloseDevice,
        },
    .EnumerateInstanceExtensionProperties =
        EnumerateInstanceExtensionProperties,
    .CreateInstance = CreateInstance,
    .GetInstanceProcAddr = GetInstanceProcAddr};

int OpenDevice(const hw_module_t* /*module*/,
               const char* id,
               hw_device_t** device) {
    if (strcmp(id, HWVULKAN_DEVICE_0) == 0) {
        *device = &nulldrv_device.common;
        return 0;
    }
    return -ENOENT;
}

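// The physical device is embedded in its VkInstance_T, so the owning instance
// can be recovered from the physical device pointer by offset arithmetic.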
VkInstance_T* GetInstanceFromPhysicalDevice(
    VkPhysicalDevice_T* physical_device) {
    return reinterpret_cast<VkInstance_T*>(
        reinterpret_cast<uintptr_t>(physical_device) -
        offsetof(VkInstance_T, physical_device));
}

uint64_t AllocHandle(VkDevice device, HandleType::Enum type) {
    const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1;
    ALOGE_IF(device->next_handle[type] == kHandleMask,
             "non-dispatchable handles of type=%u are about to overflow", type);
    return (UINT64_C(1) << 63) | ((uint64_t(type) & 0x7) << 56) |
           (device->next_handle[type]++ & kHandleMask);
}

}  // namespace

namespace null_driver {

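// Objects that carry real state (DeviceMemory, Buffer, Image) are allocated on
// the host and addressed through pointer-valued handles; HandleTraits maps a
// handle type to its backing object type for the two converters below.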
template <typename HandleT>
struct HandleTraits {};

template <typename HandleT>
typename HandleTraits<HandleT>::PointerType GetObjectFromHandle(
    const HandleT& h) {
    return reinterpret_cast<typename HandleTraits<HandleT>::PointerType>(
        uintptr_t(h.handle));
}

template <typename T>
typename T::HandleType GetHandleToObject(const T* obj) {
    return typename T::HandleType(reinterpret_cast<uintptr_t>(obj));
}

// -----------------------------------------------------------------------------
// Global

VkResult EnumerateInstanceExtensionProperties(const char*,
                                              uint32_t* count,
                                              VkExtensionProperties*) {
    *count = 0;
    return VK_SUCCESS;
}

PFN_vkVoidFunction GetInstanceProcAddr(VkInstance, const char* name) {
    PFN_vkVoidFunction proc = LookupInstanceProcAddr(name);
    if (!proc && strcmp(name, "vkGetDeviceProcAddr") == 0)
        proc = reinterpret_cast<PFN_vkVoidFunction>(GetDeviceProcAddr);
    return proc;
}

PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) {
    PFN_vkVoidFunction proc = LookupDeviceProcAddr(name);
    if (proc)
        return proc;
    if (strcmp(name, "vkGetSwapchainGrallocUsageANDROID") == 0)
        return reinterpret_cast<PFN_vkVoidFunction>(
            GetSwapchainGrallocUsageANDROID);
    if (strcmp(name, "vkAcquireImageANDROID") == 0)
        return reinterpret_cast<PFN_vkVoidFunction>(AcquireImageANDROID);
    if (strcmp(name, "vkQueueSignalReleaseImageANDROID") == 0)
        return reinterpret_cast<PFN_vkVoidFunction>(
            QueueSignalReleaseImageANDROID);
    return nullptr;
}

// -----------------------------------------------------------------------------
// Instance

void DestroyInstance(VkInstance instance) {
    instance->alloc->pfnFree(instance->alloc->pUserData, instance);
}

// -----------------------------------------------------------------------------
// PhysicalDevice

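// The null driver exposes exactly one physical device, stored inline in the
// instance.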
VkResult EnumeratePhysicalDevices(VkInstance instance,
                                  uint32_t* physical_device_count,
                                  VkPhysicalDevice* physical_devices) {
    if (physical_devices && *physical_device_count >= 1)
        physical_devices[0] = &instance->physical_device;
    *physical_device_count = 1;
    return VK_SUCCESS;
}

VkResult GetPhysicalDeviceProperties(VkPhysicalDevice,
                                     VkPhysicalDeviceProperties* properties) {
    properties->apiVersion = VK_API_VERSION;
    properties->driverVersion = VK_MAKE_VERSION(0, 0, 1);
    properties->vendorId = 0;
    properties->deviceId = 0;
    properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
    strcpy(properties->deviceName, "Android Vulkan Null Driver");
    memset(properties->pipelineCacheUUID, 0,
           sizeof(properties->pipelineCacheUUID));
    return VK_SUCCESS;
}

VkResult GetPhysicalDeviceQueueFamilyProperties(
    VkPhysicalDevice,
    uint32_t* count,
    VkQueueFamilyProperties* properties) {
    if (properties) {
        if (*count < 1)
            return VK_INCOMPLETE;
        properties->queueFlags =
            VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_DMA_BIT;
        properties->queueCount = 1;
        properties->supportsTimestamps = VK_FALSE;
    }
    *count = 1;
    return VK_SUCCESS;
}

VkResult GetPhysicalDeviceMemoryProperties(
    VkPhysicalDevice,
    VkPhysicalDeviceMemoryProperties* properties) {
    properties->memoryTypeCount = 1;
    properties->memoryTypes[0].propertyFlags =
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    properties->memoryTypes[0].heapIndex = 0;
    properties->memoryHeapCount = 1;
    properties->memoryHeaps[0].size = kMaxDeviceMemory;
    properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_HOST_LOCAL_BIT;
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// Device

VkResult CreateDevice(VkPhysicalDevice physical_device,
                      const VkDeviceCreateInfo*,
                      VkDevice* out_device) {
    VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device);
    VkDevice_T* device = static_cast<VkDevice_T*>(instance->alloc->pfnAlloc(
        instance->alloc->pUserData, sizeof(VkDevice_T), alignof(VkDevice_T),
        VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
    if (!device)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    device->instance = instance;
    device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    std::fill(device->next_handle.begin(), device->next_handle.end(),
              UINT64_C(0));

    *out_device = device;
    return VK_SUCCESS;
}

void DestroyDevice(VkDevice device) {
    if (!device)
        return;
    const VkAllocCallbacks* alloc = device->instance->alloc;
    alloc->pfnFree(alloc->pUserData, device);
}

VkResult GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) {
    *queue = &device->queue;
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// CmdBuffer

VkResult CreateCommandBuffer(VkDevice device,
                             const VkCmdBufferCreateInfo*,
                             VkCmdBuffer* out_cmdbuf) {
    const VkAllocCallbacks* alloc = device->instance->alloc;
    VkCmdBuffer_T* cmdbuf = static_cast<VkCmdBuffer_T*>(alloc->pfnAlloc(
        alloc->pUserData, sizeof(VkCmdBuffer_T), alignof(VkCmdBuffer_T),
        VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
    if (!cmdbuf)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    cmdbuf->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    *out_cmdbuf = cmdbuf;
    return VK_SUCCESS;
}

void DestroyCommandBuffer(VkDevice device, VkCmdBuffer cmdbuf) {
    const VkAllocCallbacks* alloc = device->instance->alloc;
    alloc->pfnFree(alloc->pUserData, cmdbuf);
}

// -----------------------------------------------------------------------------
// DeviceMemory

struct DeviceMemory {
    typedef VkDeviceMemory HandleType;
    VkDeviceSize size;
    alignas(16) uint8_t data[0];
};
template <>
struct HandleTraits<VkDeviceMemory> {
    typedef DeviceMemory* PointerType;
};

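// Device memory is plain host memory: the payload starts at data[] right after
// the header, and MapMemory returns a pointer directly into it.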
VkResult AllocMemory(VkDevice device,
                     const VkMemoryAllocInfo* alloc_info,
                     VkDeviceMemory* mem_handle) {
    if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    const VkAllocCallbacks* alloc = device->instance->alloc;
    size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize);
    DeviceMemory* mem = static_cast<DeviceMemory*>(
        alloc->pfnAlloc(alloc->pUserData, size, alignof(DeviceMemory),
                        VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
    if (!mem)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    mem->size = size;
    *mem_handle = GetHandleToObject(mem);
    return VK_SUCCESS;
}

void FreeMemory(VkDevice device, VkDeviceMemory mem_handle) {
    const VkAllocCallbacks* alloc = device->instance->alloc;
    DeviceMemory* mem = GetObjectFromHandle(mem_handle);
    alloc->pfnFree(alloc->pUserData, mem);
}

VkResult MapMemory(VkDevice,
                   VkDeviceMemory mem_handle,
                   VkDeviceSize offset,
                   VkDeviceSize,
                   VkMemoryMapFlags,
                   void** out_ptr) {
    DeviceMemory* mem = GetObjectFromHandle(mem_handle);
    *out_ptr = &mem->data[0] + offset;
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// Buffer

struct Buffer {
    typedef VkBuffer HandleType;
    VkDeviceSize size;
};
template <>
struct HandleTraits<VkBuffer> {
    typedef Buffer* PointerType;
};

VkResult CreateBuffer(VkDevice device,
                      const VkBufferCreateInfo* create_info,
                      VkBuffer* buffer_handle) {
    ALOGW_IF(create_info->size > kMaxDeviceMemory,
             "CreateBuffer: requested size 0x%" PRIx64
             " exceeds max device memory size 0x%" PRIx64,
             create_info->size, kMaxDeviceMemory);

    const VkAllocCallbacks* alloc = device->instance->alloc;
    Buffer* buffer = static_cast<Buffer*>(
        alloc->pfnAlloc(alloc->pUserData, sizeof(Buffer), alignof(Buffer),
                        VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
    if (!buffer)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    buffer->size = create_info->size;
    *buffer_handle = GetHandleToObject(buffer);
    return VK_SUCCESS;
}

VkResult GetBufferMemoryRequirements(VkDevice,
                                     VkBuffer buffer_handle,
                                     VkMemoryRequirements* requirements) {
    Buffer* buffer = GetObjectFromHandle(buffer_handle);
    requirements->size = buffer->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
    return VK_SUCCESS;
}

void DestroyBuffer(VkDevice device, VkBuffer buffer_handle) {
    const VkAllocCallbacks* alloc = device->instance->alloc;
    Buffer* buffer = GetObjectFromHandle(buffer_handle);
    alloc->pfnFree(alloc->pUserData, buffer);
}

// -----------------------------------------------------------------------------
// Image

struct Image {
    typedef VkImage HandleType;
    VkDeviceSize size;
};
template <>
struct HandleTraits<VkImage> {
    typedef Image* PointerType;
};

VkResult CreateImage(VkDevice device,
                     const VkImageCreateInfo* create_info,
                     VkImage* image_handle) {
    if (create_info->imageType != VK_IMAGE_TYPE_2D ||
        create_info->format != VK_FORMAT_R8G8B8A8_UNORM ||
        create_info->mipLevels != 1) {
        ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u",
              create_info->imageType, create_info->format,
              create_info->mipLevels);
        return VK_UNSUPPORTED;
    }

    VkDeviceSize size =
        VkDeviceSize(create_info->extent.width * create_info->extent.height) *
        create_info->arraySize * create_info->samples * 4u;
    ALOGW_IF(size > kMaxDeviceMemory,
             "CreateImage: image size 0x%" PRIx64
             " exceeds max device memory size 0x%" PRIx64,
             size, kMaxDeviceMemory);

    const VkAllocCallbacks* alloc = device->instance->alloc;
    Image* image = static_cast<Image*>(
        alloc->pfnAlloc(alloc->pUserData, sizeof(Image), alignof(Image),
                        VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
    if (!image)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    image->size = size;
    *image_handle = GetHandleToObject(image);
    return VK_SUCCESS;
}

VkResult GetImageMemoryRequirements(VkDevice,
                                    VkImage image_handle,
                                    VkMemoryRequirements* requirements) {
    Image* image = GetObjectFromHandle(image_handle);
    requirements->size = image->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
    return VK_SUCCESS;
}

void DestroyImage(VkDevice device, VkImage image_handle) {
    const VkAllocCallbacks* alloc = device->instance->alloc;
    Image* image = GetObjectFromHandle(image_handle);
    alloc->pfnFree(alloc->pUserData, image);
}

// -----------------------------------------------------------------------------
// No-op types

VkResult CreateBufferView(VkDevice device,
                          const VkBufferViewCreateInfo*,
                          VkBufferView* view) {
    *view = AllocHandle(device, HandleType::kBufferView);
    return VK_SUCCESS;
}

VkResult CreateCommandPool(VkDevice device,
                           const VkCmdPoolCreateInfo*,
                           VkCmdPool* pool) {
    *pool = AllocHandle(device, HandleType::kCmdPool);
    return VK_SUCCESS;
}

VkResult CreateDescriptorPool(VkDevice device,
                              const VkDescriptorPoolCreateInfo*,
                              VkDescriptorPool* pool) {
    *pool = AllocHandle(device, HandleType::kDescriptorPool);
    return VK_SUCCESS;
}

VkResult AllocDescriptorSets(VkDevice device,
                             VkDescriptorPool,
                             VkDescriptorSetUsage,
                             uint32_t count,
                             const VkDescriptorSetLayout*,
                             VkDescriptorSet* sets) {
    for (uint32_t i = 0; i < count; i++)
        sets[i] = AllocHandle(device, HandleType::kDescriptorSet);
    return VK_SUCCESS;
}

VkResult CreateDescriptorSetLayout(VkDevice device,
                                   const VkDescriptorSetLayoutCreateInfo*,
                                   VkDescriptorSetLayout* layout) {
    *layout = AllocHandle(device, HandleType::kDescriptorSetLayout);
    return VK_SUCCESS;
}

VkResult CreateEvent(VkDevice device,
                     const VkEventCreateInfo*,
                     VkEvent* event) {
    *event = AllocHandle(device, HandleType::kEvent);
    return VK_SUCCESS;
}

VkResult CreateFence(VkDevice device,
                     const VkFenceCreateInfo*,
                     VkFence* fence) {
    *fence = AllocHandle(device, HandleType::kFence);
    return VK_SUCCESS;
}

VkResult CreateFramebuffer(VkDevice device,
                           const VkFramebufferCreateInfo*,
                           VkFramebuffer* framebuffer) {
    *framebuffer = AllocHandle(device, HandleType::kFramebuffer);
    return VK_SUCCESS;
}

VkResult CreateImageView(VkDevice device,
                         const VkImageViewCreateInfo*,
                         VkImageView* view) {
    *view = AllocHandle(device, HandleType::kImageView);
    return VK_SUCCESS;
}

VkResult CreateGraphicsPipelines(VkDevice device,
                                 VkPipelineCache,
                                 uint32_t count,
                                 const VkGraphicsPipelineCreateInfo*,
                                 VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreateComputePipelines(VkDevice device,
                                VkPipelineCache,
                                uint32_t count,
                                const VkComputePipelineCreateInfo*,
                                VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreatePipelineCache(VkDevice device,
                             const VkPipelineCacheCreateInfo*,
                             VkPipelineCache* cache) {
    *cache = AllocHandle(device, HandleType::kPipelineCache);
    return VK_SUCCESS;
}

VkResult CreatePipelineLayout(VkDevice device,
                              const VkPipelineLayoutCreateInfo*,
                              VkPipelineLayout* layout) {
    *layout = AllocHandle(device, HandleType::kPipelineLayout);
    return VK_SUCCESS;
}

VkResult CreateQueryPool(VkDevice device,
                         const VkQueryPoolCreateInfo*,
                         VkQueryPool* pool) {
    *pool = AllocHandle(device, HandleType::kQueryPool);
    return VK_SUCCESS;
}

VkResult CreateRenderPass(VkDevice device,
                          const VkRenderPassCreateInfo*,
                          VkRenderPass* renderpass) {
    *renderpass = AllocHandle(device, HandleType::kRenderPass);
    return VK_SUCCESS;
}

VkResult CreateSampler(VkDevice device,
                       const VkSamplerCreateInfo*,
                       VkSampler* sampler) {
    *sampler = AllocHandle(device, HandleType::kSampler);
    return VK_SUCCESS;
}

VkResult CreateSemaphore(VkDevice device,
                         const VkSemaphoreCreateInfo*,
                         VkSemaphore* semaphore) {
    *semaphore = AllocHandle(device, HandleType::kSemaphore);
    return VK_SUCCESS;
}

VkResult CreateShader(VkDevice device,
                      const VkShaderCreateInfo*,
                      VkShader* shader) {
    *shader = AllocHandle(device, HandleType::kShader);
    return VK_SUCCESS;
}

VkResult CreateShaderModule(VkDevice device,
                            const VkShaderModuleCreateInfo*,
                            VkShaderModule* module) {
    *module = AllocHandle(device, HandleType::kShaderModule);
    return VK_SUCCESS;
}

VkResult GetSwapchainGrallocUsageANDROID(VkDevice,
                                         VkFormat,
                                         VkImageUsageFlags,
                                         int* grallocUsage) {
    // The null driver never reads or writes the gralloc buffer
    *grallocUsage = 0;
    return VK_SUCCESS;
}

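// There is no GPU work to wait for, so the acquire fence is simply taken over
// and closed.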
VkResult AcquireImageANDROID(VkDevice, VkImage, int fence, VkSemaphore) {
    close(fence);
    return VK_SUCCESS;
}

VkResult QueueSignalReleaseImageANDROID(VkQueue, VkImage, int* fence) {
    *fence = -1;
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// No-op entrypoints

// clang-format off
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunused-parameter"

VkResult GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pCount, VkLayerProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pCount, VkExtensionProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult QueueSubmit(VkQueue queue, uint32_t cmdBufferCount, const VkCmdBuffer* pCmdBuffers, VkFence fence) {
    return VK_SUCCESS;
}

VkResult QueueWaitIdle(VkQueue queue) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DeviceWaitIdle(VkDevice device) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void UnmapMemory(VkDevice device, VkDeviceMemory mem) {
}

VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

VkResult GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, uint32_t samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult QueueBindSparseBufferMemory(VkQueue queue, VkBuffer buffer, uint32_t numBindings, const VkSparseMemoryBindInfo* pBindInfo) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult QueueBindSparseImageOpaqueMemory(VkQueue queue, VkImage image, uint32_t numBindings, const VkSparseMemoryBindInfo* pBindInfo) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult QueueBindSparseImageMemory(VkQueue queue, VkImage image, uint32_t numBindings, const VkSparseImageMemoryBindInfo* pBindInfo) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyFence(VkDevice device, VkFence fence) {
}

VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {
    return VK_SUCCESS;
}

VkResult GetFenceStatus(VkDevice device, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {
    return VK_SUCCESS;
}

void DestroySemaphore(VkDevice device, VkSemaphore semaphore) {
}

VkResult QueueSignalSemaphore(VkQueue queue, VkSemaphore semaphore) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult QueueWaitSemaphore(VkQueue queue, VkSemaphore semaphore) {
    return VK_SUCCESS;
}

void DestroyEvent(VkDevice device, VkEvent event) {
}

VkResult GetEventStatus(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult SetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult ResetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyQueryPool(VkDevice device, VkQueryPool queryPool) {
}

VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t* pDataSize, void* pData, VkQueryResultFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyBufferView(VkDevice device, VkBufferView bufferView) {
}

VkResult GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyImageView(VkDevice device, VkImageView imageView) {
}

void DestroyShaderModule(VkDevice device, VkShaderModule shaderModule) {
}

void DestroyShader(VkDevice device, VkShader shader) {
}

void DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache) {
}

size_t GetPipelineCacheSize(VkDevice device, VkPipelineCache pipelineCache) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, void* pData) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyPipeline(VkDevice device, VkPipeline pipeline) {
}

void DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout) {
}

void DestroySampler(VkDevice device, VkSampler sampler) {
}

void DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout) {
}

void DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool) {
}

VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer) {
}

void DestroyRenderPass(VkDevice device, VkRenderPass renderPass) {
}

VkResult GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyCommandPool(VkDevice device, VkCmdPool cmdPool) {
}

VkResult ResetCommandPool(VkDevice device, VkCmdPool cmdPool, VkCmdPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult BeginCommandBuffer(VkCmdBuffer cmdBuffer, const VkCmdBufferBeginInfo* pBeginInfo) {
    return VK_SUCCESS;
}

VkResult EndCommandBuffer(VkCmdBuffer cmdBuffer) {
    return VK_SUCCESS;
}

VkResult ResetCommandBuffer(VkCmdBuffer cmdBuffer, VkCmdBufferResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void CmdBindPipeline(VkCmdBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
}

void CmdSetViewport(VkCmdBuffer cmdBuffer, uint32_t viewportCount, const VkViewport* pViewports) {
}

void CmdSetScissor(VkCmdBuffer cmdBuffer, uint32_t scissorCount, const VkRect2D* pScissors) {
}

void CmdSetLineWidth(VkCmdBuffer cmdBuffer, float lineWidth) {
}

void CmdSetDepthBias(VkCmdBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
}

void CmdSetBlendConstants(VkCmdBuffer cmdBuffer, const float blendConst[4]) {
}

void CmdSetDepthBounds(VkCmdBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
}

void CmdSetStencilCompareMask(VkCmdBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
}

void CmdSetStencilWriteMask(VkCmdBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
}

void CmdSetStencilReference(VkCmdBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
}

void CmdBindDescriptorSets(VkCmdBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
}

void CmdBindIndexBuffer(VkCmdBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
}

void CmdBindVertexBuffers(VkCmdBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
}

void CmdDraw(VkCmdBuffer cmdBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
}

void CmdDrawIndexed(VkCmdBuffer cmdBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
}

void CmdDrawIndirect(VkCmdBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDrawIndexedIndirect(VkCmdBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDispatch(VkCmdBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
}

void CmdDispatchIndirect(VkCmdBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
}

void CmdCopyBuffer(VkCmdBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
}

void CmdCopyImage(VkCmdBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
}

void CmdBlitImage(VkCmdBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkTexFilter filter) {
}

void CmdCopyBufferToImage(VkCmdBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdCopyImageToBuffer(VkCmdBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdUpdateBuffer(VkCmdBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const uint32_t* pData) {
}

void CmdFillBuffer(VkCmdBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
}

void CmdClearColorImage(VkCmdBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearDepthStencilImage(VkCmdBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearAttachments(VkCmdBuffer cmdBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkRect3D* pRects) {
}

void CmdResolveImage(VkCmdBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
}

void CmdSetEvent(VkCmdBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdResetEvent(VkCmdBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdWaitEvents(VkCmdBuffer cmdBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags destStageMask, uint32_t memBarrierCount, const void* const* ppMemBarriers) {
}

void CmdPipelineBarrier(VkCmdBuffer cmdBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags destStageMask, VkBool32 byRegion, uint32_t memBarrierCount, const void* const* ppMemBarriers) {
}

void CmdBeginQuery(VkCmdBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
}

void CmdEndQuery(VkCmdBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
}

void CmdResetQueryPool(VkCmdBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount) {
}

void CmdWriteTimestamp(VkCmdBuffer cmdBuffer, VkTimestampType timestampType, VkBuffer destBuffer, VkDeviceSize destOffset) {
}

void CmdCopyQueryPoolResults(VkCmdBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
}

void CmdPushConstants(VkCmdBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
}

void CmdBeginRenderPass(VkCmdBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkRenderPassContents contents) {
}

void CmdNextSubpass(VkCmdBuffer cmdBuffer, VkRenderPassContents contents) {
}

void CmdEndRenderPass(VkCmdBuffer cmdBuffer) {
}

void CmdExecuteCommands(VkCmdBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCmdBuffer* pCmdBuffers) {
}

#pragma clang diagnostic pop
// clang-format on

}  // namespace null_driver