// null_driver.cpp revision 275d76c8158c90ec5317b82cb10b094bca2b43cf
1/*
2 * Copyright 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <hardware/hwvulkan.h>
18
19#include <inttypes.h>
20#include <string.h>
21#include <algorithm>
22#include <array>
23
24// #define LOG_NDEBUG 0
25#include <log/log.h>
26#include <utils/Errors.h>
27
28#include "null_driver_gen.h"
29
30using namespace null_driver;
31
// Dispatchable Vulkan object types. The hwvulkan contract requires the first
// field of every dispatchable object to be an hwvulkan_dispatch_t whose
// magic is set to HWVULKAN_DISPATCH_MAGIC; the loader uses that slot to
// install its dispatch pointer.
struct VkPhysicalDevice_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkInstance_T {
    hwvulkan_dispatch_t dispatch;
    // Allocation callbacks captured at vkCreateInstance; used again by
    // DestroyInstance and as the fallback allocator for CreateDevice.
    VkAllocationCallbacks allocator;
    // Embedded so no separate allocation is needed for the one physical
    // device; GetInstanceFromPhysicalDevice() recovers the instance from
    // this member via offsetof.
    VkPhysicalDevice_T physical_device;
};

struct VkQueue_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkCommandBuffer_T {
    hwvulkan_dispatch_t dispatch;
};
49
namespace {
// Handles for non-dispatchable objects are either pointers, or arbitrary
// 64-bit non-zero values. We only use pointers when we need to keep state for
// the object even in a null driver. For the rest, we form a handle as:
//   [63:63] = 1 to distinguish from pointer handles*
//   [62:56] = handle type enum value (see HandleType::Enum below)
//   [55: 0] = per-handle-type incrementing counter
// * This works because virtual addresses with the high bit set are reserved
// for kernel data in all ABIs we run on.
//
// We never reclaim handles on vkDestroy*. It's not even necessary for us to
// have distinct handles for live objects, and practically speaking we won't
// ever create 2^56 objects of the same type from a single VkDevice in a null
// driver.
//
// Using a namespace here instead of 'enum class' since we want scoped
// constants but also want implicit conversions to integral types.
namespace HandleType {
enum Enum {
    kBufferView,
    kDescriptorPool,
    kDescriptorSet,
    kDescriptorSetLayout,
    kEvent,
    kFence,
    kFramebuffer,
    kImageView,
    kPipeline,
    kPipelineCache,
    kPipelineLayout,
    kQueryPool,
    kRenderPass,
    kSampler,
    kSemaphore,
    kShaderModule,

    kNumTypes  // count of handle types; sizes VkDevice_T::next_handle
};
}  // namespace HandleType
// NOTE(review): this non-template declaration does not match the template
// AllocHandle defined later in this file and appears unused in this chunk —
// confirm against the rest of the file before removing.
uint64_t AllocHandle(VkDevice device, HandleType::Enum type);

// Size of the single advertised memory heap: one byte past the largest
// signed pointer value, i.e. half of the virtual address space.
const VkDeviceSize kMaxDeviceMemory = VkDeviceSize(INTPTR_MAX) + 1;

}  // anonymous namespace
94
struct VkDevice_T {
    hwvulkan_dispatch_t dispatch;
    // Allocation callbacks the device was created with (or inherited from
    // the instance); used by DestroyDevice and per-object Create* fallbacks.
    VkAllocationCallbacks allocator;
    VkInstance_T* instance;  // owning instance
    VkQueue_T queue;         // the single queue returned by GetDeviceQueue
    // Next counter value for each non-dispatchable handle type; see the
    // handle-encoding comment above. Zeroed in CreateDevice.
    std::array<uint64_t, HandleType::kNumTypes> next_handle;
};
102
103// -----------------------------------------------------------------------------
104// Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device
105// later.
106
namespace {
// Forward declaration: OpenDevice is defined further down but must be
// referenced by the method table, which HAL_MODULE_INFO_SYM needs now.
int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device);
hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice};
}  // namespace
111
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
// HAL module descriptor. The hardware module loader locates this exported
// symbol by name, so it must have default visibility.
__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = {
    .common =
        {
            .tag = HARDWARE_MODULE_TAG,
            .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
            .hal_api_version = HARDWARE_HAL_API_VERSION,
            .id = HWVULKAN_HARDWARE_MODULE_ID,
            .name = "Null Vulkan Driver",
            .author = "The Android Open Source Project",
            .methods = &nulldrv_module_methods,
        },
};
#pragma clang diagnostic pop
127
128// -----------------------------------------------------------------------------
129
130namespace {
131
132int CloseDevice(struct hw_device_t* /*device*/) {
133    // nothing to do - opening a device doesn't allocate any resources
134    return 0;
135}
136
137hwvulkan_device_t nulldrv_device = {
138    .common =
139        {
140            .tag = HARDWARE_DEVICE_TAG,
141            .version = HWVULKAN_DEVICE_API_VERSION_0_1,
142            .module = &HAL_MODULE_INFO_SYM.common,
143            .close = CloseDevice,
144        },
145    .EnumerateInstanceExtensionProperties =
146        EnumerateInstanceExtensionProperties,
147    .CreateInstance = CreateInstance,
148    .GetInstanceProcAddr = GetInstanceProcAddr};
149
150int OpenDevice(const hw_module_t* /*module*/,
151               const char* id,
152               hw_device_t** device) {
153    if (strcmp(id, HWVULKAN_DEVICE_0) == 0) {
154        *device = &nulldrv_device.common;
155        return 0;
156    }
157    return -ENOENT;
158}
159
160VkInstance_T* GetInstanceFromPhysicalDevice(
161    VkPhysicalDevice_T* physical_device) {
162    return reinterpret_cast<VkInstance_T*>(
163        reinterpret_cast<uintptr_t>(physical_device) -
164        offsetof(VkInstance_T, physical_device));
165}
166
167template <class Handle>
168Handle AllocHandle(VkDevice device, HandleType::Enum type) {
169    const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1;
170    ALOGE_IF(device->next_handle[type] == kHandleMask,
171             "non-dispatchable handles of type=%u are about to overflow", type);
172    return reinterpret_cast<Handle>(
173        (UINT64_C(1) << 63) | ((uint64_t(type) & 0x7) << 56) |
174        (device->next_handle[type]++ & kHandleMask));
175}
176
177}  // namespace
178
179namespace null_driver {
180
// Defines the two conversion helpers between a driver-private struct T* and
// its opaque Vk##T handle: Get##T##FromHandle() and GetHandleTo##T(). These
// handles carry the raw pointer bits; per the encoding comment above,
// pointer-backed handles are distinguished by having the top bit clear.
#define DEFINE_OBJECT_HANDLE_CONVERSION(T)              \
    T* Get##T##FromHandle(Vk##T h);                     \
    T* Get##T##FromHandle(Vk##T h) {                    \
        return reinterpret_cast<T*>(uintptr_t(h));      \
    }                                                   \
    Vk##T GetHandleTo##T(const T* obj);                 \
    Vk##T GetHandleTo##T(const T* obj) {                \
        return Vk##T(reinterpret_cast<uintptr_t>(obj)); \
    }
190
191// -----------------------------------------------------------------------------
192// Global
193
VKAPI_ATTR
VkResult EnumerateInstanceExtensionProperties(const char*,
                                              uint32_t* count,
                                              VkExtensionProperties*) {
    // The null driver implements no instance extensions.
    *count = 0;
    return VK_SUCCESS;
}
201
202VKAPI_ATTR
203VkResult CreateInstance(const VkInstanceCreateInfo* /*create_info*/,
204                        const VkAllocationCallbacks* allocator,
205                        VkInstance* out_instance) {
206    // Assume the loader provided alloc callbacks even if the app didn't.
207    ALOG_ASSERT(
208        allocator,
209        "Missing alloc callbacks, loader or app should have provided them");
210
211    VkInstance_T* instance =
212        static_cast<VkInstance_T*>(allocator->pfnAllocation(
213            allocator->pUserData, sizeof(VkInstance_T), alignof(VkInstance_T),
214            VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE));
215    if (!instance)
216        return VK_ERROR_OUT_OF_HOST_MEMORY;
217
218    instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
219    instance->allocator = *allocator;
220    instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
221
222    *out_instance = instance;
223    return VK_SUCCESS;
224}
225
VKAPI_ATTR
PFN_vkVoidFunction GetInstanceProcAddr(VkInstance instance, const char* name) {
    // With a null instance only the global entry points may be queried;
    // otherwise defer to the generated instance-level lookup table.
    return instance ? GetInstanceProcAddr(name) : GetGlobalProcAddr(name);
}
230
VKAPI_ATTR
PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) {
    // Device-level functions share the instance-level lookup in this driver.
    return GetInstanceProcAddr(name);
}
235
236// -----------------------------------------------------------------------------
237// Instance
238
239void DestroyInstance(VkInstance instance,
240                     const VkAllocationCallbacks* /*allocator*/) {
241    instance->allocator.pfnFree(instance->allocator.pUserData, instance);
242}
243
244// -----------------------------------------------------------------------------
245// PhysicalDevice
246
247VkResult EnumeratePhysicalDevices(VkInstance instance,
248                                  uint32_t* physical_device_count,
249                                  VkPhysicalDevice* physical_devices) {
250    if (physical_devices && *physical_device_count >= 1)
251        physical_devices[0] = &instance->physical_device;
252    *physical_device_count = 1;
253    return VK_SUCCESS;
254}
255
256void GetPhysicalDeviceProperties(VkPhysicalDevice,
257                                 VkPhysicalDeviceProperties* properties) {
258    properties->apiVersion = VK_API_VERSION;
259    properties->driverVersion = VK_MAKE_VERSION(0, 0, 1);
260    properties->vendorID = 0;
261    properties->deviceID = 0;
262    properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
263    strcpy(properties->deviceName, "Android Vulkan Null Driver");
264    memset(properties->pipelineCacheUUID, 0,
265           sizeof(properties->pipelineCacheUUID));
266}
267
268void GetPhysicalDeviceQueueFamilyProperties(
269    VkPhysicalDevice,
270    uint32_t* count,
271    VkQueueFamilyProperties* properties) {
272    if (properties) {
273        properties->queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT |
274                                 VK_QUEUE_TRANSFER_BIT;
275        properties->queueCount = 1;
276        properties->timestampValidBits = 64;
277    }
278    *count = 1;
279}
280
281void GetPhysicalDeviceMemoryProperties(
282    VkPhysicalDevice,
283    VkPhysicalDeviceMemoryProperties* properties) {
284    properties->memoryTypeCount = 1;
285    properties->memoryTypes[0].propertyFlags =
286        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
287        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
288        VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
289        VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
290    properties->memoryTypes[0].heapIndex = 0;
291    properties->memoryHeapCount = 1;
292    properties->memoryHeaps[0].size = kMaxDeviceMemory;
293    properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
294}
295
296// -----------------------------------------------------------------------------
297// Device
298
299VkResult CreateDevice(VkPhysicalDevice physical_device,
300                      const VkDeviceCreateInfo*,
301                      const VkAllocationCallbacks* allocator,
302                      VkDevice* out_device) {
303    VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device);
304    if (!allocator)
305        allocator = &instance->allocator;
306    VkDevice_T* device = static_cast<VkDevice_T*>(allocator->pfnAllocation(
307        allocator->pUserData, sizeof(VkDevice_T), alignof(VkDevice_T),
308        VK_SYSTEM_ALLOCATION_SCOPE_DEVICE));
309    if (!device)
310        return VK_ERROR_OUT_OF_HOST_MEMORY;
311
312    device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
313    device->allocator = *allocator;
314    device->instance = instance;
315    device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
316    std::fill(device->next_handle.begin(), device->next_handle.end(),
317              UINT64_C(0));
318
319    *out_device = device;
320    return VK_SUCCESS;
321}
322
void DestroyDevice(VkDevice device,
                   const VkAllocationCallbacks* /*allocator*/) {
    // vkDestroyDevice permits a NULL handle.
    if (!device)
        return;
    // Free with the creation-time callbacks; the argument is ignored.
    device->allocator.pfnFree(device->allocator.pUserData, device);
}
329
void GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) {
    // One family, one queue: family and queue indices are ignored.
    *queue = &device->queue;
}
333
334// -----------------------------------------------------------------------------
335// CommandPool
336
// Driver-private command pool state. A pool only needs to remember the
// allocation callbacks it was created with so that its command buffers, and
// the pool itself, can later be freed with the same callbacks.
struct CommandPool {
    typedef VkCommandPool HandleType;
    VkAllocationCallbacks allocator;
};
DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool)
342
343VkResult CreateCommandPool(VkDevice device,
344                           const VkCommandPoolCreateInfo* /*create_info*/,
345                           const VkAllocationCallbacks* allocator,
346                           VkCommandPool* cmd_pool) {
347    if (!allocator)
348        allocator = &device->allocator;
349    CommandPool* pool = static_cast<CommandPool*>(allocator->pfnAllocation(
350        allocator->pUserData, sizeof(CommandPool), alignof(CommandPool),
351        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
352    if (!pool)
353        return VK_ERROR_OUT_OF_HOST_MEMORY;
354    pool->allocator = *allocator;
355    *cmd_pool = GetHandleToCommandPool(pool);
356    return VK_SUCCESS;
357}
358
359void DestroyCommandPool(VkDevice /*device*/,
360                        VkCommandPool cmd_pool,
361                        const VkAllocationCallbacks* /*allocator*/) {
362    CommandPool* pool = GetCommandPoolFromHandle(cmd_pool);
363    pool->allocator.pfnFree(pool->allocator.pUserData, pool);
364}
365
366// -----------------------------------------------------------------------------
367// CmdBuffer
368
369VkResult AllocateCommandBuffers(VkDevice /*device*/,
370                                const VkCommandBufferAllocateInfo* alloc_info,
371                                VkCommandBuffer* cmdbufs) {
372    VkResult result = VK_SUCCESS;
373    CommandPool& pool = *GetCommandPoolFromHandle(alloc_info->commandPool);
374    std::fill(cmdbufs, cmdbufs + alloc_info->commandBufferCount, nullptr);
375    for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
376        cmdbufs[i] =
377            static_cast<VkCommandBuffer_T*>(pool.allocator.pfnAllocation(
378                pool.allocator.pUserData, sizeof(VkCommandBuffer_T),
379                alignof(VkCommandBuffer_T), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
380        if (!cmdbufs[i]) {
381            result = VK_ERROR_OUT_OF_HOST_MEMORY;
382            break;
383        }
384        cmdbufs[i]->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
385    }
386    if (result != VK_SUCCESS) {
387        for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
388            if (!cmdbufs[i])
389                break;
390            pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
391        }
392    }
393    return result;
394}
395
void FreeCommandBuffers(VkDevice /*device*/,
                        VkCommandPool cmd_pool,
                        uint32_t count,
                        const VkCommandBuffer* cmdbufs,
                        // no extra state: buffers are plain allocations
                        ...) = delete;  // (placeholder - see below)
404
405// -----------------------------------------------------------------------------
406// DeviceMemory
407
// Driver-private memory object: a small header immediately followed by the
// mappable payload bytes. 'size' holds the total allocation size including
// the header (set in AllocateMemory). data[0] is a zero-length (clang
// extension) trailing array, 16-byte aligned to allow fast SIMD memcpy of
// mapped memory.
struct DeviceMemory {
    typedef VkDeviceMemory HandleType;
    VkDeviceSize size;
    alignas(16) uint8_t data[0];
};
DEFINE_OBJECT_HANDLE_CONVERSION(DeviceMemory)
414
415VkResult AllocateMemory(VkDevice device,
416                        const VkMemoryAllocateInfo* alloc_info,
417                        const VkAllocationCallbacks* allocator,
418                        VkDeviceMemory* mem_handle) {
419    if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize)
420        return VK_ERROR_OUT_OF_HOST_MEMORY;
421    if (!allocator)
422        allocator = &device->allocator;
423
424    size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize);
425    DeviceMemory* mem = static_cast<DeviceMemory*>(allocator->pfnAllocation(
426        allocator->pUserData, size, alignof(DeviceMemory),
427        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
428    if (!mem)
429        return VK_ERROR_OUT_OF_HOST_MEMORY;
430    mem->size = size;
431    *mem_handle = GetHandleToDeviceMemory(mem);
432    return VK_SUCCESS;
433}
434
void FreeMemory(VkDevice device,
                VkDeviceMemory mem_handle,
                const VkAllocationCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    // mem may be null for a VK_NULL_HANDLE; pfnFree must accept null,
    // like free().
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    allocator->pfnFree(allocator->pUserData, mem);
}
443
VkResult MapMemory(VkDevice,
                   VkDeviceMemory mem_handle,
                   VkDeviceSize offset,
                   VkDeviceSize,
                   VkMemoryMapFlags,
                   void** out_ptr) {
    // Memory is plain host memory, so mapping is just pointer arithmetic
    // into the payload. The size and flags are ignored and no mapping state
    // is tracked (UnmapMemory is a no-op).
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    *out_ptr = &mem->data[0] + offset;
    return VK_SUCCESS;
}
454
455// -----------------------------------------------------------------------------
456// Buffer
457
// Driver-private buffer state: only the creation size is kept, for
// GetBufferMemoryRequirements.
struct Buffer {
    typedef VkBuffer HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Buffer)
463
464VkResult CreateBuffer(VkDevice device,
465                      const VkBufferCreateInfo* create_info,
466                      const VkAllocationCallbacks* allocator,
467                      VkBuffer* buffer_handle) {
468    ALOGW_IF(create_info->size > kMaxDeviceMemory,
469             "CreateBuffer: requested size 0x%" PRIx64
470             " exceeds max device memory size 0x%" PRIx64,
471             create_info->size, kMaxDeviceMemory);
472    if (!allocator)
473        allocator = &device->allocator;
474    Buffer* buffer = static_cast<Buffer*>(allocator->pfnAllocation(
475        allocator->pUserData, sizeof(Buffer), alignof(Buffer),
476        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
477    if (!buffer)
478        return VK_ERROR_OUT_OF_HOST_MEMORY;
479    buffer->size = create_info->size;
480    *buffer_handle = GetHandleToBuffer(buffer);
481    return VK_SUCCESS;
482}
483
void GetBufferMemoryRequirements(VkDevice,
                                 VkBuffer buffer_handle,
                                 VkMemoryRequirements* requirements) {
    // Report the creation size verbatim; the single memory type (bit 0) is
    // always acceptable.
    Buffer* buffer = GetBufferFromHandle(buffer_handle);
    requirements->size = buffer->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
}
492
void DestroyBuffer(VkDevice device,
                   VkBuffer buffer_handle,
                   const VkAllocationCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    // buffer may be null for VK_NULL_HANDLE; pfnFree must accept null.
    Buffer* buffer = GetBufferFromHandle(buffer_handle);
    allocator->pfnFree(allocator->pUserData, buffer);
}
501
502// -----------------------------------------------------------------------------
503// Image
504
// Driver-private image state: only the computed byte size is kept, for
// GetImageMemoryRequirements.
struct Image {
    typedef VkImage HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Image)
510
511VkResult CreateImage(VkDevice device,
512                     const VkImageCreateInfo* create_info,
513                     const VkAllocationCallbacks* allocator,
514                     VkImage* image_handle) {
515    if (create_info->imageType != VK_IMAGE_TYPE_2D ||
516        create_info->format != VK_FORMAT_R8G8B8A8_UNORM ||
517        create_info->mipLevels != 1) {
518        ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u",
519              create_info->imageType, create_info->format,
520              create_info->mipLevels);
521        return VK_ERROR_OUT_OF_HOST_MEMORY;
522    }
523
524    VkDeviceSize size =
525        VkDeviceSize(create_info->extent.width * create_info->extent.height) *
526        create_info->arrayLayers * create_info->samples * 4u;
527    ALOGW_IF(size > kMaxDeviceMemory,
528             "CreateImage: image size 0x%" PRIx64
529             " exceeds max device memory size 0x%" PRIx64,
530             size, kMaxDeviceMemory);
531
532    if (!allocator)
533        allocator = &device->allocator;
534    Image* image = static_cast<Image*>(allocator->pfnAllocation(
535        allocator->pUserData, sizeof(Image), alignof(Image),
536        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
537    if (!image)
538        return VK_ERROR_OUT_OF_HOST_MEMORY;
539    image->size = size;
540    *image_handle = GetHandleToImage(image);
541    return VK_SUCCESS;
542}
543
void GetImageMemoryRequirements(VkDevice,
                                VkImage image_handle,
                                VkMemoryRequirements* requirements) {
    // Report the size computed at creation; the single memory type (bit 0)
    // is always acceptable.
    Image* image = GetImageFromHandle(image_handle);
    requirements->size = image->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
}
552
void DestroyImage(VkDevice device,
                  VkImage image_handle,
                  const VkAllocationCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    // image may be null for VK_NULL_HANDLE; pfnFree must accept null.
    Image* image = GetImageFromHandle(image_handle);
    allocator->pfnFree(allocator->pUserData, image);
}
561
562// -----------------------------------------------------------------------------
563// No-op types
564
// The object types below keep no driver-side state, so "creating" one just
// hands back a fresh counter-encoded handle (see AllocHandle) and the
// corresponding Destroy* entry points are no-ops. Allocation callbacks are
// ignored because no host memory is allocated.

VkResult CreateBufferView(VkDevice device,
                          const VkBufferViewCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkBufferView* view) {
    *view = AllocHandle<VkBufferView>(device, HandleType::kBufferView);
    return VK_SUCCESS;
}

VkResult CreateDescriptorPool(VkDevice device,
                              const VkDescriptorPoolCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkDescriptorPool* pool) {
    *pool = AllocHandle<VkDescriptorPool>(device, HandleType::kDescriptorPool);
    return VK_SUCCESS;
}

// Every requested set gets its own handle; there is no pool bookkeeping.
VkResult AllocateDescriptorSets(VkDevice device,
                                const VkDescriptorSetAllocateInfo* alloc_info,
                                VkDescriptorSet* descriptor_sets) {
    for (uint32_t i = 0; i < alloc_info->descriptorSetCount; i++)
        descriptor_sets[i] =
            AllocHandle<VkDescriptorSet>(device, HandleType::kDescriptorSet);
    return VK_SUCCESS;
}

VkResult CreateDescriptorSetLayout(VkDevice device,
                                   const VkDescriptorSetLayoutCreateInfo*,
                                   const VkAllocationCallbacks* /*allocator*/,
                                   VkDescriptorSetLayout* layout) {
    *layout = AllocHandle<VkDescriptorSetLayout>(
        device, HandleType::kDescriptorSetLayout);
    return VK_SUCCESS;
}

VkResult CreateEvent(VkDevice device,
                     const VkEventCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkEvent* event) {
    *event = AllocHandle<VkEvent>(device, HandleType::kEvent);
    return VK_SUCCESS;
}

VkResult CreateFence(VkDevice device,
                     const VkFenceCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkFence* fence) {
    *fence = AllocHandle<VkFence>(device, HandleType::kFence);
    return VK_SUCCESS;
}

VkResult CreateFramebuffer(VkDevice device,
                           const VkFramebufferCreateInfo*,
                           const VkAllocationCallbacks* /*allocator*/,
                           VkFramebuffer* framebuffer) {
    *framebuffer = AllocHandle<VkFramebuffer>(device, HandleType::kFramebuffer);
    return VK_SUCCESS;
}

VkResult CreateImageView(VkDevice device,
                         const VkImageViewCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkImageView* view) {
    *view = AllocHandle<VkImageView>(device, HandleType::kImageView);
    return VK_SUCCESS;
}
630
// Pipeline and remaining stateless object creators: like the group above,
// each returned handle is just a fresh per-type counter value.

VkResult CreateGraphicsPipelines(VkDevice device,
                                 VkPipelineCache,
                                 uint32_t count,
                                 const VkGraphicsPipelineCreateInfo*,
                                 const VkAllocationCallbacks* /*allocator*/,
                                 VkPipeline* pipelines) {
    // One handle per requested pipeline; the cache is ignored.
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreateComputePipelines(VkDevice device,
                                VkPipelineCache,
                                uint32_t count,
                                const VkComputePipelineCreateInfo*,
                                const VkAllocationCallbacks* /*allocator*/,
                                VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreatePipelineCache(VkDevice device,
                             const VkPipelineCacheCreateInfo*,
                             const VkAllocationCallbacks* /*allocator*/,
                             VkPipelineCache* cache) {
    *cache = AllocHandle<VkPipelineCache>(device, HandleType::kPipelineCache);
    return VK_SUCCESS;
}

VkResult CreatePipelineLayout(VkDevice device,
                              const VkPipelineLayoutCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkPipelineLayout* layout) {
    *layout =
        AllocHandle<VkPipelineLayout>(device, HandleType::kPipelineLayout);
    return VK_SUCCESS;
}

VkResult CreateQueryPool(VkDevice device,
                         const VkQueryPoolCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkQueryPool* pool) {
    *pool = AllocHandle<VkQueryPool>(device, HandleType::kQueryPool);
    return VK_SUCCESS;
}

VkResult CreateRenderPass(VkDevice device,
                          const VkRenderPassCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkRenderPass* renderpass) {
    *renderpass = AllocHandle<VkRenderPass>(device, HandleType::kRenderPass);
    return VK_SUCCESS;
}

VkResult CreateSampler(VkDevice device,
                       const VkSamplerCreateInfo*,
                       const VkAllocationCallbacks* /*allocator*/,
                       VkSampler* sampler) {
    *sampler = AllocHandle<VkSampler>(device, HandleType::kSampler);
    return VK_SUCCESS;
}

VkResult CreateSemaphore(VkDevice device,
                         const VkSemaphoreCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkSemaphore* semaphore) {
    *semaphore = AllocHandle<VkSemaphore>(device, HandleType::kSemaphore);
    return VK_SUCCESS;
}

VkResult CreateShaderModule(VkDevice device,
                            const VkShaderModuleCreateInfo*,
                            const VkAllocationCallbacks* /*allocator*/,
                            VkShaderModule* module) {
    *module = AllocHandle<VkShaderModule>(device, HandleType::kShaderModule);
    return VK_SUCCESS;
}
709
VkResult GetSwapchainGrallocUsageANDROID(VkDevice,
                                         VkFormat,
                                         VkImageUsageFlags,
                                         int* grallocUsage) {
    // The null driver never reads or writes the gralloc buffer
    *grallocUsage = 0;
    return VK_SUCCESS;
}
718
719VkResult AcquireImageANDROID(VkDevice,
720                             VkImage,
721                             int fence,
722                             VkSemaphore,
723                             VkFence) {
724    close(fence);
725    return VK_SUCCESS;
726}
727
VkResult QueueSignalReleaseImageANDROID(VkQueue,
                                        uint32_t,
                                        const VkSemaphore*,
                                        VkImage,
                                        int* fence) {
    // -1 means "no fence": nothing to wait on before the image is reusable.
    *fence = -1;
    return VK_SUCCESS;
}
736
737// -----------------------------------------------------------------------------
738// No-op entrypoints
739
740// clang-format off
741#pragma clang diagnostic push
742#pragma clang diagnostic ignored "-Wunused-parameter"
743
// Unimplemented physical-device queries and queue/device waits. Each either
// logs a TODO at verbose level or silently succeeds; none touches its
// output parameters, so callers receive uninitialized data — acceptable
// only for a null driver.

void GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pCount, VkLayerProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pCount, VkExtensionProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Submission is a no-op: there is no GPU and nothing to execute.
VkResult QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmitInfo, VkFence fence) {
    return VK_SUCCESS;
}

VkResult QueueWaitIdle(VkQueue queue) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DeviceWaitIdle(VkDevice device) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
785
// Memory-mapping maintenance and (sparse) binding stubs. Memory is plain,
// always-mapped host memory, so unmap/flush/invalidate have nothing to do,
// and binding is implicit.

void UnmapMemory(VkDevice device, VkDeviceMemory mem) {
}

VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

void GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
823
// No-op: fence handles are synthetic counters that are never reclaimed
// (see the handle scheme at the top of the file), so there is nothing to free.
void DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* allocator) {
}
826
// Intentional no-op: fences carry no state to reset; always succeeds.
VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {
    return VK_SUCCESS;
}
830
// Stub: logs a TODO (verbose builds only) and reports success
// (i.e. the fence always appears signaled).
VkResult GetFenceStatus(VkDevice device, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
835
// Intentional no-op: with no real GPU work, every fence is effectively
// signaled, so waiting returns success immediately.
VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {
    return VK_SUCCESS;
}
839
// No-op: semaphore handles are synthetic and never reclaimed; nothing to free.
void DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* allocator) {
}
842
// No-op: event handles are synthetic and never reclaimed; nothing to free.
void DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* allocator) {
}
845
// Stub: logs a TODO (verbose builds only) and reports success.
VkResult GetEventStatus(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
850
// Stub: logs a TODO (verbose builds only) and reports success; event state
// is not actually tracked.
VkResult SetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
855
// Stub: logs a TODO (verbose builds only) and reports success; event state
// is not actually tracked.
VkResult ResetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
860
// No-op: query-pool handles are synthetic and never reclaimed; nothing to free.
void DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* allocator) {
}
863
// Stub: logs a TODO (verbose builds only) and reports success; pData is not
// written, so callers currently get indeterminate results.
VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
868
// No-op: buffer-view handles are synthetic and never reclaimed; nothing to free.
void DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* allocator) {
}
871
// Stub: logs a TODO (verbose builds only); pLayout is not written.
void GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}
875
// No-op: image-view handles are synthetic and never reclaimed; nothing to free.
void DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* allocator) {
}
878
// No-op: shader-module handles are synthetic and never reclaimed; nothing to free.
void DestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* allocator) {
}
881
// No-op: pipeline-cache handles are synthetic and never reclaimed; nothing to free.
void DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* allocator) {
}
884
// Stub: logs a TODO (verbose builds only) and reports success; neither
// *pDataSize nor pData is written.
VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
889
// Stub: logs a TODO (verbose builds only) and reports success.
VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
894
// No-op: pipeline handles are synthetic and never reclaimed; nothing to free.
void DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* allocator) {
}
897
// No-op: pipeline-layout handles are synthetic and never reclaimed; nothing to free.
void DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* allocator) {
}
900
// No-op: sampler handles are synthetic and never reclaimed; nothing to free.
void DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* allocator) {
}
903
// No-op: descriptor-set-layout handles are synthetic and never reclaimed;
// nothing to free.
void DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* allocator) {
}
906
// No-op: descriptor-pool handles are synthetic and never reclaimed; nothing to free.
void DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* allocator) {
}
909
// Stub: logs a TODO (verbose builds only) and reports success.
VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
914
// Stub: logs a TODO (verbose builds only); writes/copies are ignored.
void UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}
918
// Stub: logs a TODO (verbose builds only) and reports success.
VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
923
// No-op: framebuffer handles are synthetic and never reclaimed; nothing to free.
void DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* allocator) {
}
926
// No-op: render-pass handles are synthetic and never reclaimed; nothing to free.
void DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* allocator) {
}
929
// Stub: logs a TODO (verbose builds only); pGranularity is not written.
void GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}
933
// Stub: logs a TODO (verbose builds only) and reports success.
VkResult ResetCommandPool(VkDevice device, VkCommandPool cmdPool, VkCommandPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
938
// Intentional no-op: command buffers hold only a dispatch pointer (see
// VkCommandBuffer_T above), so recording has no state to initialize.
VkResult BeginCommandBuffer(VkCommandBuffer cmdBuffer, const VkCommandBufferBeginInfo* pBeginInfo) {
    return VK_SUCCESS;
}
942
// Intentional no-op: nothing was recorded, so there is nothing to finalize.
VkResult EndCommandBuffer(VkCommandBuffer cmdBuffer) {
    return VK_SUCCESS;
}
946
// Stub: logs a TODO (verbose builds only) and reports success.
VkResult ResetCommandBuffer(VkCommandBuffer cmdBuffer, VkCommandBufferResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
951
// Recording no-op: this driver's command buffers store no commands.
// (Applies to all vkCmd* entry points below.)
void CmdBindPipeline(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
}
954
// Recording no-op: nothing is stored in the command buffer.
void CmdSetViewport(VkCommandBuffer cmdBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) {
}
957
// Recording no-op: nothing is stored in the command buffer.
void CmdSetScissor(VkCommandBuffer cmdBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) {
}
960
// Recording no-op: nothing is stored in the command buffer.
void CmdSetLineWidth(VkCommandBuffer cmdBuffer, float lineWidth) {
}
963
// Recording no-op: nothing is stored in the command buffer.
void CmdSetDepthBias(VkCommandBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
}
966
// Recording no-op: nothing is stored in the command buffer.
void CmdSetBlendConstants(VkCommandBuffer cmdBuffer, const float blendConst[4]) {
}
969
// Recording no-op: nothing is stored in the command buffer.
void CmdSetDepthBounds(VkCommandBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
}
972
// Recording no-op: nothing is stored in the command buffer.
void CmdSetStencilCompareMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
}
975
// Recording no-op: nothing is stored in the command buffer.
void CmdSetStencilWriteMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
}
978
// Recording no-op: nothing is stored in the command buffer.
void CmdSetStencilReference(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
}
981
// Recording no-op: nothing is stored in the command buffer.
void CmdBindDescriptorSets(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
}
984
// Recording no-op: nothing is stored in the command buffer.
void CmdBindIndexBuffer(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
}
987
// Recording no-op: nothing is stored in the command buffer.
void CmdBindVertexBuffers(VkCommandBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
}
990
// Recording no-op: draws are discarded; nothing is rendered.
void CmdDraw(VkCommandBuffer cmdBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
}
993
// Recording no-op: draws are discarded; nothing is rendered.
void CmdDrawIndexed(VkCommandBuffer cmdBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
}
996
// Recording no-op: draws are discarded; nothing is rendered.
void CmdDrawIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}
999
// Recording no-op: draws are discarded; nothing is rendered.
void CmdDrawIndexedIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}
1002
// Recording no-op: dispatches are discarded; no compute work runs.
void CmdDispatch(VkCommandBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
}
1005
// Recording no-op: dispatches are discarded; no compute work runs.
void CmdDispatchIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
}
1008
// Recording no-op: copies are discarded; no data is transferred.
void CmdCopyBuffer(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
}
1011
// Recording no-op: copies are discarded; no data is transferred.
void CmdCopyImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
}
1014
// Recording no-op: blits are discarded; no data is transferred.
void CmdBlitImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {
}
1017
// Recording no-op: copies are discarded; no data is transferred.
void CmdCopyBufferToImage(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}
1020
// Recording no-op: copies are discarded; no data is transferred.
void CmdCopyImageToBuffer(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}
1023
// Recording no-op: the update data is ignored.
void CmdUpdateBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const uint32_t* pData) {
}
1026
// Recording no-op: the fill is ignored.
void CmdFillBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
}
1029
// Recording no-op: the clear is ignored.
void CmdClearColorImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}
1032
// Recording no-op: the clear is ignored.
void CmdClearDepthStencilImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}
1035
// Recording no-op: the clear is ignored.
void CmdClearAttachments(VkCommandBuffer cmdBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {
}
1038
// Recording no-op: the resolve is ignored.
void CmdResolveImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
}
1041
// Recording no-op: event state is not tracked.
void CmdSetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}
1044
// Recording no-op: event state is not tracked.
void CmdResetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}
1047
// Recording no-op: with no real execution there is nothing to synchronize.
void CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}
1050
// Recording no-op: with no real execution there is nothing to synchronize.
void CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}
1053
// Recording no-op: queries are not tracked.
void CmdBeginQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
}
1056
// Recording no-op: queries are not tracked.
void CmdEndQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
}
1059
// Recording no-op: queries are not tracked.
void CmdResetQueryPool(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount) {
}
1062
// Recording no-op: no timestamp is written.
void CmdWriteTimestamp(VkCommandBuffer cmdBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t slot) {
}
1065
// Recording no-op: no results are copied.
void CmdCopyQueryPoolResults(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
}
1068
// Recording no-op: push-constant data is ignored.
void CmdPushConstants(VkCommandBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
}
1071
// Recording no-op: render-pass state is not tracked.
void CmdBeginRenderPass(VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {
}
1074
// Recording no-op: render-pass state is not tracked.
void CmdNextSubpass(VkCommandBuffer cmdBuffer, VkSubpassContents contents) {
}
1077
// Recording no-op: render-pass state is not tracked.
void CmdEndRenderPass(VkCommandBuffer cmdBuffer) {
}
1080
// Recording no-op: secondary command buffers hold nothing to execute.
void CmdExecuteCommands(VkCommandBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCommandBuffer* pCmdBuffers) {
}
1083
1084#pragma clang diagnostic pop
1085// clang-format on
1086
1087}  // namespace null_driver
1088