null_driver.cpp revision 5ae3abb3ca6728de04935b0c81bcdbdfc37b0d47
/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <hardware/hwvulkan.h>

#include <array>
#include <algorithm>
#include <inttypes.h>
#include <string.h>
#include <unistd.h>  // close()

// #define LOG_NDEBUG 0
#include <log/log.h>
#include <utils/Errors.h>

#include "null_driver.h"

using namespace null_driver;

struct VkPhysicalDevice_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkInstance_T {
    hwvulkan_dispatch_t dispatch;
    const VkAllocCallbacks* alloc;
    VkPhysicalDevice_T physical_device;
};

struct VkQueue_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkCmdBuffer_T {
    hwvulkan_dispatch_t dispatch;
};

namespace {
// Handles for non-dispatchable objects are either pointers, or arbitrary
// 64-bit non-zero values. We only use pointers when we need to keep state for
// the object even in a null driver. For the rest, we form a handle as:
//   [63:63] = 1 to distinguish from pointer handles*
//   [62:56] = handle type enum value
//   [55: 0] = per-handle-type incrementing counter
// * This works because virtual addresses with the high bit set are reserved
// for kernel data in all ABIs we run on.
//
// We never reclaim handles on vkDestroy*. It's not even necessary for us to
// have distinct handles for live objects, and practically speaking we won't
// ever create 2^56 objects of the same type from a single VkDevice in a null
// driver.
//
// Using a namespace here instead of 'enum class' since we want scoped
// constants but also want implicit conversions to integral types.
namespace HandleType {
enum Enum {
    kBufferView,
    kCmdPool,
    kDescriptorPool,
    kDescriptorSet,
    kDescriptorSetLayout,
    kEvent,
    kFence,
    kFramebuffer,
    kImageView,
    kPipeline,
    kPipelineCache,
    kPipelineLayout,
    kQueryPool,
    kRenderPass,
    kSampler,
    kSemaphore,
    kShader,
    kShaderModule,

    kNumTypes
};
}  // namespace HandleType
uint64_t AllocHandle(VkDevice device, HandleType::Enum type);

const VkDeviceSize kMaxDeviceMemory = VkDeviceSize(INTPTR_MAX) + 1;

}  // anonymous namespace

struct VkDevice_T {
    hwvulkan_dispatch_t dispatch;
    VkInstance_T* instance;
    VkQueue_T queue;
    std::array<uint64_t, HandleType::kNumTypes> next_handle;
};

// -----------------------------------------------------------------------------
// Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device
// later.

namespace {
int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device);
hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice};
}  // namespace

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = {
    .common =
        {
            .tag = HARDWARE_MODULE_TAG,
            .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
            .hal_api_version = HARDWARE_HAL_API_VERSION,
            .id = HWVULKAN_HARDWARE_MODULE_ID,
            .name = "Null Vulkan Driver",
            .author = "The Android Open Source Project",
            .methods = &nulldrv_module_methods,
        },
};
#pragma clang diagnostic pop
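
// For orientation only: an illustrative, untested sketch of how a client
// (e.g. the Vulkan loader) reaches this module through the HAL. It is not
// part of the driver.
//
//   const hw_module_t* module;
//   hw_device_t* device;
//   hw_get_module(HWVULKAN_HARDWARE_MODULE_ID, &module);
//   module->methods->open(module, HWVULKAN_DEVICE_0, &device);
//
// Error handling is omitted; the open call lands in OpenDevice below.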

// -----------------------------------------------------------------------------

namespace {

VkResult CreateInstance(const VkInstanceCreateInfo* create_info,
                        VkInstance* out_instance) {
    // Assume the loader provided alloc callbacks even if the app didn't.
    ALOG_ASSERT(
        create_info->pAllocCb,
        "Missing alloc callbacks, loader or app should have provided them");

    VkInstance_T* instance =
        static_cast<VkInstance_T*>(create_info->pAllocCb->pfnAlloc(
            create_info->pAllocCb->pUserData, sizeof(VkInstance_T),
            alignof(VkInstance_T), VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
    if (!instance)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    instance->alloc = create_info->pAllocCb;
    instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;

    *out_instance = instance;
    return VK_SUCCESS;
}

int CloseDevice(struct hw_device_t* /*device*/) {
    // nothing to do - opening a device doesn't allocate any resources
    return 0;
}

hwvulkan_device_t nulldrv_device = {
    .common =
        {
            .tag = HARDWARE_DEVICE_TAG,
            .version = HWVULKAN_DEVICE_API_VERSION_0_1,
            .module = &HAL_MODULE_INFO_SYM.common,
            .close = CloseDevice,
        },
    .EnumerateInstanceExtensionProperties =
        EnumerateInstanceExtensionProperties,
    .CreateInstance = CreateInstance,
    .GetInstanceProcAddr = GetInstanceProcAddr};

int OpenDevice(const hw_module_t* /*module*/,
               const char* id,
               hw_device_t** device) {
    if (strcmp(id, HWVULKAN_DEVICE_0) == 0) {
        *device = &nulldrv_device.common;
        return 0;
    }
    return -ENOENT;
}

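// A VkPhysicalDevice handle is the physical_device member embedded in a
// VkInstance_T (see EnumeratePhysicalDevices), so the owning instance can be
// recovered by subtracting that member's offset from its address.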
VkInstance_T* GetInstanceFromPhysicalDevice(
    VkPhysicalDevice_T* physical_device) {
    return reinterpret_cast<VkInstance_T*>(
        reinterpret_cast<uintptr_t>(physical_device) -
        offsetof(VkInstance_T, physical_device));
}

uint64_t AllocHandle(VkDevice device, HandleType::Enum type) {
    const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1;
    ALOGE_IF(device->next_handle[type] == kHandleMask,
             "non-dispatchable handles of type=%u are about to overflow", type);
    return (UINT64_C(1) << 63) | ((uint64_t(type) & 0x7F) << 56) |
           (device->next_handle[type]++ & kHandleMask);
}
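
// Worked example of the layout above (illustrative values only, not used by
// any code): the first two kSampler handles a device hands out would be
//   0x8E00000000000000 and 0x8E00000000000001
// i.e. bit 63 set, type 14 (kSampler) in bits [62:56], and the per-type
// counter in bits [55:0].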

}  // namespace

namespace null_driver {

template <typename HandleT>
struct HandleTraits {};

template <typename HandleT>
typename HandleTraits<HandleT>::PointerType GetObjectFromHandle(
    const HandleT& h) {
    return reinterpret_cast<typename HandleTraits<HandleT>::PointerType>(
        uintptr_t(h.handle));
}

template <typename T>
typename T::HandleType GetHandleToObject(const T* obj) {
    return typename T::HandleType(reinterpret_cast<uintptr_t>(obj));
}

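// Objects that do keep state (DeviceMemory, Buffer, Image below) use
// pointer-valued handles: GetHandleToObject wraps the object's address in its
// handle type, and GetObjectFromHandle casts it back through the HandleTraits
// specialization for that handle type.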
// -----------------------------------------------------------------------------
// Global

VkResult EnumerateInstanceExtensionProperties(const char*,
                                              uint32_t* count,
                                              VkExtensionProperties*) {
    *count = 0;
    return VK_SUCCESS;
}

PFN_vkVoidFunction GetInstanceProcAddr(VkInstance, const char* name) {
    PFN_vkVoidFunction proc = LookupInstanceProcAddr(name);
    if (!proc && strcmp(name, "vkGetDeviceProcAddr") == 0)
        proc = reinterpret_cast<PFN_vkVoidFunction>(GetDeviceProcAddr);
    return proc;
}

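// The two ANDROID native-fence entry points are not covered by
// LookupDeviceProcAddr, so GetDeviceProcAddr resolves them by name itself.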
PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) {
    PFN_vkVoidFunction proc = LookupDeviceProcAddr(name);
    if (proc)
        return proc;
    if (strcmp(name, "vkImportNativeFenceANDROID") == 0)
        return reinterpret_cast<PFN_vkVoidFunction>(ImportNativeFenceANDROID);
    if (strcmp(name, "vkQueueSignalNativeFenceANDROID") == 0)
        return reinterpret_cast<PFN_vkVoidFunction>(
            QueueSignalNativeFenceANDROID);
    return nullptr;
}

// -----------------------------------------------------------------------------
// Instance

VkResult DestroyInstance(VkInstance instance) {
    instance->alloc->pfnFree(instance->alloc->pUserData, instance);
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// PhysicalDevice

VkResult EnumeratePhysicalDevices(VkInstance instance,
                                  uint32_t* physical_device_count,
                                  VkPhysicalDevice* physical_devices) {
    if (physical_devices && *physical_device_count >= 1)
        physical_devices[0] = &instance->physical_device;
    *physical_device_count = 1;
    return VK_SUCCESS;
}

VkResult GetPhysicalDeviceProperties(VkPhysicalDevice,
                                     VkPhysicalDeviceProperties* properties) {
    properties->apiVersion = VK_API_VERSION;
    properties->driverVersion = VK_MAKE_VERSION(0, 0, 1);
    properties->vendorId = 0;
    properties->deviceId = 0;
    properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
    strcpy(properties->deviceName, "Android Vulkan Null Driver");
    memset(properties->pipelineCacheUUID, 0,
           sizeof(properties->pipelineCacheUUID));
    return VK_SUCCESS;
}

VkResult GetPhysicalDeviceQueueFamilyProperties(
    VkPhysicalDevice,
    uint32_t* count,
    VkQueueFamilyProperties* properties) {
    if (properties) {
        if (*count < 1)
            return VK_INCOMPLETE;
        properties->queueFlags =
            VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_DMA_BIT;
        properties->queueCount = 1;
        properties->supportsTimestamps = VK_FALSE;
    }
    *count = 1;
    return VK_SUCCESS;
}

VkResult GetPhysicalDeviceMemoryProperties(
    VkPhysicalDevice,
    VkPhysicalDeviceMemoryProperties* properties) {
    properties->memoryTypeCount = 1;
    properties->memoryTypes[0].propertyFlags =
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    properties->memoryTypes[0].heapIndex = 0;
    properties->memoryHeapCount = 1;
    properties->memoryHeaps[0].size = kMaxDeviceMemory;
    properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_HOST_LOCAL_BIT;
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// Device

VkResult CreateDevice(VkPhysicalDevice physical_device,
                      const VkDeviceCreateInfo*,
                      VkDevice* out_device) {
    VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device);
    VkDevice_T* device = static_cast<VkDevice_T*>(instance->alloc->pfnAlloc(
        instance->alloc->pUserData, sizeof(VkDevice_T), alignof(VkDevice_T),
        VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
    if (!device)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    device->instance = instance;
    device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    std::fill(device->next_handle.begin(), device->next_handle.end(),
              UINT64_C(0));

    *out_device = device;
    return VK_SUCCESS;
}

VkResult DestroyDevice(VkDevice device) {
    if (!device)
        return VK_SUCCESS;
    const VkAllocCallbacks* alloc = device->instance->alloc;
    alloc->pfnFree(alloc->pUserData, device);
    return VK_SUCCESS;
}

VkResult GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) {
    *queue = &device->queue;
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// CmdBuffer

VkResult CreateCommandBuffer(VkDevice device,
                             const VkCmdBufferCreateInfo*,
                             VkCmdBuffer* out_cmdbuf) {
    const VkAllocCallbacks* alloc = device->instance->alloc;
    VkCmdBuffer_T* cmdbuf = static_cast<VkCmdBuffer_T*>(alloc->pfnAlloc(
        alloc->pUserData, sizeof(VkCmdBuffer_T), alignof(VkCmdBuffer_T),
        VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
    if (!cmdbuf)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    cmdbuf->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    *out_cmdbuf = cmdbuf;
    return VK_SUCCESS;
}

VkResult DestroyCommandBuffer(VkDevice device, VkCmdBuffer cmdbuf) {
    const VkAllocCallbacks* alloc = device->instance->alloc;
    alloc->pfnFree(alloc->pUserData, cmdbuf);
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// DeviceMemory

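// DeviceMemory is one of the few stateful objects: the allocation's payload
// lives in data[] immediately after this header, and MapMemory hands back a
// pointer into it.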
struct DeviceMemory {
    typedef VkDeviceMemory HandleType;
    VkDeviceSize size;
    alignas(16) uint8_t data[0];
};
template <>
struct HandleTraits<VkDeviceMemory> {
    typedef DeviceMemory* PointerType;
};

VkResult AllocMemory(VkDevice device,
                     const VkMemoryAllocInfo* alloc_info,
                     VkDeviceMemory* mem_handle) {
    if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    const VkAllocCallbacks* alloc = device->instance->alloc;
    size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize);
    DeviceMemory* mem = static_cast<DeviceMemory*>(
        alloc->pfnAlloc(alloc->pUserData, size, alignof(DeviceMemory),
                        VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
    if (!mem)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    mem->size = size;
    *mem_handle = GetHandleToObject(mem);
    return VK_SUCCESS;
}

VkResult FreeMemory(VkDevice device, VkDeviceMemory mem_handle) {
    const VkAllocCallbacks* alloc = device->instance->alloc;
    DeviceMemory* mem = GetObjectFromHandle(mem_handle);
    alloc->pfnFree(alloc->pUserData, mem);
    return VK_SUCCESS;
}

VkResult MapMemory(VkDevice,
                   VkDeviceMemory mem_handle,
                   VkDeviceSize offset,
                   VkDeviceSize,
                   VkMemoryMapFlags,
                   void** out_ptr) {
    DeviceMemory* mem = GetObjectFromHandle(mem_handle);
    *out_ptr = &mem->data[0] + offset;
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// Buffer

struct Buffer {
    typedef VkBuffer HandleType;
    VkDeviceSize size;
};
template <>
struct HandleTraits<VkBuffer> {
    typedef Buffer* PointerType;
};

VkResult CreateBuffer(VkDevice device,
                      const VkBufferCreateInfo* create_info,
                      VkBuffer* buffer_handle) {
    ALOGW_IF(create_info->size > kMaxDeviceMemory,
             "CreateBuffer: requested size 0x%" PRIx64
             " exceeds max device memory size 0x%" PRIx64,
             create_info->size, kMaxDeviceMemory);

    const VkAllocCallbacks* alloc = device->instance->alloc;
    Buffer* buffer = static_cast<Buffer*>(
        alloc->pfnAlloc(alloc->pUserData, sizeof(Buffer), alignof(Buffer),
                        VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
    if (!buffer)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    buffer->size = create_info->size;
    *buffer_handle = GetHandleToObject(buffer);
    return VK_SUCCESS;
}

VkResult GetBufferMemoryRequirements(VkDevice,
                                     VkBuffer buffer_handle,
                                     VkMemoryRequirements* requirements) {
    Buffer* buffer = GetObjectFromHandle(buffer_handle);
    requirements->size = buffer->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
    return VK_SUCCESS;
}

VkResult DestroyBuffer(VkDevice device, VkBuffer buffer_handle) {
    const VkAllocCallbacks* alloc = device->instance->alloc;
    Buffer* buffer = GetObjectFromHandle(buffer_handle);
    alloc->pfnFree(alloc->pUserData, buffer);
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// Image

struct Image {
    typedef VkImage HandleType;
    VkDeviceSize size;
};
template <>
struct HandleTraits<VkImage> {
    typedef Image* PointerType;
};

VkResult CreateImage(VkDevice device,
                     const VkImageCreateInfo* create_info,
                     VkImage* image_handle) {
    if (create_info->imageType != VK_IMAGE_TYPE_2D ||
        create_info->format != VK_FORMAT_R8G8B8A8_UNORM ||
        create_info->mipLevels != 1) {
        ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u",
              create_info->imageType, create_info->format,
              create_info->mipLevels);
        return VK_UNSUPPORTED;
    }

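    // 4 bytes per texel: R8G8B8A8 is the only format accepted above.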
    VkDeviceSize size =
        VkDeviceSize(create_info->extent.width) * create_info->extent.height *
        create_info->arraySize * create_info->samples * 4u;
    ALOGW_IF(size > kMaxDeviceMemory,
             "CreateImage: image size 0x%" PRIx64
             " exceeds max device memory size 0x%" PRIx64,
             size, kMaxDeviceMemory);

    const VkAllocCallbacks* alloc = device->instance->alloc;
    Image* image = static_cast<Image*>(
        alloc->pfnAlloc(alloc->pUserData, sizeof(Image), alignof(Image),
                        VK_SYSTEM_ALLOC_TYPE_API_OBJECT));
    if (!image)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    image->size = size;
    *image_handle = GetHandleToObject(image);
    return VK_SUCCESS;
}

VkResult GetImageMemoryRequirements(VkDevice,
                                    VkImage image_handle,
                                    VkMemoryRequirements* requirements) {
    Image* image = GetObjectFromHandle(image_handle);
    requirements->size = image->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
    return VK_SUCCESS;
}

VkResult DestroyImage(VkDevice device, VkImage image_handle) {
    const VkAllocCallbacks* alloc = device->instance->alloc;
    Image* image = GetObjectFromHandle(image_handle);
    alloc->pfnFree(alloc->pUserData, image);
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// No-op types

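// None of the objects below carry any state in the null driver, so creation
// just hands back a fresh tagged-counter handle from AllocHandle; the
// corresponding Destroy* entrypoints further down are no-ops.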
VkResult CreateBufferView(VkDevice device,
                          const VkBufferViewCreateInfo*,
                          VkBufferView* view) {
    *view = AllocHandle(device, HandleType::kBufferView);
    return VK_SUCCESS;
}

VkResult CreateCommandPool(VkDevice device,
                           const VkCmdPoolCreateInfo*,
                           VkCmdPool* pool) {
    *pool = AllocHandle(device, HandleType::kCmdPool);
    return VK_SUCCESS;
}

VkResult CreateDescriptorPool(VkDevice device,
                              const VkDescriptorPoolCreateInfo*,
                              VkDescriptorPool* pool) {
    *pool = AllocHandle(device, HandleType::kDescriptorPool);
    return VK_SUCCESS;
}

VkResult AllocDescriptorSets(VkDevice device,
                             VkDescriptorPool,
                             VkDescriptorSetUsage,
                             uint32_t count,
                             const VkDescriptorSetLayout*,
                             VkDescriptorSet* sets,
                             uint32_t* out_count) {
    for (uint32_t i = 0; i < count; i++)
        sets[i] = AllocHandle(device, HandleType::kDescriptorSet);
    *out_count = count;
    return VK_SUCCESS;
}

VkResult CreateDescriptorSetLayout(VkDevice device,
                                   const VkDescriptorSetLayoutCreateInfo*,
                                   VkDescriptorSetLayout* layout) {
    *layout = AllocHandle(device, HandleType::kDescriptorSetLayout);
    return VK_SUCCESS;
}

VkResult CreateEvent(VkDevice device,
                     const VkEventCreateInfo*,
                     VkEvent* event) {
    *event = AllocHandle(device, HandleType::kEvent);
    return VK_SUCCESS;
}

VkResult CreateFence(VkDevice device,
                     const VkFenceCreateInfo*,
                     VkFence* fence) {
    *fence = AllocHandle(device, HandleType::kFence);
    return VK_SUCCESS;
}

VkResult CreateFramebuffer(VkDevice device,
                           const VkFramebufferCreateInfo*,
                           VkFramebuffer* framebuffer) {
    *framebuffer = AllocHandle(device, HandleType::kFramebuffer);
    return VK_SUCCESS;
}

VkResult CreateImageView(VkDevice device,
                         const VkImageViewCreateInfo*,
                         VkImageView* view) {
    *view = AllocHandle(device, HandleType::kImageView);
    return VK_SUCCESS;
}

VkResult CreateGraphicsPipelines(VkDevice device,
                                 VkPipelineCache,
                                 uint32_t count,
                                 const VkGraphicsPipelineCreateInfo*,
                                 VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreateComputePipelines(VkDevice device,
                                VkPipelineCache,
                                uint32_t count,
                                const VkComputePipelineCreateInfo*,
                                VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreatePipelineCache(VkDevice device,
                             const VkPipelineCacheCreateInfo*,
                             VkPipelineCache* cache) {
    *cache = AllocHandle(device, HandleType::kPipelineCache);
    return VK_SUCCESS;
}

VkResult CreatePipelineLayout(VkDevice device,
                              const VkPipelineLayoutCreateInfo*,
                              VkPipelineLayout* layout) {
    *layout = AllocHandle(device, HandleType::kPipelineLayout);
    return VK_SUCCESS;
}

VkResult CreateQueryPool(VkDevice device,
                         const VkQueryPoolCreateInfo*,
                         VkQueryPool* pool) {
    *pool = AllocHandle(device, HandleType::kQueryPool);
    return VK_SUCCESS;
}

VkResult CreateRenderPass(VkDevice device,
                          const VkRenderPassCreateInfo*,
                          VkRenderPass* renderpass) {
    *renderpass = AllocHandle(device, HandleType::kRenderPass);
    return VK_SUCCESS;
}

VkResult CreateSampler(VkDevice device,
                       const VkSamplerCreateInfo*,
                       VkSampler* sampler) {
    *sampler = AllocHandle(device, HandleType::kSampler);
    return VK_SUCCESS;
}

VkResult CreateSemaphore(VkDevice device,
                         const VkSemaphoreCreateInfo*,
                         VkSemaphore* semaphore) {
    *semaphore = AllocHandle(device, HandleType::kSemaphore);
    return VK_SUCCESS;
}

VkResult CreateShader(VkDevice device,
                      const VkShaderCreateInfo*,
                      VkShader* shader) {
    *shader = AllocHandle(device, HandleType::kShader);
    return VK_SUCCESS;
}

VkResult CreateShaderModule(VkDevice device,
                            const VkShaderModuleCreateInfo*,
                            VkShaderModule* module) {
    *module = AllocHandle(device, HandleType::kShaderModule);
    return VK_SUCCESS;
}

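// Native sync fences: import has nothing to wait on, so the incoming fd is
// simply adopted and closed; signaling returns -1, which by the Android
// sync-fd convention means there is no fence to wait for.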
VkResult ImportNativeFenceANDROID(VkDevice, VkSemaphore, int fence) {
    if (fence >= 0)
        close(fence);
    return VK_SUCCESS;
}

VkResult QueueSignalNativeFenceANDROID(VkQueue, int* fence) {
    *fence = -1;
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// No-op entrypoints
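// Everything below is either a logged TODO stub or an intentional no-op; the
// VkResult entrypoints all report VK_SUCCESS so callers can proceed.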

// clang-format off
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunused-parameter"

VkResult GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageFormatProperties* pImageFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult GetPhysicalDeviceLimits(VkPhysicalDevice physicalDevice, VkPhysicalDeviceLimits* pLimits) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pCount, VkLayerProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pCount, VkExtensionProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult QueueSubmit(VkQueue queue, uint32_t cmdBufferCount, const VkCmdBuffer* pCmdBuffers, VkFence fence) {
    return VK_SUCCESS;
}

VkResult QueueWaitIdle(VkQueue queue) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DeviceWaitIdle(VkDevice device) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult UnmapMemory(VkDevice device, VkDeviceMemory mem) {
    return VK_SUCCESS;
}

VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

VkResult GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, uint32_t samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult QueueBindSparseBufferMemory(VkQueue queue, VkBuffer buffer, uint32_t numBindings, const VkSparseMemoryBindInfo* pBindInfo) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult QueueBindSparseImageOpaqueMemory(VkQueue queue, VkImage image, uint32_t numBindings, const VkSparseMemoryBindInfo* pBindInfo) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult QueueBindSparseImageMemory(VkQueue queue, VkImage image, uint32_t numBindings, const VkSparseImageMemoryBindInfo* pBindInfo) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DestroyFence(VkDevice device, VkFence fence) {
    return VK_SUCCESS;
}

VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {
    return VK_SUCCESS;
}

VkResult GetFenceStatus(VkDevice device, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {
    return VK_SUCCESS;
}

VkResult DestroySemaphore(VkDevice device, VkSemaphore semaphore) {
    return VK_SUCCESS;
}

VkResult QueueSignalSemaphore(VkQueue queue, VkSemaphore semaphore) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult QueueWaitSemaphore(VkQueue queue, VkSemaphore semaphore) {
    return VK_SUCCESS;
}

VkResult DestroyEvent(VkDevice device, VkEvent event) {
    return VK_SUCCESS;
}

VkResult GetEventStatus(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult SetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult ResetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DestroyQueryPool(VkDevice device, VkQueryPool queryPool) {
    return VK_SUCCESS;
}

VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t* pDataSize, void* pData, VkQueryResultFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DestroyBufferView(VkDevice device, VkBufferView bufferView) {
    return VK_SUCCESS;
}

VkResult GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DestroyImageView(VkDevice device, VkImageView imageView) {
    return VK_SUCCESS;
}

VkResult DestroyShaderModule(VkDevice device, VkShaderModule shaderModule) {
    return VK_SUCCESS;
}

VkResult DestroyShader(VkDevice device, VkShader shader) {
    return VK_SUCCESS;
}

VkResult DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache) {
    return VK_SUCCESS;
}

size_t GetPipelineCacheSize(VkDevice device, VkPipelineCache pipelineCache) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return 0;
}

VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, void* pData) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DestroyPipeline(VkDevice device, VkPipeline pipeline) {
    return VK_SUCCESS;
}

VkResult DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout) {
    return VK_SUCCESS;
}

VkResult DestroySampler(VkDevice device, VkSampler sampler) {
    return VK_SUCCESS;
}

VkResult DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout) {
    return VK_SUCCESS;
}

VkResult DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool) {
    return VK_SUCCESS;
}

VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer) {
    return VK_SUCCESS;
}

VkResult DestroyRenderPass(VkDevice device, VkRenderPass renderPass) {
    return VK_SUCCESS;
}

VkResult GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DestroyCommandPool(VkDevice device, VkCmdPool cmdPool) {
    return VK_SUCCESS;
}

VkResult ResetCommandPool(VkDevice device, VkCmdPool cmdPool, VkCmdPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult BeginCommandBuffer(VkCmdBuffer cmdBuffer, const VkCmdBufferBeginInfo* pBeginInfo) {
    return VK_SUCCESS;
}

VkResult EndCommandBuffer(VkCmdBuffer cmdBuffer) {
    return VK_SUCCESS;
}

VkResult ResetCommandBuffer(VkCmdBuffer cmdBuffer, VkCmdBufferResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void CmdBindPipeline(VkCmdBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
}

void CmdSetViewport(VkCmdBuffer cmdBuffer, uint32_t viewportCount, const VkViewport* pViewports) {
}

void CmdSetScissor(VkCmdBuffer cmdBuffer, uint32_t scissorCount, const VkRect2D* pScissors) {
}

void CmdSetLineWidth(VkCmdBuffer cmdBuffer, float lineWidth) {
}

void CmdSetDepthBias(VkCmdBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
}

void CmdSetBlendConstants(VkCmdBuffer cmdBuffer, const float blendConst[4]) {
}

void CmdSetDepthBounds(VkCmdBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
}

void CmdSetStencilCompareMask(VkCmdBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
}

void CmdSetStencilWriteMask(VkCmdBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
}

void CmdSetStencilReference(VkCmdBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
}

void CmdBindDescriptorSets(VkCmdBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
}

void CmdBindIndexBuffer(VkCmdBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
}

void CmdBindVertexBuffers(VkCmdBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
}

void CmdDraw(VkCmdBuffer cmdBuffer, uint32_t firstVertex, uint32_t vertexCount, uint32_t firstInstance, uint32_t instanceCount) {
}

void CmdDrawIndexed(VkCmdBuffer cmdBuffer, uint32_t firstIndex, uint32_t indexCount, int32_t vertexOffset, uint32_t firstInstance, uint32_t instanceCount) {
}

void CmdDrawIndirect(VkCmdBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDrawIndexedIndirect(VkCmdBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDispatch(VkCmdBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
}

void CmdDispatchIndirect(VkCmdBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
}

void CmdCopyBuffer(VkCmdBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
}

void CmdCopyImage(VkCmdBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
}

void CmdBlitImage(VkCmdBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkTexFilter filter) {
}

void CmdCopyBufferToImage(VkCmdBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdCopyImageToBuffer(VkCmdBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdUpdateBuffer(VkCmdBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const uint32_t* pData) {
}

void CmdFillBuffer(VkCmdBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
}

void CmdClearColorImage(VkCmdBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearDepthStencilImage(VkCmdBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearColorAttachment(VkCmdBuffer cmdBuffer, uint32_t colorAttachment, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rectCount, const VkRect3D* pRects) {
}

void CmdClearDepthStencilAttachment(VkCmdBuffer cmdBuffer, VkImageAspectFlags aspectMask, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rectCount, const VkRect3D* pRects) {
}

void CmdResolveImage(VkCmdBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
}

void CmdSetEvent(VkCmdBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdResetEvent(VkCmdBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdWaitEvents(VkCmdBuffer cmdBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags destStageMask, uint32_t memBarrierCount, const void* const* ppMemBarriers) {
}

void CmdPipelineBarrier(VkCmdBuffer cmdBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags destStageMask, VkBool32 byRegion, uint32_t memBarrierCount, const void* const* ppMemBarriers) {
}

void CmdBeginQuery(VkCmdBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
}

void CmdEndQuery(VkCmdBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
}

void CmdResetQueryPool(VkCmdBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount) {
}

void CmdWriteTimestamp(VkCmdBuffer cmdBuffer, VkTimestampType timestampType, VkBuffer destBuffer, VkDeviceSize destOffset) {
}

void CmdCopyQueryPoolResults(VkCmdBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
}

void CmdPushConstants(VkCmdBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
}

void CmdBeginRenderPass(VkCmdBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkRenderPassContents contents) {
}

void CmdNextSubpass(VkCmdBuffer cmdBuffer, VkRenderPassContents contents) {
}

void CmdEndRenderPass(VkCmdBuffer cmdBuffer) {
}

void CmdExecuteCommands(VkCmdBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCmdBuffer* pCmdBuffers) {
}

#pragma clang diagnostic pop
// clang-format on

}  // namespace null_driver