null_driver.cpp revision e2948d83f352b9a093252a06757f76a88f5355d3
1/*
2 * Copyright 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <hardware/hwvulkan.h>
18
19#include <algorithm>
20#include <array>
21#include <inttypes.h>
22#include <string.h>
23
24#include <log/log.h>
25#include <utils/Errors.h>
26
27#include "null_driver_gen.h"
28
29using namespace null_driver;
30
// Dispatchable physical-device object. Its only state is the hwvulkan
// dispatch table slot that the loader patches.
struct VkPhysicalDevice_T {
    hwvulkan_dispatch_t dispatch;
};
34
// Dispatchable instance object. Stores the allocation callbacks used for all
// instance-lifetime allocations, embeds the driver's single physical device
// by value, and holds the counter used to mint debug-report callback handles.
struct VkInstance_T {
    hwvulkan_dispatch_t dispatch;
    VkAllocationCallbacks allocator;
    // Embedded by value: GetInstanceFromPhysicalDevice() recovers the
    // enclosing instance from this member's offset.
    VkPhysicalDevice_T physical_device;
    uint64_t next_callback_handle;
};
41
// Dispatchable queue object; stateless apart from the loader dispatch slot.
// One instance is embedded in each VkDevice_T.
struct VkQueue_T {
    hwvulkan_dispatch_t dispatch;
};
45
// Dispatchable command-buffer object; commands are discarded, so no state
// beyond the dispatch slot is needed.
struct VkCommandBuffer_T {
    hwvulkan_dispatch_t dispatch;
};
49
namespace {
// Handles for non-dispatchable objects are either pointers, or arbitrary
// 64-bit non-zero values. We only use pointers when we need to keep state for
// the object even in a null driver. For the rest, we form a handle as:
//   [63:63] = 1 to distinguish from pointer handles*
//   [62:56] = non-zero handle type enum value
//   [55: 0] = per-handle-type incrementing counter
// * This works because virtual addresses with the high bit set are reserved
// for kernel data in all ABIs we run on.
//
// We never reclaim handles on vkDestroy*. It's not even necessary for us to
// have distinct handles for live objects, and practically speaking we won't
// ever create 2^56 objects of the same type from a single VkDevice in a null
// driver.
//
// Using a namespace here instead of 'enum class' since we want scoped
// constants but also want implicit conversions to integral types.
//
// NOTE(review): kBufferView is 0, so the first type's field is actually zero
// despite the "non-zero" claim above — confirm whether that matters to any
// handle consumer.
namespace HandleType {
enum Enum {
    kBufferView,
    kDebugReportCallbackEXT,
    kDescriptorPool,
    kDescriptorSet,
    kDescriptorSetLayout,
    kEvent,
    kFence,
    kFramebuffer,
    kImageView,
    kPipeline,
    kPipelineCache,
    kPipelineLayout,
    kQueryPool,
    kRenderPass,
    kSampler,
    kSemaphore,
    kShaderModule,

    kNumTypes
};
}  // namespace HandleType

// Heap size advertised to applications by the single memory heap; nothing is
// actually reserved up front.
const VkDeviceSize kMaxDeviceMemory = 0x10000000;  // 256 MiB, arbitrary

}  // anonymous namespace
94
// Dispatchable device object. Copies the allocation callbacks chosen at
// creation, embeds its single queue, and keeps one handle counter per
// non-dispatchable handle type (see AllocHandle).
struct VkDevice_T {
    hwvulkan_dispatch_t dispatch;
    VkAllocationCallbacks allocator;
    VkInstance_T* instance;
    VkQueue_T queue;
    std::array<uint64_t, HandleType::kNumTypes> next_handle;
};
102
// -----------------------------------------------------------------------------
// Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device
// later.

namespace {
// Forward declaration; the definition follows nulldrv_device below.
int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device);
hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice};
}  // namespace

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
// HAL module descriptor looked up by the Android hardware module loader;
// must be exported with default visibility under this exact symbol name.
__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = {
    .common =
        {
            .tag = HARDWARE_MODULE_TAG,
            .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
            .hal_api_version = HARDWARE_HAL_API_VERSION,
            .id = HWVULKAN_HARDWARE_MODULE_ID,
            .name = "Null Vulkan Driver",
            .author = "The Android Open Source Project",
            .methods = &nulldrv_module_methods,
        },
};
#pragma clang diagnostic pop
127
128// -----------------------------------------------------------------------------
129
130namespace {
131
// hw_device_t close hook. Opening the null driver's device allocates no
// resources, so there is nothing to tear down here.
int CloseDevice(struct hw_device_t* /*device*/) {
    constexpr int kOk = 0;
    return kOk;
}
136
// The hwvulkan device: the three entry points the loader needs before an
// instance exists; everything else is reached via GetInstanceProcAddr.
hwvulkan_device_t nulldrv_device = {
    .common =
        {
            .tag = HARDWARE_DEVICE_TAG,
            .version = HWVULKAN_DEVICE_API_VERSION_0_1,
            .module = &HAL_MODULE_INFO_SYM.common,
            .close = CloseDevice,
        },
    .EnumerateInstanceExtensionProperties =
        EnumerateInstanceExtensionProperties,
    .CreateInstance = CreateInstance,
    .GetInstanceProcAddr = GetInstanceProcAddr};
149
150int OpenDevice(const hw_module_t* /*module*/,
151               const char* id,
152               hw_device_t** device) {
153    if (strcmp(id, HWVULKAN_DEVICE_0) == 0) {
154        *device = &nulldrv_device.common;
155        return 0;
156    }
157    return -ENOENT;
158}
159
// container_of-style recovery of the owning instance: the instance embeds
// its one physical device by value, so subtracting the member offset from
// the physical-device pointer yields the enclosing VkInstance_T.
VkInstance_T* GetInstanceFromPhysicalDevice(
    VkPhysicalDevice_T* physical_device) {
    return reinterpret_cast<VkInstance_T*>(
        reinterpret_cast<uintptr_t>(physical_device) -
        offsetof(VkInstance_T, physical_device));
}
166
167uint64_t AllocHandle(uint64_t type, uint64_t* next_handle) {
168    const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1;
169    ALOGE_IF(*next_handle == kHandleMask,
170             "non-dispatchable handles of type=%" PRIu64
171             " are about to overflow",
172             type);
173    return (UINT64_C(1) << 63) | ((type & 0x7) << 56) |
174           ((*next_handle)++ & kHandleMask);
175}
176
// Instance-scope handle allocator: all instance-level non-dispatchable
// objects (debug-report callbacks) share one counter on the instance.
template <class Handle>
Handle AllocHandle(VkInstance instance, HandleType::Enum type) {
    return reinterpret_cast<Handle>(
        AllocHandle(type, &instance->next_callback_handle));
}
182
// Device-scope handle allocator: each handle type draws from its own
// counter in the device's next_handle array.
template <class Handle>
Handle AllocHandle(VkDevice device, HandleType::Enum type) {
    return reinterpret_cast<Handle>(
        AllocHandle(type, &device->next_handle[type]));
}
188
189}  // namespace
190
191namespace null_driver {
192
// Generates the pointer<->handle conversion pair for a pointer-backed
// non-dispatchable object type T (whose handle is just its address).
// The extra declarations silence -Wmissing-prototypes.
#define DEFINE_OBJECT_HANDLE_CONVERSION(T)              \
    T* Get##T##FromHandle(Vk##T h);                     \
    T* Get##T##FromHandle(Vk##T h) {                    \
        return reinterpret_cast<T*>(uintptr_t(h));      \
    }                                                   \
    Vk##T GetHandleTo##T(const T* obj);                 \
    Vk##T GetHandleTo##T(const T* obj) {                \
        return Vk##T(reinterpret_cast<uintptr_t>(obj)); \
    }
202
203// -----------------------------------------------------------------------------
204// Global
205
VKAPI_ATTR
// Reports the driver's instance extensions. The loader handles layers, so a
// non-null layer_name indicates a misrouted call and is only logged.
VkResult EnumerateInstanceExtensionProperties(
    const char* layer_name,
    uint32_t* count,
    VkExtensionProperties* properties) {
    if (layer_name) {
        ALOGW(
            "Driver vkEnumerateInstanceExtensionProperties shouldn't be called "
            "with a layer name ('%s')",
            layer_name);
    }

// NOTE: Change this to zero to report an extension, which can be useful
// for testing changes to the loader.
#if 1
    (void)properties;  // unused
    *count = 0;
    return VK_SUCCESS;
#else
    const VkExtensionProperties kExtensions[] = {
        {VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
    const uint32_t kExtensionsCount =
        sizeof(kExtensions) / sizeof(kExtensions[0]);

    // Standard two-call idiom: clamp the caller's count, copy what fits, and
    // report VK_INCOMPLETE if the array was too small.
    if (!properties || *count > kExtensionsCount)
        *count = kExtensionsCount;
    if (properties)
        std::copy(kExtensions, kExtensions + *count, properties);
    return *count < kExtensionsCount ? VK_INCOMPLETE : VK_SUCCESS;
#endif
}
237
VKAPI_ATTR
// Creates the instance: a single allocation (through the provided callbacks)
// holding the dispatch slots, a copy of the callbacks for DestroyInstance,
// the embedded physical device, and the callback-handle counter.
VkResult CreateInstance(const VkInstanceCreateInfo* create_info,
                        const VkAllocationCallbacks* allocator,
                        VkInstance* out_instance) {
    // Assume the loader provided alloc callbacks even if the app didn't.
    ALOG_ASSERT(
        allocator,
        "Missing alloc callbacks, loader or app should have provided them");

    VkInstance_T* instance =
        static_cast<VkInstance_T*>(allocator->pfnAllocation(
            allocator->pUserData, sizeof(VkInstance_T), alignof(VkInstance_T),
            VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE));
    if (!instance)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    instance->allocator = *allocator;
    instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    instance->next_callback_handle = 0;

    // Only VK_EXT_debug_report is recognized; unknown extensions are logged
    // but do not fail instance creation.
    for (uint32_t i = 0; i < create_info->enabledExtensionCount; i++) {
        if (strcmp(create_info->ppEnabledExtensionNames[i],
                   VK_EXT_DEBUG_REPORT_EXTENSION_NAME) == 0) {
            ALOGV("instance extension '%s' requested",
                  create_info->ppEnabledExtensionNames[i]);
        } else {
            ALOGW("unsupported extension '%s' requested",
                  create_info->ppEnabledExtensionNames[i]);
        }
    }

    *out_instance = instance;
    return VK_SUCCESS;
}
273
VKAPI_ATTR
// Dispatches to the generated lookup tables (null_driver_gen.h): with a null
// instance only global commands are resolvable, per the Vulkan contract.
PFN_vkVoidFunction GetInstanceProcAddr(VkInstance instance, const char* name) {
    return instance ? GetInstanceProcAddr(name) : GetGlobalProcAddr(name);
}
278
VKAPI_ATTR
// Device-level lookup reuses the instance table; the null driver has no
// per-device specializations.
PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) {
    return GetInstanceProcAddr(name);
}
283
284// -----------------------------------------------------------------------------
285// Instance
286
// Frees the instance through the callbacks captured at creation time; the
// allocator parameter is ignored since the stored copy is authoritative.
void DestroyInstance(VkInstance instance,
                     const VkAllocationCallbacks* /*allocator*/) {
    instance->allocator.pfnFree(instance->allocator.pUserData, instance);
}
291
292// -----------------------------------------------------------------------------
293// PhysicalDevice
294
295VkResult EnumeratePhysicalDevices(VkInstance instance,
296                                  uint32_t* physical_device_count,
297                                  VkPhysicalDevice* physical_devices) {
298    if (physical_devices && *physical_device_count >= 1)
299        physical_devices[0] = &instance->physical_device;
300    *physical_device_count = 1;
301    return VK_SUCCESS;
302}
303
304VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice /*gpu*/,
305                                        uint32_t* count,
306                                        VkLayerProperties* /*properties*/) {
307    ALOGW("Driver vkEnumerateDeviceLayerProperties shouldn't be called");
308    *count = 0;
309    return VK_SUCCESS;
310}
311
// Reports the device extensions: just VK_ANDROID_native_buffer. A non-null
// layer_name means the loader misrouted the call; log it and report none.
VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice /*gpu*/,
                                            const char* layer_name,
                                            uint32_t* count,
                                            VkExtensionProperties* properties) {
    if (layer_name) {
        ALOGW(
            "Driver vkEnumerateDeviceExtensionProperties shouldn't be called "
            "with a layer name ('%s')",
            layer_name);
        *count = 0;
        return VK_SUCCESS;
    }

    const VkExtensionProperties kExtensions[] = {
        {VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME,
         VK_ANDROID_NATIVE_BUFFER_SPEC_VERSION}};
    const uint32_t kExtensionsCount =
        sizeof(kExtensions) / sizeof(kExtensions[0]);

    // Two-call idiom: clamp the caller's count, copy what fits, and report
    // VK_INCOMPLETE if the caller's array was too small.
    if (!properties || *count > kExtensionsCount)
        *count = kExtensionsCount;
    if (properties)
        std::copy(kExtensions, kExtensions + *count, properties);
    return *count < kExtensionsCount ? VK_INCOMPLETE : VK_SUCCESS;
}
337
// Fills in fabricated but spec-plausible device properties. The limits table
// below is positional over VkPhysicalDeviceLimits; keep the trailing
// per-field comments in sync with the header's member order if edited.
void GetPhysicalDeviceProperties(VkPhysicalDevice,
                                 VkPhysicalDeviceProperties* properties) {
    properties->apiVersion = VK_API_VERSION;
    properties->driverVersion = VK_MAKE_VERSION(0, 0, 1);
    properties->vendorID = 0;
    properties->deviceID = 0;
    properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
    // Fixed string, well under VK_MAX_PHYSICAL_DEVICE_NAME_SIZE.
    strcpy(properties->deviceName, "Android Vulkan Null Driver");
    memset(properties->pipelineCacheUUID, 0,
           sizeof(properties->pipelineCacheUUID));
    properties->limits = VkPhysicalDeviceLimits{
        4096,     // maxImageDimension1D
        4096,     // maxImageDimension2D
        256,      // maxImageDimension3D
        4096,     // maxImageDimensionCube
        256,      // maxImageArrayLayers
        65536,    // maxTexelBufferElements
        16384,    // maxUniformBufferRange
        1 << 27,  // maxStorageBufferRange
        128,      // maxPushConstantsSize
        4096,     // maxMemoryAllocationCount
        4000,     // maxSamplerAllocationCount
        1,        // bufferImageGranularity
        0,        // sparseAddressSpaceSize
        4,        // maxBoundDescriptorSets
        16,       // maxPerStageDescriptorSamplers
        12,       // maxPerStageDescriptorUniformBuffers
        4,        // maxPerStageDescriptorStorageBuffers
        16,       // maxPerStageDescriptorSampledImages
        4,        // maxPerStageDescriptorStorageImages
        4,        // maxPerStageDescriptorInputAttachments
        128,      // maxPerStageResources
        96,       // maxDescriptorSetSamplers
        72,       // maxDescriptorSetUniformBuffers
        8,        // maxDescriptorSetUniformBuffersDynamic
        24,       // maxDescriptorSetStorageBuffers
        4,        // maxDescriptorSetStorageBuffersDynamic
        96,       // maxDescriptorSetSampledImages
        24,       // maxDescriptorSetStorageImages
        4,        // maxDescriptorSetInputAttachments
        16,       // maxVertexInputAttributes
        16,       // maxVertexInputBindings
        2047,     // maxVertexInputAttributeOffset
        2048,     // maxVertexInputBindingStride
        64,       // maxVertexOutputComponents
        0,        // maxTessellationGenerationLevel
        0,        // maxTessellationPatchSize
        0,        // maxTessellationControlPerVertexInputComponents
        0,        // maxTessellationControlPerVertexOutputComponents
        0,        // maxTessellationControlPerPatchOutputComponents
        0,        // maxTessellationControlTotalOutputComponents
        0,        // maxTessellationEvaluationInputComponents
        0,        // maxTessellationEvaluationOutputComponents
        0,        // maxGeometryShaderInvocations
        0,        // maxGeometryInputComponents
        0,        // maxGeometryOutputComponents
        0,        // maxGeometryOutputVertices
        0,        // maxGeometryTotalOutputComponents
        64,       // maxFragmentInputComponents
        4,        // maxFragmentOutputAttachments
        0,        // maxFragmentDualSrcAttachments
        4,        // maxFragmentCombinedOutputResources
        16384,    // maxComputeSharedMemorySize
        {65536, 65536, 65536},  // maxComputeWorkGroupCount[3]
        128,                    // maxComputeWorkGroupInvocations
        {128, 128, 64},         // maxComputeWorkGroupSize[3]
        4,                      // subPixelPrecisionBits
        4,                      // subTexelPrecisionBits
        4,                      // mipmapPrecisionBits
        UINT32_MAX,             // maxDrawIndexedIndexValue
        1,                      // maxDrawIndirectCount
        2,                      // maxSamplerLodBias
        1,                      // maxSamplerAnisotropy
        1,                      // maxViewports
        {4096, 4096},           // maxViewportDimensions[2]
        {-8192.0f, 8191.0f},    // viewportBoundsRange[2]
        0,                      // viewportSubPixelBits
        64,                     // minMemoryMapAlignment
        256,                    // minTexelBufferOffsetAlignment
        256,                    // minUniformBufferOffsetAlignment
        256,                    // minStorageBufferOffsetAlignment
        -8,                     // minTexelOffset
        7,                      // maxTexelOffset
        0,                      // minTexelGatherOffset
        0,                      // maxTexelGatherOffset
        0.0f,                   // minInterpolationOffset
        0.0f,                   // maxInterpolationOffset
        0,                      // subPixelInterpolationOffsetBits
        4096,                   // maxFramebufferWidth
        4096,                   // maxFramebufferHeight
        256,                    // maxFramebufferLayers
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferColorSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferDepthSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferStencilSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferNoAttachmentsSampleCounts
        4,                          // maxColorAttachments
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // sampledImageColorSampleCounts
        VK_SAMPLE_COUNT_1_BIT,      // sampledImageIntegerSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // sampledImageDepthSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // sampledImageStencilSampleCounts
        VK_SAMPLE_COUNT_1_BIT,      // storageImageSampleCounts
        1,                          // maxSampleMaskWords
        VK_TRUE,                    // timestampComputeAndGraphics
        1,                          // timestampPeriod
        0,                          // maxClipDistances
        0,                          // maxCullDistances
        0,                          // maxCombinedClipAndCullDistances
        2,                          // discreteQueuePriorities
        {1.0f, 1.0f},               // pointSizeRange[2]
        {1.0f, 1.0f},               // lineWidthRange[2]
        0.0f,                       // pointSizeGranularity
        0.0f,                       // lineWidthGranularity
        VK_TRUE,                    // strictLines
        VK_TRUE,                    // standardSampleLocations
        1,                          // optimalBufferCopyOffsetAlignment
        1,                          // optimalBufferCopyRowPitchAlignment
        64,                         // nonCoherentAtomSize
    };
}
464
465void GetPhysicalDeviceQueueFamilyProperties(
466    VkPhysicalDevice,
467    uint32_t* count,
468    VkQueueFamilyProperties* properties) {
469    if (!properties || *count > 1)
470        *count = 1;
471    if (properties && *count == 1) {
472        properties->queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT |
473                                 VK_QUEUE_TRANSFER_BIT;
474        properties->queueCount = 1;
475        properties->timestampValidBits = 64;
476        properties->minImageTransferGranularity = VkExtent3D{1, 1, 1};
477    }
478}
479
// Advertises one memory type in one heap: device-local and fully
// host-accessible (visible, coherent, cached), sized kMaxDeviceMemory.
// All resources report memoryTypeBits = 0x1 to match.
void GetPhysicalDeviceMemoryProperties(
    VkPhysicalDevice,
    VkPhysicalDeviceMemoryProperties* properties) {
    properties->memoryTypeCount = 1;
    properties->memoryTypes[0].propertyFlags =
        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
        VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
        VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    properties->memoryTypes[0].heapIndex = 0;
    properties->memoryHeapCount = 1;
    properties->memoryHeaps[0].size = kMaxDeviceMemory;
    properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
}
494
// Reports the minimal feature set: only robustBufferAccess (the one feature
// Vulkan requires) is enabled. Positional init over VkPhysicalDeviceFeatures;
// keep the trailing comments in member order if edited.
void GetPhysicalDeviceFeatures(VkPhysicalDevice /*gpu*/,
                               VkPhysicalDeviceFeatures* features) {
    *features = VkPhysicalDeviceFeatures{
        VK_TRUE,   // robustBufferAccess
        VK_FALSE,  // fullDrawIndexUint32
        VK_FALSE,  // imageCubeArray
        VK_FALSE,  // independentBlend
        VK_FALSE,  // geometryShader
        VK_FALSE,  // tessellationShader
        VK_FALSE,  // sampleRateShading
        VK_FALSE,  // dualSrcBlend
        VK_FALSE,  // logicOp
        VK_FALSE,  // multiDrawIndirect
        VK_FALSE,  // drawIndirectFirstInstance
        VK_FALSE,  // depthClamp
        VK_FALSE,  // depthBiasClamp
        VK_FALSE,  // fillModeNonSolid
        VK_FALSE,  // depthBounds
        VK_FALSE,  // wideLines
        VK_FALSE,  // largePoints
        VK_FALSE,  // alphaToOne
        VK_FALSE,  // multiViewport
        VK_FALSE,  // samplerAnisotropy
        VK_FALSE,  // textureCompressionETC2
        VK_FALSE,  // textureCompressionASTC_LDR
        VK_FALSE,  // textureCompressionBC
        VK_FALSE,  // occlusionQueryPrecise
        VK_FALSE,  // pipelineStatisticsQuery
        VK_FALSE,  // vertexPipelineStoresAndAtomics
        VK_FALSE,  // fragmentStoresAndAtomics
        VK_FALSE,  // shaderTessellationAndGeometryPointSize
        VK_FALSE,  // shaderImageGatherExtended
        VK_FALSE,  // shaderStorageImageExtendedFormats
        VK_FALSE,  // shaderStorageImageMultisample
        VK_FALSE,  // shaderStorageImageReadWithoutFormat
        VK_FALSE,  // shaderStorageImageWriteWithoutFormat
        VK_FALSE,  // shaderUniformBufferArrayDynamicIndexing
        VK_FALSE,  // shaderSampledImageArrayDynamicIndexing
        VK_FALSE,  // shaderStorageBufferArrayDynamicIndexing
        VK_FALSE,  // shaderStorageImageArrayDynamicIndexing
        VK_FALSE,  // shaderClipDistance
        VK_FALSE,  // shaderCullDistance
        VK_FALSE,  // shaderFloat64
        VK_FALSE,  // shaderInt64
        VK_FALSE,  // shaderInt16
        VK_FALSE,  // shaderResourceResidency
        VK_FALSE,  // shaderResourceMinLod
        VK_FALSE,  // sparseBinding
        VK_FALSE,  // sparseResidencyBuffer
        VK_FALSE,  // sparseResidencyImage2D
        VK_FALSE,  // sparseResidencyImage3D
        VK_FALSE,  // sparseResidency2Samples
        VK_FALSE,  // sparseResidency4Samples
        VK_FALSE,  // sparseResidency8Samples
        VK_FALSE,  // sparseResidency16Samples
        VK_FALSE,  // sparseResidencyAliased
        VK_FALSE,  // variableMultisampleRate
        VK_FALSE,  // inheritedQueries
    };
}
555
556// -----------------------------------------------------------------------------
557// Device
558
// Creates the logical device: one allocation holding the dispatch slot, a
// copy of the chosen allocation callbacks, a back-pointer to the instance,
// the single embedded queue, and zeroed per-type handle counters.
VkResult CreateDevice(VkPhysicalDevice physical_device,
                      const VkDeviceCreateInfo* create_info,
                      const VkAllocationCallbacks* allocator,
                      VkDevice* out_device) {
    VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device);
    // Fall back to the instance's callbacks when the app didn't supply any.
    if (!allocator)
        allocator = &instance->allocator;
    VkDevice_T* device = static_cast<VkDevice_T*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(VkDevice_T), alignof(VkDevice_T),
        VK_SYSTEM_ALLOCATION_SCOPE_DEVICE));
    if (!device)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    device->allocator = *allocator;
    device->instance = instance;
    device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    std::fill(device->next_handle.begin(), device->next_handle.end(),
              UINT64_C(0));

    // Extensions are accepted silently; only the native-buffer extension is
    // acknowledged in the log.
    for (uint32_t i = 0; i < create_info->enabledExtensionCount; i++) {
        if (strcmp(create_info->ppEnabledExtensionNames[i],
                   VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME) == 0) {
            ALOGV("Enabling " VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME);
        }
    }

    *out_device = device;
    return VK_SUCCESS;
}
589
590void DestroyDevice(VkDevice device,
591                   const VkAllocationCallbacks* /*allocator*/) {
592    if (!device)
593        return;
594    device->allocator.pfnFree(device->allocator.pUserData, device);
595}
596
// Returns the device's single embedded queue; family and index are ignored
// since only one queue exists.
void GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) {
    *queue = &device->queue;
}
600
601// -----------------------------------------------------------------------------
602// CommandPool
603
// Pointer-backed command pool: stores only the allocation callbacks so that
// command buffers allocated from the pool use (and can be freed by) them.
struct CommandPool {
    typedef VkCommandPool HandleType;
    VkAllocationCallbacks allocator;
};
DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool)
609
// Allocates a CommandPool object, capturing the effective allocation
// callbacks (caller's, or the device's as fallback) for later use by
// AllocateCommandBuffers/DestroyCommandPool.
VkResult CreateCommandPool(VkDevice device,
                           const VkCommandPoolCreateInfo* /*create_info*/,
                           const VkAllocationCallbacks* allocator,
                           VkCommandPool* cmd_pool) {
    if (!allocator)
        allocator = &device->allocator;
    CommandPool* pool = static_cast<CommandPool*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(CommandPool), alignof(CommandPool),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!pool)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    pool->allocator = *allocator;
    *cmd_pool = GetHandleToCommandPool(pool);
    return VK_SUCCESS;
}
625
626void DestroyCommandPool(VkDevice /*device*/,
627                        VkCommandPool cmd_pool,
628                        const VkAllocationCallbacks* /*allocator*/) {
629    CommandPool* pool = GetCommandPoolFromHandle(cmd_pool);
630    pool->allocator.pfnFree(pool->allocator.pUserData, pool);
631}
632
633// -----------------------------------------------------------------------------
634// CmdBuffer
635
// Allocates commandBufferCount dispatchable command buffers from the pool's
// captured allocator. On partial failure, frees everything allocated so far
// and returns VK_ERROR_OUT_OF_HOST_MEMORY (the output array was pre-filled
// with nullptr so the cleanup loop can stop at the first unallocated slot).
VkResult AllocateCommandBuffers(VkDevice /*device*/,
                                const VkCommandBufferAllocateInfo* alloc_info,
                                VkCommandBuffer* cmdbufs) {
    VkResult result = VK_SUCCESS;
    CommandPool& pool = *GetCommandPoolFromHandle(alloc_info->commandPool);
    std::fill(cmdbufs, cmdbufs + alloc_info->commandBufferCount, nullptr);
    for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
        cmdbufs[i] =
            static_cast<VkCommandBuffer_T*>(pool.allocator.pfnAllocation(
                pool.allocator.pUserData, sizeof(VkCommandBuffer_T),
                alignof(VkCommandBuffer_T), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
        if (!cmdbufs[i]) {
            result = VK_ERROR_OUT_OF_HOST_MEMORY;
            break;
        }
        cmdbufs[i]->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    }
    if (result != VK_SUCCESS) {
        // Unwind: free the successfully-allocated prefix.
        for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
            if (!cmdbufs[i])
                break;
            pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
        }
    }
    return result;
}
662
663void FreeCommandBuffers(VkDevice /*device*/,
664                        VkCommandPool cmd_pool,
665                        uint32_t count,
666                        const VkCommandBuffer* cmdbufs) {
667    CommandPool& pool = *GetCommandPoolFromHandle(cmd_pool);
668    for (uint32_t i = 0; i < count; i++)
669        pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
670}
671
672// -----------------------------------------------------------------------------
673// DeviceMemory
674
// Pointer-backed device memory: a header followed by the mappable payload.
// data[0] is a zero-length-array (GNU extension) flexible member; alignas(16)
// matches the 16-byte alignment reported in *MemoryRequirements.
struct DeviceMemory {
    typedef VkDeviceMemory HandleType;
    VkDeviceSize size;
    alignas(16) uint8_t data[0];
};
DEFINE_OBJECT_HANDLE_CONVERSION(DeviceMemory)
681
// Allocates header + payload in one block via the effective callbacks.
// The up-front check rejects allocationSize values that would overflow
// size_t when the header is added (allocationSize is 64-bit even on 32-bit
// hosts).
VkResult AllocateMemory(VkDevice device,
                        const VkMemoryAllocateInfo* alloc_info,
                        const VkAllocationCallbacks* allocator,
                        VkDeviceMemory* mem_handle) {
    if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    if (!allocator)
        allocator = &device->allocator;

    size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize);
    DeviceMemory* mem = static_cast<DeviceMemory*>(allocator->pfnAllocation(
        allocator->pUserData, size, alignof(DeviceMemory),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!mem)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    // NOTE(review): stored size includes the header, not the requested
    // allocationSize — confirm no consumer interprets it as payload size.
    mem->size = size;
    *mem_handle = GetHandleToDeviceMemory(mem);
    return VK_SUCCESS;
}
701
702void FreeMemory(VkDevice device,
703                VkDeviceMemory mem_handle,
704                const VkAllocationCallbacks* allocator) {
705    if (!allocator)
706        allocator = &device->allocator;
707    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
708    allocator->pfnFree(allocator->pUserData, mem);
709}
710
// Host mapping is trivial: the payload already lives in host memory, so just
// return a pointer offset into the data array. Size and flags are ignored.
VkResult MapMemory(VkDevice,
                   VkDeviceMemory mem_handle,
                   VkDeviceSize offset,
                   VkDeviceSize,
                   VkMemoryMapFlags,
                   void** out_ptr) {
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    *out_ptr = &mem->data[0] + offset;
    return VK_SUCCESS;
}
721
722// -----------------------------------------------------------------------------
723// Buffer
724
// Pointer-backed buffer object: only the creation size is retained, for
// GetBufferMemoryRequirements.
struct Buffer {
    typedef VkBuffer HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Buffer)
730
// Creates a buffer object recording only its size. Oversized requests are
// warned about but not rejected (binding would fail later, not here).
VkResult CreateBuffer(VkDevice device,
                      const VkBufferCreateInfo* create_info,
                      const VkAllocationCallbacks* allocator,
                      VkBuffer* buffer_handle) {
    ALOGW_IF(create_info->size > kMaxDeviceMemory,
             "CreateBuffer: requested size 0x%" PRIx64
             " exceeds max device memory size 0x%" PRIx64,
             create_info->size, kMaxDeviceMemory);
    if (!allocator)
        allocator = &device->allocator;
    Buffer* buffer = static_cast<Buffer*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(Buffer), alignof(Buffer),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!buffer)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    buffer->size = create_info->size;
    *buffer_handle = GetHandleToBuffer(buffer);
    return VK_SUCCESS;
}
750
// Reports the recorded buffer size; 16-byte alignment matches DeviceMemory's
// payload alignment, and type bit 0 matches the single advertised memory
// type.
void GetBufferMemoryRequirements(VkDevice,
                                 VkBuffer buffer_handle,
                                 VkMemoryRequirements* requirements) {
    Buffer* buffer = GetBufferFromHandle(buffer_handle);
    requirements->size = buffer->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
}
759
760void DestroyBuffer(VkDevice device,
761                   VkBuffer buffer_handle,
762                   const VkAllocationCallbacks* allocator) {
763    if (!allocator)
764        allocator = &device->allocator;
765    Buffer* buffer = GetBufferFromHandle(buffer_handle);
766    allocator->pfnFree(allocator->pUserData, buffer);
767}
768
769// -----------------------------------------------------------------------------
770// Image
771
// Pointer-backed image object: only the computed byte size is retained, for
// GetImageMemoryRequirements.
struct Image {
    typedef VkImage HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Image)
777
778VkResult CreateImage(VkDevice device,
779                     const VkImageCreateInfo* create_info,
780                     const VkAllocationCallbacks* allocator,
781                     VkImage* image_handle) {
782    if (create_info->imageType != VK_IMAGE_TYPE_2D ||
783        create_info->format != VK_FORMAT_R8G8B8A8_UNORM ||
784        create_info->mipLevels != 1) {
785        ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u",
786              create_info->imageType, create_info->format,
787              create_info->mipLevels);
788        return VK_ERROR_OUT_OF_HOST_MEMORY;
789    }
790
791    VkDeviceSize size =
792        VkDeviceSize(create_info->extent.width * create_info->extent.height) *
793        create_info->arrayLayers * create_info->samples * 4u;
794    ALOGW_IF(size > kMaxDeviceMemory,
795             "CreateImage: image size 0x%" PRIx64
796             " exceeds max device memory size 0x%" PRIx64,
797             size, kMaxDeviceMemory);
798
799    if (!allocator)
800        allocator = &device->allocator;
801    Image* image = static_cast<Image*>(allocator->pfnAllocation(
802        allocator->pUserData, sizeof(Image), alignof(Image),
803        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
804    if (!image)
805        return VK_ERROR_OUT_OF_HOST_MEMORY;
806    image->size = size;
807    *image_handle = GetHandleToImage(image);
808    return VK_SUCCESS;
809}
810
811void GetImageMemoryRequirements(VkDevice,
812                                VkImage image_handle,
813                                VkMemoryRequirements* requirements) {
814    Image* image = GetImageFromHandle(image_handle);
815    requirements->size = image->size;
816    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
817    requirements->memoryTypeBits = 0x1;
818}
819
820void DestroyImage(VkDevice device,
821                  VkImage image_handle,
822                  const VkAllocationCallbacks* allocator) {
823    if (!allocator)
824        allocator = &device->allocator;
825    Image* image = GetImageFromHandle(image_handle);
826    allocator->pfnFree(allocator->pUserData, image);
827}
828
829VkResult GetSwapchainGrallocUsageANDROID(VkDevice,
830                                         VkFormat,
831                                         VkImageUsageFlags,
832                                         int* grallocUsage) {
833    // The null driver never reads or writes the gralloc buffer
834    *grallocUsage = 0;
835    return VK_SUCCESS;
836}
837
838VkResult AcquireImageANDROID(VkDevice,
839                             VkImage,
840                             int fence,
841                             VkSemaphore,
842                             VkFence) {
843    close(fence);
844    return VK_SUCCESS;
845}
846
847VkResult QueueSignalReleaseImageANDROID(VkQueue,
848                                        uint32_t,
849                                        const VkSemaphore*,
850                                        VkImage,
851                                        int* fence) {
852    *fence = -1;
853    return VK_SUCCESS;
854}
855
856// -----------------------------------------------------------------------------
857// No-op types
858
859VkResult CreateBufferView(VkDevice device,
860                          const VkBufferViewCreateInfo*,
861                          const VkAllocationCallbacks* /*allocator*/,
862                          VkBufferView* view) {
863    *view = AllocHandle<VkBufferView>(device, HandleType::kBufferView);
864    return VK_SUCCESS;
865}
866
867VkResult CreateDescriptorPool(VkDevice device,
868                              const VkDescriptorPoolCreateInfo*,
869                              const VkAllocationCallbacks* /*allocator*/,
870                              VkDescriptorPool* pool) {
871    *pool = AllocHandle<VkDescriptorPool>(device, HandleType::kDescriptorPool);
872    return VK_SUCCESS;
873}
874
875VkResult AllocateDescriptorSets(VkDevice device,
876                                const VkDescriptorSetAllocateInfo* alloc_info,
877                                VkDescriptorSet* descriptor_sets) {
878    for (uint32_t i = 0; i < alloc_info->descriptorSetCount; i++)
879        descriptor_sets[i] =
880            AllocHandle<VkDescriptorSet>(device, HandleType::kDescriptorSet);
881    return VK_SUCCESS;
882}
883
884VkResult CreateDescriptorSetLayout(VkDevice device,
885                                   const VkDescriptorSetLayoutCreateInfo*,
886                                   const VkAllocationCallbacks* /*allocator*/,
887                                   VkDescriptorSetLayout* layout) {
888    *layout = AllocHandle<VkDescriptorSetLayout>(
889        device, HandleType::kDescriptorSetLayout);
890    return VK_SUCCESS;
891}
892
893VkResult CreateEvent(VkDevice device,
894                     const VkEventCreateInfo*,
895                     const VkAllocationCallbacks* /*allocator*/,
896                     VkEvent* event) {
897    *event = AllocHandle<VkEvent>(device, HandleType::kEvent);
898    return VK_SUCCESS;
899}
900
901VkResult CreateFence(VkDevice device,
902                     const VkFenceCreateInfo*,
903                     const VkAllocationCallbacks* /*allocator*/,
904                     VkFence* fence) {
905    *fence = AllocHandle<VkFence>(device, HandleType::kFence);
906    return VK_SUCCESS;
907}
908
909VkResult CreateFramebuffer(VkDevice device,
910                           const VkFramebufferCreateInfo*,
911                           const VkAllocationCallbacks* /*allocator*/,
912                           VkFramebuffer* framebuffer) {
913    *framebuffer = AllocHandle<VkFramebuffer>(device, HandleType::kFramebuffer);
914    return VK_SUCCESS;
915}
916
917VkResult CreateImageView(VkDevice device,
918                         const VkImageViewCreateInfo*,
919                         const VkAllocationCallbacks* /*allocator*/,
920                         VkImageView* view) {
921    *view = AllocHandle<VkImageView>(device, HandleType::kImageView);
922    return VK_SUCCESS;
923}
924
925VkResult CreateGraphicsPipelines(VkDevice device,
926                                 VkPipelineCache,
927                                 uint32_t count,
928                                 const VkGraphicsPipelineCreateInfo*,
929                                 const VkAllocationCallbacks* /*allocator*/,
930                                 VkPipeline* pipelines) {
931    for (uint32_t i = 0; i < count; i++)
932        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
933    return VK_SUCCESS;
934}
935
936VkResult CreateComputePipelines(VkDevice device,
937                                VkPipelineCache,
938                                uint32_t count,
939                                const VkComputePipelineCreateInfo*,
940                                const VkAllocationCallbacks* /*allocator*/,
941                                VkPipeline* pipelines) {
942    for (uint32_t i = 0; i < count; i++)
943        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
944    return VK_SUCCESS;
945}
946
947VkResult CreatePipelineCache(VkDevice device,
948                             const VkPipelineCacheCreateInfo*,
949                             const VkAllocationCallbacks* /*allocator*/,
950                             VkPipelineCache* cache) {
951    *cache = AllocHandle<VkPipelineCache>(device, HandleType::kPipelineCache);
952    return VK_SUCCESS;
953}
954
955VkResult CreatePipelineLayout(VkDevice device,
956                              const VkPipelineLayoutCreateInfo*,
957                              const VkAllocationCallbacks* /*allocator*/,
958                              VkPipelineLayout* layout) {
959    *layout =
960        AllocHandle<VkPipelineLayout>(device, HandleType::kPipelineLayout);
961    return VK_SUCCESS;
962}
963
964VkResult CreateQueryPool(VkDevice device,
965                         const VkQueryPoolCreateInfo*,
966                         const VkAllocationCallbacks* /*allocator*/,
967                         VkQueryPool* pool) {
968    *pool = AllocHandle<VkQueryPool>(device, HandleType::kQueryPool);
969    return VK_SUCCESS;
970}
971
972VkResult CreateRenderPass(VkDevice device,
973                          const VkRenderPassCreateInfo*,
974                          const VkAllocationCallbacks* /*allocator*/,
975                          VkRenderPass* renderpass) {
976    *renderpass = AllocHandle<VkRenderPass>(device, HandleType::kRenderPass);
977    return VK_SUCCESS;
978}
979
980VkResult CreateSampler(VkDevice device,
981                       const VkSamplerCreateInfo*,
982                       const VkAllocationCallbacks* /*allocator*/,
983                       VkSampler* sampler) {
984    *sampler = AllocHandle<VkSampler>(device, HandleType::kSampler);
985    return VK_SUCCESS;
986}
987
988VkResult CreateSemaphore(VkDevice device,
989                         const VkSemaphoreCreateInfo*,
990                         const VkAllocationCallbacks* /*allocator*/,
991                         VkSemaphore* semaphore) {
992    *semaphore = AllocHandle<VkSemaphore>(device, HandleType::kSemaphore);
993    return VK_SUCCESS;
994}
995
996VkResult CreateShaderModule(VkDevice device,
997                            const VkShaderModuleCreateInfo*,
998                            const VkAllocationCallbacks* /*allocator*/,
999                            VkShaderModule* module) {
1000    *module = AllocHandle<VkShaderModule>(device, HandleType::kShaderModule);
1001    return VK_SUCCESS;
1002}
1003
1004VkResult CreateDebugReportCallbackEXT(VkInstance instance,
1005                                      const VkDebugReportCallbackCreateInfoEXT*,
1006                                      const VkAllocationCallbacks*,
1007                                      VkDebugReportCallbackEXT* callback) {
1008    *callback = AllocHandle<VkDebugReportCallbackEXT>(
1009        instance, HandleType::kDebugReportCallbackEXT);
1010    return VK_SUCCESS;
1011}
1012
1013// -----------------------------------------------------------------------------
1014// No-op entrypoints
1015
1016// clang-format off
1017#pragma clang diagnostic push
1018#pragma clang diagnostic ignored "-Wunused-parameter"
1019
// Unimplemented entrypoints: each either logs a verbose "TODO" and claims
// success, or returns VK_SUCCESS without logging at all.
void GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Submissions are discarded: no work is ever executed.
VkResult QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmitInfo, VkFence fence) {
    return VK_SUCCESS;
}

VkResult QueueWaitIdle(VkQueue queue) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DeviceWaitIdle(VkDevice device) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void UnmapMemory(VkDevice device, VkDeviceMemory mem) {
}

VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

// Binding is a no-op: buffers/images already carry all the state they need.
VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

void GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* allocator) {
}

VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {
    return VK_SUCCESS;
}

VkResult GetFenceStatus(VkDevice device, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Returns immediately: since nothing executes, fences are effectively
// always signalled.
VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {
    return VK_SUCCESS;
}
1101
// Empty Destroy* bodies are intentional for handle-only objects: handles are
// never reclaimed (see the handle-allocation comment at the top of this
// file) and there is no per-object state to free.
void DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* allocator) {
}

void DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* allocator) {
}

VkResult GetEventStatus(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult SetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult ResetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* allocator) {
}

VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* allocator) {
}

void GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* allocator) {
}

void DestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* allocator) {
}

VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* allocator) {
}

void DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* allocator) {
}

VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* allocator) {
}

void DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* allocator) {
}

void GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult ResetCommandPool(VkDevice device, VkCommandPool cmdPool, VkCommandPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Command buffer recording trivially succeeds: nothing is recorded.
VkResult BeginCommandBuffer(VkCommandBuffer cmdBuffer, const VkCommandBufferBeginInfo* pBeginInfo) {
    return VK_SUCCESS;
}

VkResult EndCommandBuffer(VkCommandBuffer cmdBuffer) {
    return VK_SUCCESS;
}

VkResult ResetCommandBuffer(VkCommandBuffer cmdBuffer, VkCommandBufferResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
1213
// All vkCmd* recording entrypoints are empty no-ops: no commands are
// recorded and nothing is ever executed.
void CmdBindPipeline(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
}

void CmdSetViewport(VkCommandBuffer cmdBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) {
}

void CmdSetScissor(VkCommandBuffer cmdBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) {
}

void CmdSetLineWidth(VkCommandBuffer cmdBuffer, float lineWidth) {
}

void CmdSetDepthBias(VkCommandBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
}

void CmdSetBlendConstants(VkCommandBuffer cmdBuffer, const float blendConst[4]) {
}

void CmdSetDepthBounds(VkCommandBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
}

void CmdSetStencilCompareMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
}

void CmdSetStencilWriteMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
}

void CmdSetStencilReference(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
}

void CmdBindDescriptorSets(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
}

void CmdBindIndexBuffer(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
}

void CmdBindVertexBuffers(VkCommandBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
}

void CmdDraw(VkCommandBuffer cmdBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
}

void CmdDrawIndexed(VkCommandBuffer cmdBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
}

void CmdDrawIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDrawIndexedIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDispatch(VkCommandBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
}

void CmdDispatchIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
}

void CmdCopyBuffer(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
}

void CmdCopyImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
}

void CmdBlitImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {
}

void CmdCopyBufferToImage(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdCopyImageToBuffer(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdUpdateBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const uint32_t* pData) {
}

void CmdFillBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
}

void CmdClearColorImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearDepthStencilImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearAttachments(VkCommandBuffer cmdBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {
}

void CmdResolveImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
}

void CmdSetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdResetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}

void CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}

void CmdBeginQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
}

void CmdEndQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
}

void CmdResetQueryPool(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount) {
}

void CmdWriteTimestamp(VkCommandBuffer cmdBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t slot) {
}

void CmdCopyQueryPoolResults(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
}

void CmdPushConstants(VkCommandBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
}

void CmdBeginRenderPass(VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {
}

void CmdNextSubpass(VkCommandBuffer cmdBuffer, VkSubpassContents contents) {
}

void CmdEndRenderPass(VkCommandBuffer cmdBuffer) {
}

void CmdExecuteCommands(VkCommandBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCommandBuffer* pCmdBuffers) {
}

// Debug-report callbacks are never invoked, so destruction and message
// delivery have nothing to do.
void DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) {
}

void DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) {
}
1351
1352#pragma clang diagnostic pop
1353// clang-format on
1354
1355}  // namespace null_driver
1356