null_driver.cpp revision 45ba96e44c0d7d5dac6e86f0bfbfc44b4f2618ac
1/*
2 * Copyright 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <hardware/hwvulkan.h>
18#include <vulkan/vk_ext_debug_report.h>
19
20#include <algorithm>
21#include <array>
22#include <inttypes.h>
23#include <string.h>
#include <unistd.h>
24
25#include <log/log.h>
26#include <utils/Errors.h>
27
28#include "null_driver_gen.h"
29
30using namespace null_driver;
31
32struct VkPhysicalDevice_T {
33    hwvulkan_dispatch_t dispatch;
34};
35
36struct VkInstance_T {
37    hwvulkan_dispatch_t dispatch;
38    VkAllocationCallbacks allocator;
39    VkPhysicalDevice_T physical_device;
40    uint64_t next_callback_handle;
41};
42
43struct VkQueue_T {
44    hwvulkan_dispatch_t dispatch;
45};
46
47struct VkCommandBuffer_T {
48    hwvulkan_dispatch_t dispatch;
49};
50
51namespace {
52// Handles for non-dispatchable objects are either pointers, or arbitrary
53// 64-bit non-zero values. We only use pointers when we need to keep state for
54// the object even in a null driver. For the rest, we form a handle as:
55//   [63:63] = 1 to distinguish from pointer handles*
56//   [62:56] = handle type enum value
57//   [55: 0] = per-handle-type incrementing counter
58// * This works because virtual addresses with the high bit set are reserved
59// for kernel data in all ABIs we run on.
60//
61// We never reclaim handles on vkDestroy*. It's not even necessary for us to
62// have distinct handles for live objects, and practically speaking we won't
63// ever create 2^56 objects of the same type from a single VkDevice in a null
64// driver.
65//
66// Using a namespace here instead of 'enum class' since we want scoped
67// constants but also want implicit conversions to integral types.
68namespace HandleType {
69enum Enum {
70    kBufferView,
71    kDebugReportCallbackEXT,
72    kDescriptorPool,
73    kDescriptorSet,
74    kDescriptorSetLayout,
75    kEvent,
76    kFence,
77    kFramebuffer,
78    kImageView,
79    kPipeline,
80    kPipelineCache,
81    kPipelineLayout,
82    kQueryPool,
83    kRenderPass,
84    kSampler,
85    kSemaphore,
86    kShaderModule,
87
88    kNumTypes
89};
90}  // namespace HandleType
91
92const VkDeviceSize kMaxDeviceMemory = 0x10000000;  // 256 MiB, arbitrary
93
94}  // anonymous namespace
95
96struct VkDevice_T {
97    hwvulkan_dispatch_t dispatch;
98    VkAllocationCallbacks allocator;
99    VkInstance_T* instance;
100    VkQueue_T queue;
101    std::array<uint64_t, HandleType::kNumTypes> next_handle;
102};
103
104// -----------------------------------------------------------------------------
105// Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device
106// later.
107
108namespace {
109int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device);
110hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice};
111}  // namespace
112
113#pragma clang diagnostic push
114#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
115__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = {
116    .common =
117        {
118            .tag = HARDWARE_MODULE_TAG,
119            .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
120            .hal_api_version = HARDWARE_HAL_API_VERSION,
121            .id = HWVULKAN_HARDWARE_MODULE_ID,
122            .name = "Null Vulkan Driver",
123            .author = "The Android Open Source Project",
124            .methods = &nulldrv_module_methods,
125        },
126};
127#pragma clang diagnostic pop
128
129// -----------------------------------------------------------------------------
130
131namespace {
132
133int CloseDevice(struct hw_device_t* /*device*/) {
134    // nothing to do - opening a device doesn't allocate any resources
135    return 0;
136}
137
138hwvulkan_device_t nulldrv_device = {
139    .common =
140        {
141            .tag = HARDWARE_DEVICE_TAG,
142            .version = HWVULKAN_DEVICE_API_VERSION_0_1,
143            .module = &HAL_MODULE_INFO_SYM.common,
144            .close = CloseDevice,
145        },
146    .EnumerateInstanceExtensionProperties =
147        EnumerateInstanceExtensionProperties,
148    .CreateInstance = CreateInstance,
149    .GetInstanceProcAddr = GetInstanceProcAddr};
150
151int OpenDevice(const hw_module_t* /*module*/,
152               const char* id,
153               hw_device_t** device) {
154    if (strcmp(id, HWVULKAN_DEVICE_0) == 0) {
155        *device = &nulldrv_device.common;
156        return 0;
157    }
158    return -ENOENT;
159}
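
// Illustrative sketch (not part of this file): roughly how a client such as
// the Vulkan loader locates and opens this HAL device. Exact loader behavior
// may differ; error handling is omitted.
//
//     const hwvulkan_module_t* module = nullptr;
//     hw_get_module(HWVULKAN_HARDWARE_MODULE_ID,
//                   reinterpret_cast<const hw_module_t**>(&module));
//     hwvulkan_device_t* device = nullptr;
//     module->common.methods->open(
//         &module->common, HWVULKAN_DEVICE_0,
//         reinterpret_cast<hw_device_t**>(&device));
//     // device->CreateInstance, device->GetInstanceProcAddr, etc. are now
//     // usable; device->common.close(&device->common) releases the device.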
160
161VkInstance_T* GetInstanceFromPhysicalDevice(
162    VkPhysicalDevice_T* physical_device) {
163    return reinterpret_cast<VkInstance_T*>(
164        reinterpret_cast<uintptr_t>(physical_device) -
165        offsetof(VkInstance_T, physical_device));
166}
167
168uint64_t AllocHandle(uint64_t type, uint64_t* next_handle) {
169    const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1;
170    ALOGE_IF(*next_handle == kHandleMask,
171             "non-dispatchable handles of type=%" PRIu64
172             " are about to overflow",
173             type);
174    return (UINT64_C(1) << 63) | ((type & 0x7F) << 56) |
175           ((*next_handle)++ & kHandleMask);
176}
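
// Illustrative sketch (not part of the driver): pulling apart the fields of a
// handle produced above, assuming the bit layout documented at the top of this
// namespace (bit 63 set, type in [62:56], counter in [55:0]).
//
//     uint64_t next = 0;
//     uint64_t h = AllocHandle(HandleType::kFence, &next);
//     bool encoded = (h >> 63) & 1;                      // true
//     uint64_t type = (h >> 56) & 0x7F;                  // HandleType::kFence
//     uint64_t counter = h & ((UINT64_C(1) << 56) - 1);  // 0, then 1, 2, ...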
177
178template <class Handle>
179Handle AllocHandle(VkInstance instance, HandleType::Enum type) {
180    return reinterpret_cast<Handle>(
181        AllocHandle(type, &instance->next_callback_handle));
182}
183
184template <class Handle>
185Handle AllocHandle(VkDevice device, HandleType::Enum type) {
186    return reinterpret_cast<Handle>(
187        AllocHandle(type, &device->next_handle[type]));
188}
189
190}  // namespace
191
192namespace null_driver {
193
194#define DEFINE_OBJECT_HANDLE_CONVERSION(T)              \
195    T* Get##T##FromHandle(Vk##T h);                     \
196    T* Get##T##FromHandle(Vk##T h) {                    \
197        return reinterpret_cast<T*>(uintptr_t(h));      \
198    }                                                   \
199    Vk##T GetHandleTo##T(const T* obj);                 \
200    Vk##T GetHandleTo##T(const T* obj) {                \
201        return Vk##T(reinterpret_cast<uintptr_t>(obj)); \
202    }
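
// For reference, DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool) (used below)
// expands roughly to this pair of helpers (forward declarations omitted):
//
//     CommandPool* GetCommandPoolFromHandle(VkCommandPool h) {
//         return reinterpret_cast<CommandPool*>(uintptr_t(h));
//     }
//     VkCommandPool GetHandleToCommandPool(const CommandPool* obj) {
//         return VkCommandPool(reinterpret_cast<uintptr_t>(obj));
//     }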
203
204// -----------------------------------------------------------------------------
205// Global
206
207VKAPI_ATTR
208VkResult EnumerateInstanceExtensionProperties(
209    const char* layer_name,
210    uint32_t* count,
211    VkExtensionProperties* properties) {
212    if (layer_name) {
213        ALOGW(
214            "Driver vkEnumerateInstanceExtensionProperties shouldn't be called "
215            "with a layer name ('%s')",
216            layer_name);
217    }
218
219// NOTE: Change the "#if 1" below to "#if 0" to report an extension, which
220// can be useful for testing changes to the loader.
221#if 1
222    (void)properties;  // unused
223    *count = 0;
224    return VK_SUCCESS;
225#else
226    const VkExtensionProperties kExtensions[] = {
227        {VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
228    const uint32_t kExtensionsCount =
229        sizeof(kExtensions) / sizeof(kExtensions[0]);
230
231    if (!properties || *count > kExtensionsCount)
232        *count = kExtensionsCount;
233    if (properties)
234        std::copy(kExtensions, kExtensions + *count, properties);
235    return *count < kExtensionsCount ? VK_INCOMPLETE : VK_SUCCESS;
236#endif
237}
238
239VKAPI_ATTR
240VkResult CreateInstance(const VkInstanceCreateInfo* create_info,
241                        const VkAllocationCallbacks* allocator,
242                        VkInstance* out_instance) {
243    // Assume the loader provided alloc callbacks even if the app didn't.
244    ALOG_ASSERT(
245        allocator,
246        "Missing alloc callbacks, loader or app should have provided them");
247
248    VkInstance_T* instance =
249        static_cast<VkInstance_T*>(allocator->pfnAllocation(
250            allocator->pUserData, sizeof(VkInstance_T), alignof(VkInstance_T),
251            VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE));
252    if (!instance)
253        return VK_ERROR_OUT_OF_HOST_MEMORY;
254
255    instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
256    instance->allocator = *allocator;
257    instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
258    instance->next_callback_handle = 0;
259
260    for (uint32_t i = 0; i < create_info->enabledExtensionCount; i++) {
261        if (strcmp(create_info->ppEnabledExtensionNames[i],
262                   VK_EXT_DEBUG_REPORT_EXTENSION_NAME) == 0) {
263            ALOGV("instance extension '%s' requested",
264                  create_info->ppEnabledExtensionNames[i]);
265        } else {
266            ALOGW("unsupported extension '%s' requested",
267                  create_info->ppEnabledExtensionNames[i]);
268        }
269    }
270
271    *out_instance = instance;
272    return VK_SUCCESS;
273}
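
// Usage sketch (application side, illustrative): creating an instance that
// requests the one instance extension this driver recognizes. Everything here
// other than the extension name is a placeholder.
//
//     const char* exts[] = {VK_EXT_DEBUG_REPORT_EXTENSION_NAME};
//     VkInstanceCreateInfo info = {};
//     info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
//     info.enabledExtensionCount = 1;
//     info.ppEnabledExtensionNames = exts;
//     VkInstance instance = VK_NULL_HANDLE;
//     vkCreateInstance(&info, nullptr, &instance);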
274
275VKAPI_ATTR
276PFN_vkVoidFunction GetInstanceProcAddr(VkInstance instance, const char* name) {
277    return instance ? GetInstanceProcAddr(name) : GetGlobalProcAddr(name);
278}
279
280VKAPI_ATTR
281PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) {
282    return GetInstanceProcAddr(name);
283}
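
// Usage sketch (application side, illustrative): device-level entrypoints are
// resolved through vkGetDeviceProcAddr, which the loader ultimately services
// with the GetDeviceProcAddr above. 'device', 'queue' and 'submit_info' are
// assumed to have been created elsewhere.
//
//     auto queue_submit = reinterpret_cast<PFN_vkQueueSubmit>(
//         vkGetDeviceProcAddr(device, "vkQueueSubmit"));
//     if (queue_submit)
//         queue_submit(queue, 1, &submit_info, VK_NULL_HANDLE);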
284
285// -----------------------------------------------------------------------------
286// Instance
287
288void DestroyInstance(VkInstance instance,
289                     const VkAllocationCallbacks* /*allocator*/) {
290    instance->allocator.pfnFree(instance->allocator.pUserData, instance);
291}
292
293// -----------------------------------------------------------------------------
294// PhysicalDevice
295
296VkResult EnumeratePhysicalDevices(VkInstance instance,
297                                  uint32_t* physical_device_count,
298                                  VkPhysicalDevice* physical_devices) {
299    if (physical_devices && *physical_device_count >= 1)
300        physical_devices[0] = &instance->physical_device;
301    *physical_device_count = 1;
302    return VK_SUCCESS;
303}
304
305VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice /*gpu*/,
306                                        uint32_t* count,
307                                        VkLayerProperties* /*properties*/) {
308    ALOGW("Driver vkEnumerateDeviceLayerProperties shouldn't be called");
309    *count = 0;
310    return VK_SUCCESS;
311}
312
313VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice /*gpu*/,
314                                            const char* layer_name,
315                                            uint32_t* count,
316                                            VkExtensionProperties* properties) {
317    if (layer_name) {
318        ALOGW(
319            "Driver vkEnumerateDeviceExtensionProperties shouldn't be called "
320            "with a layer name ('%s')",
321            layer_name);
322        *count = 0;
323        return VK_SUCCESS;
324    }
325
326    const VkExtensionProperties kExtensions[] = {
327        {VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME,
328         VK_ANDROID_NATIVE_BUFFER_SPEC_VERSION}};
329    const uint32_t kExtensionsCount =
330        sizeof(kExtensions) / sizeof(kExtensions[0]);
331
332    if (!properties || *count > kExtensionsCount)
333        *count = kExtensionsCount;
334    if (properties)
335        std::copy(kExtensions, kExtensions + *count, properties);
336    return *count < kExtensionsCount ? VK_INCOMPLETE : VK_SUCCESS;
337}
338
339void GetPhysicalDeviceProperties(VkPhysicalDevice,
340                                 VkPhysicalDeviceProperties* properties) {
341    properties->apiVersion = VK_API_VERSION;
342    properties->driverVersion = VK_MAKE_VERSION(0, 0, 1);
343    properties->vendorID = 0;
344    properties->deviceID = 0;
345    properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
346    strcpy(properties->deviceName, "Android Vulkan Null Driver");
347    memset(properties->pipelineCacheUUID, 0,
348           sizeof(properties->pipelineCacheUUID));
349    properties->limits = VkPhysicalDeviceLimits{
350        4096,     // maxImageDimension1D
351        4096,     // maxImageDimension2D
352        256,      // maxImageDimension3D
353        4096,     // maxImageDimensionCube
354        256,      // maxImageArrayLayers
355        65536,    // maxTexelBufferElements
356        16384,    // maxUniformBufferRange
357        1 << 27,  // maxStorageBufferRange
358        128,      // maxPushConstantsSize
359        4096,     // maxMemoryAllocationCount
360        4000,     // maxSamplerAllocationCount
361        1,        // bufferImageGranularity
362        0,        // sparseAddressSpaceSize
363        4,        // maxBoundDescriptorSets
364        16,       // maxPerStageDescriptorSamplers
365        12,       // maxPerStageDescriptorUniformBuffers
366        4,        // maxPerStageDescriptorStorageBuffers
367        16,       // maxPerStageDescriptorSampledImages
368        4,        // maxPerStageDescriptorStorageImages
369        4,        // maxPerStageDescriptorInputAttachments
370        128,      // maxPerStageResources
371        96,       // maxDescriptorSetSamplers
372        72,       // maxDescriptorSetUniformBuffers
373        8,        // maxDescriptorSetUniformBuffersDynamic
374        24,       // maxDescriptorSetStorageBuffers
375        4,        // maxDescriptorSetStorageBuffersDynamic
376        96,       // maxDescriptorSetSampledImages
377        24,       // maxDescriptorSetStorageImages
378        4,        // maxDescriptorSetInputAttachments
379        16,       // maxVertexInputAttributes
380        16,       // maxVertexInputBindings
381        2047,     // maxVertexInputAttributeOffset
382        2048,     // maxVertexInputBindingStride
383        64,       // maxVertexOutputComponents
384        0,        // maxTessellationGenerationLevel
385        0,        // maxTessellationPatchSize
386        0,        // maxTessellationControlPerVertexInputComponents
387        0,        // maxTessellationControlPerVertexOutputComponents
388        0,        // maxTessellationControlPerPatchOutputComponents
389        0,        // maxTessellationControlTotalOutputComponents
390        0,        // maxTessellationEvaluationInputComponents
391        0,        // maxTessellationEvaluationOutputComponents
392        0,        // maxGeometryShaderInvocations
393        0,        // maxGeometryInputComponents
394        0,        // maxGeometryOutputComponents
395        0,        // maxGeometryOutputVertices
396        0,        // maxGeometryTotalOutputComponents
397        64,       // maxFragmentInputComponents
398        4,        // maxFragmentOutputAttachments
399        0,        // maxFragmentDualSrcAttachments
400        4,        // maxFragmentCombinedOutputResources
401        16384,    // maxComputeSharedMemorySize
402        {65536, 65536, 65536},  // maxComputeWorkGroupCount[3]
403        128,                    // maxComputeWorkGroupInvocations
404        {128, 128, 64},         // maxComputeWorkGroupSize[3]
405        4,                      // subPixelPrecisionBits
406        4,                      // subTexelPrecisionBits
407        4,                      // mipmapPrecisionBits
408        UINT32_MAX,             // maxDrawIndexedIndexValue
409        1,                      // maxDrawIndirectCount
410        2,                      // maxSamplerLodBias
411        1,                      // maxSamplerAnisotropy
412        1,                      // maxViewports
413        {4096, 4096},           // maxViewportDimensions[2]
414        {-8192.0f, 8191.0f},    // viewportBoundsRange[2]
415        0,                      // viewportSubPixelBits
416        64,                     // minMemoryMapAlignment
417        256,                    // minTexelBufferOffsetAlignment
418        256,                    // minUniformBufferOffsetAlignment
419        256,                    // minStorageBufferOffsetAlignment
420        -8,                     // minTexelOffset
421        7,                      // maxTexelOffset
422        0,                      // minTexelGatherOffset
423        0,                      // maxTexelGatherOffset
424        0.0f,                   // minInterpolationOffset
425        0.0f,                   // maxInterpolationOffset
426        0,                      // subPixelInterpolationOffsetBits
427        4096,                   // maxFramebufferWidth
428        4096,                   // maxFramebufferHeight
429        256,                    // maxFramebufferLayers
430        VK_SAMPLE_COUNT_1_BIT |
431            VK_SAMPLE_COUNT_4_BIT,  // framebufferColorSampleCounts
432        VK_SAMPLE_COUNT_1_BIT |
433            VK_SAMPLE_COUNT_4_BIT,  // framebufferDepthSampleCounts
434        VK_SAMPLE_COUNT_1_BIT |
435            VK_SAMPLE_COUNT_4_BIT,  // framebufferStencilSampleCounts
436        VK_SAMPLE_COUNT_1_BIT |
437            VK_SAMPLE_COUNT_4_BIT,  // framebufferNoAttachmentsSampleCounts
438        4,                          // maxColorAttachments
439        VK_SAMPLE_COUNT_1_BIT |
440            VK_SAMPLE_COUNT_4_BIT,  // sampledImageColorSampleCounts
441        VK_SAMPLE_COUNT_1_BIT,      // sampledImageIntegerSampleCounts
442        VK_SAMPLE_COUNT_1_BIT |
443            VK_SAMPLE_COUNT_4_BIT,  // sampledImageDepthSampleCounts
444        VK_SAMPLE_COUNT_1_BIT |
445            VK_SAMPLE_COUNT_4_BIT,  // sampledImageStencilSampleCounts
446        VK_SAMPLE_COUNT_1_BIT,      // storageImageSampleCounts
447        1,                          // maxSampleMaskWords
448        VK_TRUE,                    // timestampComputeAndGraphics
449        1,                          // timestampPeriod
450        0,                          // maxClipDistances
451        0,                          // maxCullDistances
452        0,                          // maxCombinedClipAndCullDistances
453        2,                          // discreteQueuePriorities
454        {1.0f, 1.0f},               // pointSizeRange[2]
455        {1.0f, 1.0f},               // lineWidthRange[2]
456        0.0f,                       // pointSizeGranularity
457        0.0f,                       // lineWidthGranularity
458        VK_TRUE,                    // strictLines
459        VK_TRUE,                    // standardSampleLocations
460        1,                          // optimalBufferCopyOffsetAlignment
461        1,                          // optimalBufferCopyRowPitchAlignment
462        64,                         // nonCoherentAtomSize
463    };
464}
465
466void GetPhysicalDeviceQueueFamilyProperties(
467    VkPhysicalDevice,
468    uint32_t* count,
469    VkQueueFamilyProperties* properties) {
470    if (!properties || *count > 1)
471        *count = 1;
472    if (properties && *count == 1) {
473        properties->queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT |
474                                 VK_QUEUE_TRANSFER_BIT;
475        properties->queueCount = 1;
476        properties->timestampValidBits = 64;
477        properties->minImageTransferGranularity = VkExtent3D{1, 1, 1};
478    }
479}
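
// Usage sketch (application side, illustrative): the standard two-call pattern
// against this entrypoint; 'gpu' is assumed to come from
// vkEnumeratePhysicalDevices. This driver always reports exactly one
// graphics/compute/transfer queue family.
//
//     uint32_t count = 0;
//     vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);  // count == 1
//     std::vector<VkQueueFamilyProperties> families(count);
//     vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, families.data());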
480
481void GetPhysicalDeviceMemoryProperties(
482    VkPhysicalDevice,
483    VkPhysicalDeviceMemoryProperties* properties) {
484    properties->memoryTypeCount = 1;
485    properties->memoryTypes[0].propertyFlags =
486        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
487        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
488        VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
489        VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
490    properties->memoryTypes[0].heapIndex = 0;
491    properties->memoryHeapCount = 1;
492    properties->memoryHeaps[0].size = kMaxDeviceMemory;
493    properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
494}
495
496void GetPhysicalDeviceFeatures(VkPhysicalDevice /*gpu*/,
497                               VkPhysicalDeviceFeatures* features) {
498    *features = VkPhysicalDeviceFeatures{
499        VK_TRUE,   // robustBufferAccess
500        VK_FALSE,  // fullDrawIndexUint32
501        VK_FALSE,  // imageCubeArray
502        VK_FALSE,  // independentBlend
503        VK_FALSE,  // geometryShader
504        VK_FALSE,  // tessellationShader
505        VK_FALSE,  // sampleRateShading
506        VK_FALSE,  // dualSrcBlend
507        VK_FALSE,  // logicOp
508        VK_FALSE,  // multiDrawIndirect
509        VK_FALSE,  // drawIndirectFirstInstance
510        VK_FALSE,  // depthClamp
511        VK_FALSE,  // depthBiasClamp
512        VK_FALSE,  // fillModeNonSolid
513        VK_FALSE,  // depthBounds
514        VK_FALSE,  // wideLines
515        VK_FALSE,  // largePoints
516        VK_FALSE,  // alphaToOne
517        VK_FALSE,  // multiViewport
518        VK_FALSE,  // samplerAnisotropy
519        VK_FALSE,  // textureCompressionETC2
520        VK_FALSE,  // textureCompressionASTC_LDR
521        VK_FALSE,  // textureCompressionBC
522        VK_FALSE,  // occlusionQueryPrecise
523        VK_FALSE,  // pipelineStatisticsQuery
524        VK_FALSE,  // vertexPipelineStoresAndAtomics
525        VK_FALSE,  // fragmentStoresAndAtomics
526        VK_FALSE,  // shaderTessellationAndGeometryPointSize
527        VK_FALSE,  // shaderImageGatherExtended
528        VK_FALSE,  // shaderStorageImageExtendedFormats
529        VK_FALSE,  // shaderStorageImageMultisample
530        VK_FALSE,  // shaderStorageImageReadWithoutFormat
531        VK_FALSE,  // shaderStorageImageWriteWithoutFormat
532        VK_FALSE,  // shaderUniformBufferArrayDynamicIndexing
533        VK_FALSE,  // shaderSampledImageArrayDynamicIndexing
534        VK_FALSE,  // shaderStorageBufferArrayDynamicIndexing
535        VK_FALSE,  // shaderStorageImageArrayDynamicIndexing
536        VK_FALSE,  // shaderClipDistance
537        VK_FALSE,  // shaderCullDistance
538        VK_FALSE,  // shaderFloat64
539        VK_FALSE,  // shaderInt64
540        VK_FALSE,  // shaderInt16
541        VK_FALSE,  // shaderResourceResidency
542        VK_FALSE,  // shaderResourceMinLod
543        VK_FALSE,  // sparseBinding
544        VK_FALSE,  // sparseResidencyBuffer
545        VK_FALSE,  // sparseResidencyImage2D
546        VK_FALSE,  // sparseResidencyImage3D
547        VK_FALSE,  // sparseResidency2Samples
548        VK_FALSE,  // sparseResidency4Samples
549        VK_FALSE,  // sparseResidency8Samples
550        VK_FALSE,  // sparseResidency16Samples
551        VK_FALSE,  // sparseResidencyAliased
552        VK_FALSE,  // variableMultisampleRate
553        VK_FALSE,  // inheritedQueries
554    };
555}
556
557// -----------------------------------------------------------------------------
558// Device
559
560VkResult CreateDevice(VkPhysicalDevice physical_device,
561                      const VkDeviceCreateInfo* create_info,
562                      const VkAllocationCallbacks* allocator,
563                      VkDevice* out_device) {
564    VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device);
565    if (!allocator)
566        allocator = &instance->allocator;
567    VkDevice_T* device = static_cast<VkDevice_T*>(allocator->pfnAllocation(
568        allocator->pUserData, sizeof(VkDevice_T), alignof(VkDevice_T),
569        VK_SYSTEM_ALLOCATION_SCOPE_DEVICE));
570    if (!device)
571        return VK_ERROR_OUT_OF_HOST_MEMORY;
572
573    device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
574    device->allocator = *allocator;
575    device->instance = instance;
576    device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
577    std::fill(device->next_handle.begin(), device->next_handle.end(),
578              UINT64_C(0));
579
580    for (uint32_t i = 0; i < create_info->enabledExtensionCount; i++) {
581        if (strcmp(create_info->ppEnabledExtensionNames[i],
582                   VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME) == 0) {
583            ALOGV("Enabling " VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME);
584        }
585    }
586
587    *out_device = device;
588    return VK_SUCCESS;
589}
590
591void DestroyDevice(VkDevice device,
592                   const VkAllocationCallbacks* /*allocator*/) {
593    if (!device)
594        return;
595    device->allocator.pfnFree(device->allocator.pUserData, device);
596}
597
598void GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) {
599    *queue = &device->queue;
600}
601
602// -----------------------------------------------------------------------------
603// CommandPool
604
605struct CommandPool {
606    typedef VkCommandPool HandleType;
607    VkAllocationCallbacks allocator;
608};
609DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool)
610
611VkResult CreateCommandPool(VkDevice device,
612                           const VkCommandPoolCreateInfo* /*create_info*/,
613                           const VkAllocationCallbacks* allocator,
614                           VkCommandPool* cmd_pool) {
615    if (!allocator)
616        allocator = &device->allocator;
617    CommandPool* pool = static_cast<CommandPool*>(allocator->pfnAllocation(
618        allocator->pUserData, sizeof(CommandPool), alignof(CommandPool),
619        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
620    if (!pool)
621        return VK_ERROR_OUT_OF_HOST_MEMORY;
622    pool->allocator = *allocator;
623    *cmd_pool = GetHandleToCommandPool(pool);
624    return VK_SUCCESS;
625}
626
627void DestroyCommandPool(VkDevice /*device*/,
628                        VkCommandPool cmd_pool,
629                        const VkAllocationCallbacks* /*allocator*/) {
630    CommandPool* pool = GetCommandPoolFromHandle(cmd_pool);
631    pool->allocator.pfnFree(pool->allocator.pUserData, pool);
632}
633
634// -----------------------------------------------------------------------------
635// CmdBuffer
636
637VkResult AllocateCommandBuffers(VkDevice /*device*/,
638                                const VkCommandBufferAllocateInfo* alloc_info,
639                                VkCommandBuffer* cmdbufs) {
640    VkResult result = VK_SUCCESS;
641    CommandPool& pool = *GetCommandPoolFromHandle(alloc_info->commandPool);
642    std::fill(cmdbufs, cmdbufs + alloc_info->commandBufferCount, nullptr);
643    for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
644        cmdbufs[i] =
645            static_cast<VkCommandBuffer_T*>(pool.allocator.pfnAllocation(
646                pool.allocator.pUserData, sizeof(VkCommandBuffer_T),
647                alignof(VkCommandBuffer_T), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
648        if (!cmdbufs[i]) {
649            result = VK_ERROR_OUT_OF_HOST_MEMORY;
650            break;
651        }
652        cmdbufs[i]->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
653    }
654    if (result != VK_SUCCESS) {
655        for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
656            if (!cmdbufs[i])
657                break;
658            pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
659        }
660    }
661    return result;
662}
663
664void FreeCommandBuffers(VkDevice /*device*/,
665                        VkCommandPool cmd_pool,
666                        uint32_t count,
667                        const VkCommandBuffer* cmdbufs) {
668    CommandPool& pool = *GetCommandPoolFromHandle(cmd_pool);
669    for (uint32_t i = 0; i < count; i++)
670        pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
671}
672
673// -----------------------------------------------------------------------------
674// DeviceMemory
675
676struct DeviceMemory {
677    typedef VkDeviceMemory HandleType;
678    VkDeviceSize size;
679    alignas(16) uint8_t data[0];
680};
681DEFINE_OBJECT_HANDLE_CONVERSION(DeviceMemory)
682
683VkResult AllocateMemory(VkDevice device,
684                        const VkMemoryAllocateInfo* alloc_info,
685                        const VkAllocationCallbacks* allocator,
686                        VkDeviceMemory* mem_handle) {
687    if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize)
688        return VK_ERROR_OUT_OF_HOST_MEMORY;
689    if (!allocator)
690        allocator = &device->allocator;
691
692    size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize);
693    DeviceMemory* mem = static_cast<DeviceMemory*>(allocator->pfnAllocation(
694        allocator->pUserData, size, alignof(DeviceMemory),
695        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
696    if (!mem)
697        return VK_ERROR_OUT_OF_HOST_MEMORY;
698    mem->size = size;
699    *mem_handle = GetHandleToDeviceMemory(mem);
700    return VK_SUCCESS;
701}
702
703void FreeMemory(VkDevice device,
704                VkDeviceMemory mem_handle,
705                const VkAllocationCallbacks* allocator) {
706    if (!allocator)
707        allocator = &device->allocator;
708    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
709    allocator->pfnFree(allocator->pUserData, mem);
710}
711
712VkResult MapMemory(VkDevice,
713                   VkDeviceMemory mem_handle,
714                   VkDeviceSize offset,
715                   VkDeviceSize,
716                   VkMemoryMapFlags,
717                   void** out_ptr) {
718    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
719    *out_ptr = &mem->data[0] + offset;
720    return VK_SUCCESS;
721}
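
// Usage sketch (application side, illustrative): memory allocated from this
// driver is plain host memory, so mapping just returns a pointer into the
// DeviceMemory payload above. 'device' is assumed to be a VkDevice created
// from this driver.
//
//     VkMemoryAllocateInfo info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
//                                  nullptr, 4096 /*allocationSize*/,
//                                  0 /*memoryTypeIndex*/};
//     VkDeviceMemory mem = VK_NULL_HANDLE;
//     vkAllocateMemory(device, &info, nullptr, &mem);
//     void* ptr = nullptr;
//     vkMapMemory(device, mem, 0, VK_WHOLE_SIZE, 0, &ptr);
//     memset(ptr, 0, 4096);
//     vkUnmapMemory(device, mem);
//     vkFreeMemory(device, mem, nullptr);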
722
723// -----------------------------------------------------------------------------
724// Buffer
725
726struct Buffer {
727    typedef VkBuffer HandleType;
728    VkDeviceSize size;
729};
730DEFINE_OBJECT_HANDLE_CONVERSION(Buffer)
731
732VkResult CreateBuffer(VkDevice device,
733                      const VkBufferCreateInfo* create_info,
734                      const VkAllocationCallbacks* allocator,
735                      VkBuffer* buffer_handle) {
736    ALOGW_IF(create_info->size > kMaxDeviceMemory,
737             "CreateBuffer: requested size 0x%" PRIx64
738             " exceeds max device memory size 0x%" PRIx64,
739             create_info->size, kMaxDeviceMemory);
740    if (!allocator)
741        allocator = &device->allocator;
742    Buffer* buffer = static_cast<Buffer*>(allocator->pfnAllocation(
743        allocator->pUserData, sizeof(Buffer), alignof(Buffer),
744        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
745    if (!buffer)
746        return VK_ERROR_OUT_OF_HOST_MEMORY;
747    buffer->size = create_info->size;
748    *buffer_handle = GetHandleToBuffer(buffer);
749    return VK_SUCCESS;
750}
751
752void GetBufferMemoryRequirements(VkDevice,
753                                 VkBuffer buffer_handle,
754                                 VkMemoryRequirements* requirements) {
755    Buffer* buffer = GetBufferFromHandle(buffer_handle);
756    requirements->size = buffer->size;
757    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
758    requirements->memoryTypeBits = 0x1;
759}
760
761void DestroyBuffer(VkDevice device,
762                   VkBuffer buffer_handle,
763                   const VkAllocationCallbacks* allocator) {
764    if (!allocator)
765        allocator = &device->allocator;
766    Buffer* buffer = GetBufferFromHandle(buffer_handle);
767    allocator->pfnFree(allocator->pUserData, buffer);
768}
769
770// -----------------------------------------------------------------------------
771// Image
772
773struct Image {
774    typedef VkImage HandleType;
775    VkDeviceSize size;
776};
777DEFINE_OBJECT_HANDLE_CONVERSION(Image)
778
779VkResult CreateImage(VkDevice device,
780                     const VkImageCreateInfo* create_info,
781                     const VkAllocationCallbacks* allocator,
782                     VkImage* image_handle) {
783    if (create_info->imageType != VK_IMAGE_TYPE_2D ||
784        create_info->format != VK_FORMAT_R8G8B8A8_UNORM ||
785        create_info->mipLevels != 1) {
786        ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u",
787              create_info->imageType, create_info->format,
788              create_info->mipLevels);
789        return VK_ERROR_OUT_OF_HOST_MEMORY;
790    }
791
792    VkDeviceSize size =
793        VkDeviceSize(create_info->extent.width) * create_info->extent.height *
794        create_info->arrayLayers * create_info->samples * 4u;
795    ALOGW_IF(size > kMaxDeviceMemory,
796             "CreateImage: image size 0x%" PRIx64
797             " exceeds max device memory size 0x%" PRIx64,
798             size, kMaxDeviceMemory);
799
800    if (!allocator)
801        allocator = &device->allocator;
802    Image* image = static_cast<Image*>(allocator->pfnAllocation(
803        allocator->pUserData, sizeof(Image), alignof(Image),
804        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
805    if (!image)
806        return VK_ERROR_OUT_OF_HOST_MEMORY;
807    image->size = size;
808    *image_handle = GetHandleToImage(image);
809    return VK_SUCCESS;
810}
811
812void GetImageMemoryRequirements(VkDevice,
813                                VkImage image_handle,
814                                VkMemoryRequirements* requirements) {
815    Image* image = GetImageFromHandle(image_handle);
816    requirements->size = image->size;
817    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
818    requirements->memoryTypeBits = 0x1;
819}
820
821void DestroyImage(VkDevice device,
822                  VkImage image_handle,
823                  const VkAllocationCallbacks* allocator) {
824    if (!allocator)
825        allocator = &device->allocator;
826    Image* image = GetImageFromHandle(image_handle);
827    allocator->pfnFree(allocator->pUserData, image);
828}
829
830VkResult GetSwapchainGrallocUsageANDROID(VkDevice,
831                                         VkFormat,
832                                         VkImageUsageFlags,
833                                         int* grallocUsage) {
834    // The null driver never reads or writes the gralloc buffer
835    *grallocUsage = 0;
836    return VK_SUCCESS;
837}
838
839VkResult AcquireImageANDROID(VkDevice,
840                             VkImage,
841                             int fence,
842                             VkSemaphore,
843                             VkFence) {
844    if (fence >= 0)
        close(fence);
845    return VK_SUCCESS;
846}
847
848VkResult QueueSignalReleaseImageANDROID(VkQueue,
849                                        uint32_t,
850                                        const VkSemaphore*,
851                                        VkImage,
852                                        int* fence) {
853    *fence = -1;
854    return VK_SUCCESS;
855}
856
857// -----------------------------------------------------------------------------
858// No-op types
859
860VkResult CreateBufferView(VkDevice device,
861                          const VkBufferViewCreateInfo*,
862                          const VkAllocationCallbacks* /*allocator*/,
863                          VkBufferView* view) {
864    *view = AllocHandle<VkBufferView>(device, HandleType::kBufferView);
865    return VK_SUCCESS;
866}
867
868VkResult CreateDescriptorPool(VkDevice device,
869                              const VkDescriptorPoolCreateInfo*,
870                              const VkAllocationCallbacks* /*allocator*/,
871                              VkDescriptorPool* pool) {
872    *pool = AllocHandle<VkDescriptorPool>(device, HandleType::kDescriptorPool);
873    return VK_SUCCESS;
874}
875
876VkResult AllocateDescriptorSets(VkDevice device,
877                                const VkDescriptorSetAllocateInfo* alloc_info,
878                                VkDescriptorSet* descriptor_sets) {
879    for (uint32_t i = 0; i < alloc_info->descriptorSetCount; i++)
880        descriptor_sets[i] =
881            AllocHandle<VkDescriptorSet>(device, HandleType::kDescriptorSet);
882    return VK_SUCCESS;
883}
884
885VkResult CreateDescriptorSetLayout(VkDevice device,
886                                   const VkDescriptorSetLayoutCreateInfo*,
887                                   const VkAllocationCallbacks* /*allocator*/,
888                                   VkDescriptorSetLayout* layout) {
889    *layout = AllocHandle<VkDescriptorSetLayout>(
890        device, HandleType::kDescriptorSetLayout);
891    return VK_SUCCESS;
892}
893
894VkResult CreateEvent(VkDevice device,
895                     const VkEventCreateInfo*,
896                     const VkAllocationCallbacks* /*allocator*/,
897                     VkEvent* event) {
898    *event = AllocHandle<VkEvent>(device, HandleType::kEvent);
899    return VK_SUCCESS;
900}
901
902VkResult CreateFence(VkDevice device,
903                     const VkFenceCreateInfo*,
904                     const VkAllocationCallbacks* /*allocator*/,
905                     VkFence* fence) {
906    *fence = AllocHandle<VkFence>(device, HandleType::kFence);
907    return VK_SUCCESS;
908}
909
910VkResult CreateFramebuffer(VkDevice device,
911                           const VkFramebufferCreateInfo*,
912                           const VkAllocationCallbacks* /*allocator*/,
913                           VkFramebuffer* framebuffer) {
914    *framebuffer = AllocHandle<VkFramebuffer>(device, HandleType::kFramebuffer);
915    return VK_SUCCESS;
916}
917
918VkResult CreateImageView(VkDevice device,
919                         const VkImageViewCreateInfo*,
920                         const VkAllocationCallbacks* /*allocator*/,
921                         VkImageView* view) {
922    *view = AllocHandle<VkImageView>(device, HandleType::kImageView);
923    return VK_SUCCESS;
924}
925
926VkResult CreateGraphicsPipelines(VkDevice device,
927                                 VkPipelineCache,
928                                 uint32_t count,
929                                 const VkGraphicsPipelineCreateInfo*,
930                                 const VkAllocationCallbacks* /*allocator*/,
931                                 VkPipeline* pipelines) {
932    for (uint32_t i = 0; i < count; i++)
933        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
934    return VK_SUCCESS;
935}
936
937VkResult CreateComputePipelines(VkDevice device,
938                                VkPipelineCache,
939                                uint32_t count,
940                                const VkComputePipelineCreateInfo*,
941                                const VkAllocationCallbacks* /*allocator*/,
942                                VkPipeline* pipelines) {
943    for (uint32_t i = 0; i < count; i++)
944        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
945    return VK_SUCCESS;
946}
947
948VkResult CreatePipelineCache(VkDevice device,
949                             const VkPipelineCacheCreateInfo*,
950                             const VkAllocationCallbacks* /*allocator*/,
951                             VkPipelineCache* cache) {
952    *cache = AllocHandle<VkPipelineCache>(device, HandleType::kPipelineCache);
953    return VK_SUCCESS;
954}
955
956VkResult CreatePipelineLayout(VkDevice device,
957                              const VkPipelineLayoutCreateInfo*,
958                              const VkAllocationCallbacks* /*allocator*/,
959                              VkPipelineLayout* layout) {
960    *layout =
961        AllocHandle<VkPipelineLayout>(device, HandleType::kPipelineLayout);
962    return VK_SUCCESS;
963}
964
965VkResult CreateQueryPool(VkDevice device,
966                         const VkQueryPoolCreateInfo*,
967                         const VkAllocationCallbacks* /*allocator*/,
968                         VkQueryPool* pool) {
969    *pool = AllocHandle<VkQueryPool>(device, HandleType::kQueryPool);
970    return VK_SUCCESS;
971}
972
973VkResult CreateRenderPass(VkDevice device,
974                          const VkRenderPassCreateInfo*,
975                          const VkAllocationCallbacks* /*allocator*/,
976                          VkRenderPass* renderpass) {
977    *renderpass = AllocHandle<VkRenderPass>(device, HandleType::kRenderPass);
978    return VK_SUCCESS;
979}
980
981VkResult CreateSampler(VkDevice device,
982                       const VkSamplerCreateInfo*,
983                       const VkAllocationCallbacks* /*allocator*/,
984                       VkSampler* sampler) {
985    *sampler = AllocHandle<VkSampler>(device, HandleType::kSampler);
986    return VK_SUCCESS;
987}
988
989VkResult CreateSemaphore(VkDevice device,
990                         const VkSemaphoreCreateInfo*,
991                         const VkAllocationCallbacks* /*allocator*/,
992                         VkSemaphore* semaphore) {
993    *semaphore = AllocHandle<VkSemaphore>(device, HandleType::kSemaphore);
994    return VK_SUCCESS;
995}
996
997VkResult CreateShaderModule(VkDevice device,
998                            const VkShaderModuleCreateInfo*,
999                            const VkAllocationCallbacks* /*allocator*/,
1000                            VkShaderModule* module) {
1001    *module = AllocHandle<VkShaderModule>(device, HandleType::kShaderModule);
1002    return VK_SUCCESS;
1003}
1004
1005VkResult CreateDebugReportCallbackEXT(VkInstance instance,
1006                                      const VkDebugReportCallbackCreateInfoEXT*,
1007                                      const VkAllocationCallbacks*,
1008                                      VkDebugReportCallbackEXT* callback) {
1009    *callback = AllocHandle<VkDebugReportCallbackEXT>(
1010        instance, HandleType::kDebugReportCallbackEXT);
1011    return VK_SUCCESS;
1012}
1013
1014// -----------------------------------------------------------------------------
1015// No-op entrypoints
1016
1017// clang-format off
1018#pragma clang diagnostic push
1019#pragma clang diagnostic ignored "-Wunused-parameter"
1020
1021void GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {
1022    ALOGV("TODO: vk%s", __FUNCTION__);
1023}
1024
1025VkResult GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) {
1026    ALOGV("TODO: vk%s", __FUNCTION__);
1027    return VK_SUCCESS;
1028}
1029
1030VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) {
1031    ALOGV("TODO: vk%s", __FUNCTION__);
1032    return VK_SUCCESS;
1033}
1034
1035VkResult QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmitInfo, VkFence fence) {
1036    return VK_SUCCESS;
1037}
1038
1039VkResult QueueWaitIdle(VkQueue queue) {
1040    ALOGV("TODO: vk%s", __FUNCTION__);
1041    return VK_SUCCESS;
1042}
1043
1044VkResult DeviceWaitIdle(VkDevice device) {
1045    ALOGV("TODO: vk%s", __FUNCTION__);
1046    return VK_SUCCESS;
1047}
1048
1049void UnmapMemory(VkDevice device, VkDeviceMemory mem) {
1050}
1051
1052VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
1053    ALOGV("TODO: vk%s", __FUNCTION__);
1054    return VK_SUCCESS;
1055}
1056
1057VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
1058    ALOGV("TODO: vk%s", __FUNCTION__);
1059    return VK_SUCCESS;
1060}
1061
1062void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {
1063    ALOGV("TODO: vk%s", __FUNCTION__);
1064}
1065
1066VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) {
1067    return VK_SUCCESS;
1068}
1069
1070VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset) {
1071    return VK_SUCCESS;
1072}
1073
1074void GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
1075    ALOGV("TODO: vk%s", __FUNCTION__);
1076}
1077
1078void GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) {
1079    ALOGV("TODO: vk%s", __FUNCTION__);
1080}
1081
1082VkResult QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) {
1083    ALOGV("TODO: vk%s", __FUNCTION__);
1084    return VK_SUCCESS;
1085}
1086
1087void DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* allocator) {
1088}
1089
1090VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {
1091    return VK_SUCCESS;
1092}
1093
1094VkResult GetFenceStatus(VkDevice device, VkFence fence) {
1095    ALOGV("TODO: vk%s", __FUNCTION__);
1096    return VK_SUCCESS;
1097}
1098
1099VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {
1100    return VK_SUCCESS;
1101}
1102
1103void DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* allocator) {
1104}
1105
1106void DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* allocator) {
1107}
1108
1109VkResult GetEventStatus(VkDevice device, VkEvent event) {
1110    ALOGV("TODO: vk%s", __FUNCTION__);
1111    return VK_SUCCESS;
1112}
1113
1114VkResult SetEvent(VkDevice device, VkEvent event) {
1115    ALOGV("TODO: vk%s", __FUNCTION__);
1116    return VK_SUCCESS;
1117}
1118
1119VkResult ResetEvent(VkDevice device, VkEvent event) {
1120    ALOGV("TODO: vk%s", __FUNCTION__);
1121    return VK_SUCCESS;
1122}
1123
1124void DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* allocator) {
1125}
1126
1127VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {
1128    ALOGV("TODO: vk%s", __FUNCTION__);
1129    return VK_SUCCESS;
1130}
1131
1132void DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* allocator) {
1133}
1134
1135void GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
1136    ALOGV("TODO: vk%s", __FUNCTION__);
1137}
1138
1139void DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* allocator) {
1140}
1141
1142void DestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* allocator) {
1143}
1144
1145void DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* allocator) {
1146}
1147
1148VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {
1149    ALOGV("TODO: vk%s", __FUNCTION__);
1150    return VK_SUCCESS;
1151}
1152
1153VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {
1154    ALOGV("TODO: vk%s", __FUNCTION__);
1155    return VK_SUCCESS;
1156}
1157
1158void DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* allocator) {
1159}
1160
1161void DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* allocator) {
1162}
1163
1164void DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* allocator) {
1165}
1166
1167void DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* allocator) {
1168}
1169
1170void DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* allocator) {
1171}
1172
1173VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
1174    ALOGV("TODO: vk%s", __FUNCTION__);
1175    return VK_SUCCESS;
1176}
1177
1178void UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
1179    ALOGV("TODO: vk%s", __FUNCTION__);
1180}
1181
1182VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
1183    ALOGV("TODO: vk%s", __FUNCTION__);
1184    return VK_SUCCESS;
1185}
1186
1187void DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* allocator) {
1188}
1189
1190void DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* allocator) {
1191}
1192
1193void GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
1194    ALOGV("TODO: vk%s", __FUNCTION__);
1195}
1196
1197VkResult ResetCommandPool(VkDevice device, VkCommandPool cmdPool, VkCommandPoolResetFlags flags) {
1198    ALOGV("TODO: vk%s", __FUNCTION__);
1199    return VK_SUCCESS;
1200}
1201
1202VkResult BeginCommandBuffer(VkCommandBuffer cmdBuffer, const VkCommandBufferBeginInfo* pBeginInfo) {
1203    return VK_SUCCESS;
1204}
1205
1206VkResult EndCommandBuffer(VkCommandBuffer cmdBuffer) {
1207    return VK_SUCCESS;
1208}
1209
1210VkResult ResetCommandBuffer(VkCommandBuffer cmdBuffer, VkCommandBufferResetFlags flags) {
1211    ALOGV("TODO: vk%s", __FUNCTION__);
1212    return VK_SUCCESS;
1213}
1214
1215void CmdBindPipeline(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
1216}
1217
1218void CmdSetViewport(VkCommandBuffer cmdBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) {
1219}
1220
1221void CmdSetScissor(VkCommandBuffer cmdBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) {
1222}
1223
1224void CmdSetLineWidth(VkCommandBuffer cmdBuffer, float lineWidth) {
1225}
1226
1227void CmdSetDepthBias(VkCommandBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
1228}
1229
1230void CmdSetBlendConstants(VkCommandBuffer cmdBuffer, const float blendConst[4]) {
1231}
1232
1233void CmdSetDepthBounds(VkCommandBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
1234}
1235
1236void CmdSetStencilCompareMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
1237}
1238
1239void CmdSetStencilWriteMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
1240}
1241
1242void CmdSetStencilReference(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
1243}
1244
1245void CmdBindDescriptorSets(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
1246}
1247
1248void CmdBindIndexBuffer(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
1249}
1250
1251void CmdBindVertexBuffers(VkCommandBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
1252}
1253
1254void CmdDraw(VkCommandBuffer cmdBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
1255}
1256
1257void CmdDrawIndexed(VkCommandBuffer cmdBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
1258}
1259
1260void CmdDrawIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
1261}
1262
1263void CmdDrawIndexedIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
1264}
1265
1266void CmdDispatch(VkCommandBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
1267}
1268
1269void CmdDispatchIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
1270}
1271
1272void CmdCopyBuffer(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
1273}
1274
1275void CmdCopyImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
1276}
1277
1278void CmdBlitImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {
1279}
1280
1281void CmdCopyBufferToImage(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
1282}
1283
1284void CmdCopyImageToBuffer(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
1285}
1286
1287void CmdUpdateBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const uint32_t* pData) {
1288}
1289
1290void CmdFillBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
1291}
1292
1293void CmdClearColorImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
1294}
1295
1296void CmdClearDepthStencilImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
1297}
1298
1299void CmdClearAttachments(VkCommandBuffer cmdBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {
1300}
1301
1302void CmdResolveImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
1303}
1304
1305void CmdSetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
1306}
1307
1308void CmdResetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
1309}
1310
1311void CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
1312}
1313
1314void CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
1315}
1316
1317void CmdBeginQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
1318}
1319
1320void CmdEndQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
1321}
1322
1323void CmdResetQueryPool(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount) {
1324}
1325
1326void CmdWriteTimestamp(VkCommandBuffer cmdBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t slot) {
1327}
1328
1329void CmdCopyQueryPoolResults(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
1330}
1331
1332void CmdPushConstants(VkCommandBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
1333}
1334
1335void CmdBeginRenderPass(VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {
1336}
1337
1338void CmdNextSubpass(VkCommandBuffer cmdBuffer, VkSubpassContents contents) {
1339}
1340
1341void CmdEndRenderPass(VkCommandBuffer cmdBuffer) {
1342}
1343
1344void CmdExecuteCommands(VkCommandBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCommandBuffer* pCmdBuffers) {
1345}
1346
1347void DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) {
1348}
1349
1350void DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) {
1351}
1352
1353#pragma clang diagnostic pop
1354// clang-format on
1355
1356}  // namespace null_driver
1357