1 /*
2 * Copyright 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
#include <hardware/hwvulkan.h>

#include <errno.h>
#include <inttypes.h>
#include <malloc.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

#include <algorithm>
#include <array>

#include <log/log.h>
#include <utils/Errors.h>

#include "null_driver_gen.h"
29
30 using namespace null_driver;
31
// Backing objects for the dispatchable Vulkan handle types. Each starts
// with an hwvulkan_dispatch_t whose magic is set to HWVULKAN_DISPATCH_MAGIC
// on creation (see CreateInstance/CreateDevice/AllocateCommandBuffers).
struct VkPhysicalDevice_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkInstance_T {
    hwvulkan_dispatch_t dispatch;
    // Allocator captured at vkCreateInstance time; used to free the
    // instance itself in DestroyInstance.
    VkAllocationCallbacks allocator;
    // The single physical device is embedded rather than allocated;
    // GetInstanceFromPhysicalDevice recovers the instance via offsetof.
    VkPhysicalDevice_T physical_device;
    // Counter feeding AllocHandle for instance-scope non-dispatchable
    // handles (debug report callbacks).
    uint64_t next_callback_handle;
};

struct VkQueue_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkCommandBuffer_T {
    hwvulkan_dispatch_t dispatch;
};
50
namespace {
// Handles for non-dispatchable objects are either pointers, or arbitrary
// 64-bit non-zero values. We only use pointers when we need to keep state for
// the object even in a null driver. For the rest, we form a handle as:
//   [63:63] = 1 to distinguish from pointer handles*
//   [62:56] = non-zero handle type enum value
//   [55: 0] = per-handle-type incrementing counter
// * This works because virtual addresses with the high bit set are reserved
// for kernel data in all ABIs we run on.
//
// We never reclaim handles on vkDestroy*. It's not even necessary for us to
// have distinct handles for live objects, and practically speaking we won't
// ever create 2^56 objects of the same type from a single VkDevice in a null
// driver.
//
// Using a namespace here instead of 'enum class' since we want scoped
// constants but also want implicit conversions to integral types.
namespace HandleType {
enum Enum {
    kBufferView,
    kDebugReportCallbackEXT,
    kDescriptorPool,
    kDescriptorSet,
    kDescriptorSetLayout,
    kEvent,
    kFence,
    kFramebuffer,
    kImageView,
    kPipeline,
    kPipelineCache,
    kPipelineLayout,
    kQueryPool,
    kRenderPass,
    kSampler,
    kSemaphore,
    kShaderModule,

    kNumTypes
};
}  // namespace HandleType

// Size of the single heap reported by GetPhysicalDeviceMemoryProperties;
// also used to warn on oversized buffer/image creation requests.
const VkDeviceSize kMaxDeviceMemory = 0x10000000;  // 256 MiB, arbitrary

}  // anonymous namespace
95
struct VkDevice_T {
    hwvulkan_dispatch_t dispatch;
    // Allocator captured at vkCreateDevice time (defaults to the
    // instance's); used for all device-lifetime allocations.
    VkAllocationCallbacks allocator;
    VkInstance_T* instance;
    // The single queue is embedded; GetDeviceQueue returns its address.
    VkQueue_T queue;
    // Per-handle-type counters consumed by AllocHandle (see the handle
    // layout comment above).
    std::array<uint64_t, HandleType::kNumTypes> next_handle;
};
103
104 // -----------------------------------------------------------------------------
105 // Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device
106 // later.
107
namespace {
// Forward declaration: OpenDevice is defined after HAL_MODULE_INFO_SYM,
// but the methods table referenced by that symbol needs it here.
int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device);
hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice};
} // namespace
112
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
// Exported HAL module descriptor for this driver. Default visibility so the
// symbol stays exported even when the rest of the library is hidden.
__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = {
    .common =
        {
            .tag = HARDWARE_MODULE_TAG,
            .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
            .hal_api_version = HARDWARE_HAL_API_VERSION,
            .id = HWVULKAN_HARDWARE_MODULE_ID,
            .name = "Null Vulkan Driver",
            .author = "The Android Open Source Project",
            .methods = &nulldrv_module_methods,
        },
};
#pragma clang diagnostic pop
128
129 // -----------------------------------------------------------------------------
130
131 namespace {
132
// hw_device_t close hook. OpenDevice hands out a pointer to the static
// nulldrv_device and allocates nothing, so closing is a no-op.
int CloseDevice(struct hw_device_t* /*device*/) {
    return 0;
}
137
// The single hwvulkan device instance handed out by OpenDevice. Only the
// three global entry points live here; everything else is resolved through
// GetInstanceProcAddr.
hwvulkan_device_t nulldrv_device = {
    .common =
        {
            .tag = HARDWARE_DEVICE_TAG,
            .version = HWVULKAN_DEVICE_API_VERSION_0_1,
            .module = &HAL_MODULE_INFO_SYM.common,
            .close = CloseDevice,
        },
    .EnumerateInstanceExtensionProperties =
        EnumerateInstanceExtensionProperties,
    .CreateInstance = CreateInstance,
    .GetInstanceProcAddr = GetInstanceProcAddr};
150
OpenDevice(const hw_module_t *,const char * id,hw_device_t ** device)151 int OpenDevice(const hw_module_t* /*module*/,
152 const char* id,
153 hw_device_t** device) {
154 if (strcmp(id, HWVULKAN_DEVICE_0) == 0) {
155 *device = &nulldrv_device.common;
156 return 0;
157 }
158 return -ENOENT;
159 }
160
// Recovers the owning VkInstance_T from a physical-device pointer. Valid
// because the only VkPhysicalDevice_T objects are the 'physical_device'
// members embedded in VkInstance_T, so subtracting that member's offset
// yields the enclosing instance.
VkInstance_T* GetInstanceFromPhysicalDevice(
    VkPhysicalDevice_T* physical_device) {
    return reinterpret_cast<VkInstance_T*>(
        reinterpret_cast<uintptr_t>(physical_device) -
        offsetof(VkInstance_T, physical_device));
}
167
AllocHandle(uint64_t type,uint64_t * next_handle)168 uint64_t AllocHandle(uint64_t type, uint64_t* next_handle) {
169 const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1;
170 ALOGE_IF(*next_handle == kHandleMask,
171 "non-dispatchable handles of type=%" PRIu64
172 " are about to overflow",
173 type);
174 return (UINT64_C(1) << 63) | ((type & 0x7) << 56) |
175 ((*next_handle)++ & kHandleMask);
176 }
177
// Allocates an instance-scope non-dispatchable handle (debug report
// callbacks), drawing from the instance-wide counter.
template <class Handle>
Handle AllocHandle(VkInstance instance, HandleType::Enum type) {
    return reinterpret_cast<Handle>(
        AllocHandle(type, &instance->next_callback_handle));
}

// Allocates a device-scope non-dispatchable handle from the device's
// per-type counter.
template <class Handle>
Handle AllocHandle(VkDevice device, HandleType::Enum type) {
    return reinterpret_cast<Handle>(
        AllocHandle(type, &device->next_handle[type]));
}
189
DefaultAllocate(void *,size_t size,size_t alignment,VkSystemAllocationScope)190 VKAPI_ATTR void* DefaultAllocate(void*,
191 size_t size,
192 size_t alignment,
193 VkSystemAllocationScope) {
194 void* ptr = nullptr;
195 // Vulkan requires 'alignment' to be a power of two, but posix_memalign
196 // additionally requires that it be at least sizeof(void*).
197 int ret = posix_memalign(&ptr, std::max(alignment, sizeof(void*)), size);
198 return ret == 0 ? ptr : nullptr;
199 }
200
// Default pfnReallocation: emulates realloc while honoring the requested
// alignment. size == 0 acts as a free; growing does allocate-copy-free;
// shrinking keeps the existing block (see TODO).
VKAPI_ATTR void* DefaultReallocate(void*,
                                   void* ptr,
                                   size_t size,
                                   size_t alignment,
                                   VkSystemAllocationScope) {
    if (size == 0) {
        free(ptr);
        return nullptr;
    }

    // TODO(jessehall): Right now we never shrink allocations; if the new
    // request is smaller than the existing chunk, we just continue using it.
    // The null driver never reallocs, so this doesn't matter. If that changes,
    // or if this code is copied into some other project, this should probably
    // have a heuristic to allocate-copy-free when doing so will save "enough"
    // space.
    size_t old_size = ptr ? malloc_usable_size(ptr) : 0;
    if (size <= old_size)
        return ptr;

    void* new_ptr = nullptr;
    if (posix_memalign(&new_ptr, std::max(alignment, sizeof(void*)), size) != 0)
        return nullptr;
    if (ptr) {
        // old_size bounds the bytes valid in the old block; copy the
        // smaller of old and new sizes, then release the old block.
        memcpy(new_ptr, ptr, std::min(old_size, size));
        free(ptr);
    }
    return new_ptr;
}
230
// Default pfnFree: plain free() is correct because DefaultAllocate and
// DefaultReallocate obtain memory with posix_memalign.
VKAPI_ATTR void DefaultFree(void*, void* ptr) {
    free(ptr);
}

// Callbacks substituted whenever the application passes a null
// VkAllocationCallbacks pointer.
const VkAllocationCallbacks kDefaultAllocCallbacks = {
    .pUserData = nullptr,
    .pfnAllocation = DefaultAllocate,
    .pfnReallocation = DefaultReallocate,
    .pfnFree = DefaultFree,
};
241
242 } // namespace
243
244 namespace null_driver {
245
// For each pointer-backed object type T, defines the helper pair converting
// between the opaque Vk##T handle and the driver-side T* state:
//   T* GetTFromHandle(VkT)  and  VkT GetHandleToT(const T*).
// These handles are plain pointers (high bit clear), unlike the
// counter-based handles produced by AllocHandle.
#define DEFINE_OBJECT_HANDLE_CONVERSION(T)              \
    T* Get##T##FromHandle(Vk##T h);                     \
    T* Get##T##FromHandle(Vk##T h) {                    \
        return reinterpret_cast<T*>(uintptr_t(h));      \
    }                                                   \
    Vk##T GetHandleTo##T(const T* obj);                 \
    Vk##T GetHandleTo##T(const T* obj) {                \
        return Vk##T(reinterpret_cast<uintptr_t>(obj)); \
    }
255
256 // -----------------------------------------------------------------------------
257 // Global
258
// vkEnumerateInstanceExtensionProperties: the null driver normally reports
// no instance extensions; the disabled branch below can be enabled to
// advertise VK_EXT_debug_report for loader testing.
VKAPI_ATTR
VkResult EnumerateInstanceExtensionProperties(
    const char* layer_name,
    uint32_t* count,
    VkExtensionProperties* properties) {
    // Layer-filtered queries are the loader's job, not the driver's; warn
    // but still answer with the driver's own extension list.
    if (layer_name) {
        ALOGW(
            "Driver vkEnumerateInstanceExtensionProperties shouldn't be called "
            "with a layer name ('%s')",
            layer_name);
    }

    // NOTE: Change this to zero to report an extension, which can be useful
    // for testing changes to the loader.
#if 1
    (void)properties;  // unused
    *count = 0;
    return VK_SUCCESS;
#else
    // Two-call idiom: clamp the caller's capacity, copy what fits, and
    // report VK_INCOMPLETE when the output array was too small.
    const VkExtensionProperties kExtensions[] = {
        {VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
    const uint32_t kExtensionsCount =
        sizeof(kExtensions) / sizeof(kExtensions[0]);

    if (!properties || *count > kExtensionsCount)
        *count = kExtensionsCount;
    if (properties)
        std::copy(kExtensions, kExtensions + *count, properties);
    return *count < kExtensionsCount ? VK_INCOMPLETE : VK_SUCCESS;
#endif
}
290
291 VKAPI_ATTR
CreateInstance(const VkInstanceCreateInfo * create_info,const VkAllocationCallbacks * allocator,VkInstance * out_instance)292 VkResult CreateInstance(const VkInstanceCreateInfo* create_info,
293 const VkAllocationCallbacks* allocator,
294 VkInstance* out_instance) {
295 if (!allocator)
296 allocator = &kDefaultAllocCallbacks;
297
298 VkInstance_T* instance =
299 static_cast<VkInstance_T*>(allocator->pfnAllocation(
300 allocator->pUserData, sizeof(VkInstance_T), alignof(VkInstance_T),
301 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE));
302 if (!instance)
303 return VK_ERROR_OUT_OF_HOST_MEMORY;
304
305 instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
306 instance->allocator = *allocator;
307 instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
308 instance->next_callback_handle = 0;
309
310 for (uint32_t i = 0; i < create_info->enabledExtensionCount; i++) {
311 if (strcmp(create_info->ppEnabledExtensionNames[i],
312 VK_EXT_DEBUG_REPORT_EXTENSION_NAME) == 0) {
313 ALOGV("instance extension '%s' requested",
314 create_info->ppEnabledExtensionNames[i]);
315 } else {
316 ALOGW("unsupported extension '%s' requested",
317 create_info->ppEnabledExtensionNames[i]);
318 }
319 }
320
321 *out_instance = instance;
322 return VK_SUCCESS;
323 }
324
// vkGetInstanceProcAddr: with a valid instance, resolve through the
// instance-level lookup table; with a null instance only global commands
// are queryable, so use the global table. (Both name-based lookup helpers
// come from null_driver_gen.h.)
VKAPI_ATTR
PFN_vkVoidFunction GetInstanceProcAddr(VkInstance instance, const char* name) {
    return instance ? GetInstanceProcAddr(name) : GetGlobalProcAddr(name);
}

// vkGetDeviceProcAddr: the null driver has one implementation of every
// function, so device-level queries reuse the instance-level table.
VKAPI_ATTR
PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) {
    return GetInstanceProcAddr(name);
}
334
335 // -----------------------------------------------------------------------------
336 // Instance
337
DestroyInstance(VkInstance instance,const VkAllocationCallbacks *)338 void DestroyInstance(VkInstance instance,
339 const VkAllocationCallbacks* /*allocator*/) {
340 instance->allocator.pfnFree(instance->allocator.pUserData, instance);
341 }
342
343 // -----------------------------------------------------------------------------
344 // PhysicalDevice
345
EnumeratePhysicalDevices(VkInstance instance,uint32_t * physical_device_count,VkPhysicalDevice * physical_devices)346 VkResult EnumeratePhysicalDevices(VkInstance instance,
347 uint32_t* physical_device_count,
348 VkPhysicalDevice* physical_devices) {
349 if (physical_devices && *physical_device_count >= 1)
350 physical_devices[0] = &instance->physical_device;
351 *physical_device_count = 1;
352 return VK_SUCCESS;
353 }
354
// vkEnumerateDeviceLayerProperties: layers are strictly a loader concept,
// so a conformant loader never forwards this to the driver; warn and
// report zero layers.
VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice /*gpu*/,
                                        uint32_t* count,
                                        VkLayerProperties* /*properties*/) {
    ALOGW("Driver vkEnumerateDeviceLayerProperties shouldn't be called");
    *count = 0;
    return VK_SUCCESS;
}
362
// vkEnumerateDeviceExtensionProperties: the only device extension the null
// driver implements is VK_ANDROID_native_buffer.
VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice /*gpu*/,
                                            const char* layer_name,
                                            uint32_t* count,
                                            VkExtensionProperties* properties) {
    // Layer-filtered queries belong to the loader; report zero extensions
    // for any named layer.
    if (layer_name) {
        ALOGW(
            "Driver vkEnumerateDeviceExtensionProperties shouldn't be called "
            "with a layer name ('%s')",
            layer_name);
        *count = 0;
        return VK_SUCCESS;
    }

    const VkExtensionProperties kExtensions[] = {
        {VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME,
         VK_ANDROID_NATIVE_BUFFER_SPEC_VERSION}};
    const uint32_t kExtensionsCount =
        sizeof(kExtensions) / sizeof(kExtensions[0]);

    // Two-call idiom: clamp the caller's capacity, copy what fits, and
    // return VK_INCOMPLETE when the output array was too small.
    if (!properties || *count > kExtensionsCount)
        *count = kExtensionsCount;
    if (properties)
        std::copy(kExtensions, kExtensions + *count, properties);
    return *count < kExtensionsCount ? VK_INCOMPLETE : VK_SUCCESS;
}
388
// vkGetPhysicalDeviceProperties: static identity plus a limits block at
// (or near) the Vulkan 1.0 required minimums. The initializer below is
// positional — every VkPhysicalDeviceLimits field in declaration order —
// so entries must not be reordered or removed.
void GetPhysicalDeviceProperties(VkPhysicalDevice,
                                 VkPhysicalDeviceProperties* properties) {
    properties->apiVersion = VK_MAKE_VERSION(1, 0, VK_HEADER_VERSION);
    properties->driverVersion = VK_MAKE_VERSION(0, 0, 1);
    properties->vendorID = 0;
    properties->deviceID = 0;
    properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
    strcpy(properties->deviceName, "Android Vulkan Null Driver");
    memset(properties->pipelineCacheUUID, 0,
           sizeof(properties->pipelineCacheUUID));
    properties->limits = VkPhysicalDeviceLimits{
        4096,     // maxImageDimension1D
        4096,     // maxImageDimension2D
        256,      // maxImageDimension3D
        4096,     // maxImageDimensionCube
        256,      // maxImageArrayLayers
        65536,    // maxTexelBufferElements
        16384,    // maxUniformBufferRange
        1 << 27,  // maxStorageBufferRange
        128,      // maxPushConstantsSize
        4096,     // maxMemoryAllocationCount
        4000,     // maxSamplerAllocationCount
        1,        // bufferImageGranularity
        0,        // sparseAddressSpaceSize
        4,        // maxBoundDescriptorSets
        16,       // maxPerStageDescriptorSamplers
        12,       // maxPerStageDescriptorUniformBuffers
        4,        // maxPerStageDescriptorStorageBuffers
        16,       // maxPerStageDescriptorSampledImages
        4,        // maxPerStageDescriptorStorageImages
        4,        // maxPerStageDescriptorInputAttachments
        128,      // maxPerStageResources
        96,       // maxDescriptorSetSamplers
        72,       // maxDescriptorSetUniformBuffers
        8,        // maxDescriptorSetUniformBuffersDynamic
        24,       // maxDescriptorSetStorageBuffers
        4,        // maxDescriptorSetStorageBuffersDynamic
        96,       // maxDescriptorSetSampledImages
        24,       // maxDescriptorSetStorageImages
        4,        // maxDescriptorSetInputAttachments
        16,       // maxVertexInputAttributes
        16,       // maxVertexInputBindings
        2047,     // maxVertexInputAttributeOffset
        2048,     // maxVertexInputBindingStride
        64,       // maxVertexOutputComponents
        0,        // maxTessellationGenerationLevel
        0,        // maxTessellationPatchSize
        0,        // maxTessellationControlPerVertexInputComponents
        0,        // maxTessellationControlPerVertexOutputComponents
        0,        // maxTessellationControlPerPatchOutputComponents
        0,        // maxTessellationControlTotalOutputComponents
        0,        // maxTessellationEvaluationInputComponents
        0,        // maxTessellationEvaluationOutputComponents
        0,        // maxGeometryShaderInvocations
        0,        // maxGeometryInputComponents
        0,        // maxGeometryOutputComponents
        0,        // maxGeometryOutputVertices
        0,        // maxGeometryTotalOutputComponents
        64,       // maxFragmentInputComponents
        4,        // maxFragmentOutputAttachments
        0,        // maxFragmentDualSrcAttachments
        4,        // maxFragmentCombinedOutputResources
        16384,    // maxComputeSharedMemorySize
        {65536, 65536, 65536},  // maxComputeWorkGroupCount[3]
        128,                    // maxComputeWorkGroupInvocations
        {128, 128, 64},         // maxComputeWorkGroupSize[3]
        4,                      // subPixelPrecisionBits
        4,                      // subTexelPrecisionBits
        4,                      // mipmapPrecisionBits
        UINT32_MAX,             // maxDrawIndexedIndexValue
        1,                      // maxDrawIndirectCount
        2,                      // maxSamplerLodBias
        1,                      // maxSamplerAnisotropy
        1,                      // maxViewports
        {4096, 4096},           // maxViewportDimensions[2]
        {-8192.0f, 8191.0f},    // viewportBoundsRange[2]
        0,                      // viewportSubPixelBits
        64,                     // minMemoryMapAlignment
        256,                    // minTexelBufferOffsetAlignment
        256,                    // minUniformBufferOffsetAlignment
        256,                    // minStorageBufferOffsetAlignment
        -8,                     // minTexelOffset
        7,                      // maxTexelOffset
        0,                      // minTexelGatherOffset
        0,                      // maxTexelGatherOffset
        0.0f,                   // minInterpolationOffset
        0.0f,                   // maxInterpolationOffset
        0,                      // subPixelInterpolationOffsetBits
        4096,                   // maxFramebufferWidth
        4096,                   // maxFramebufferHeight
        256,                    // maxFramebufferLayers
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferColorSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferDepthSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferStencilSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferNoAttachmentsSampleCounts
        4,                          // maxColorAttachments
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // sampledImageColorSampleCounts
        VK_SAMPLE_COUNT_1_BIT,      // sampledImageIntegerSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // sampledImageDepthSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // sampledImageStencilSampleCounts
        VK_SAMPLE_COUNT_1_BIT,      // storageImageSampleCounts
        1,                          // maxSampleMaskWords
        VK_TRUE,                    // timestampComputeAndGraphics
        1,                          // timestampPeriod
        0,                          // maxClipDistances
        0,                          // maxCullDistances
        0,                          // maxCombinedClipAndCullDistances
        2,                          // discreteQueuePriorities
        {1.0f, 1.0f},               // pointSizeRange[2]
        {1.0f, 1.0f},               // lineWidthRange[2]
        0.0f,                       // pointSizeGranularity
        0.0f,                       // lineWidthGranularity
        VK_TRUE,                    // strictLines
        VK_TRUE,                    // standardSampleLocations
        1,                          // optimalBufferCopyOffsetAlignment
        1,                          // optimalBufferCopyRowPitchAlignment
        64,                         // nonCoherentAtomSize
    };
}
515
// vkGetPhysicalDeviceQueueFamilyProperties: one universal queue family
// (graphics + compute + transfer) containing a single queue. Follows the
// two-call idiom for a void query: clamp the count, then fill if an output
// array was provided and has room.
void GetPhysicalDeviceQueueFamilyProperties(
    VkPhysicalDevice,
    uint32_t* count,
    VkQueueFamilyProperties* properties) {
    if (!properties || *count > 1)
        *count = 1;
    if (properties && *count == 1) {
        properties->queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT |
                                 VK_QUEUE_TRANSFER_BIT;
        properties->queueCount = 1;
        properties->timestampValidBits = 64;
        properties->minImageTransferGranularity = VkExtent3D{1, 1, 1};
    }
}
530
// vkGetPhysicalDeviceMemoryProperties: a single memory type in a single
// heap of kMaxDeviceMemory bytes. The type is host-visible/coherent/cached
// (AllocateMemory backs it with host memory) and also flagged device-local.
void GetPhysicalDeviceMemoryProperties(
    VkPhysicalDevice,
    VkPhysicalDeviceMemoryProperties* properties) {
    properties->memoryTypeCount = 1;
    properties->memoryTypes[0].propertyFlags =
        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
        VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
        VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    properties->memoryTypes[0].heapIndex = 0;
    properties->memoryHeapCount = 1;
    properties->memoryHeaps[0].size = kMaxDeviceMemory;
    properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
}
545
// vkGetPhysicalDeviceFeatures: only robustBufferAccess (the one feature
// Vulkan requires) is advertised; every optional feature is off. The
// initializer is positional over all VkPhysicalDeviceFeatures fields in
// declaration order — do not reorder entries.
void GetPhysicalDeviceFeatures(VkPhysicalDevice /*gpu*/,
                               VkPhysicalDeviceFeatures* features) {
    *features = VkPhysicalDeviceFeatures{
        VK_TRUE,   // robustBufferAccess
        VK_FALSE,  // fullDrawIndexUint32
        VK_FALSE,  // imageCubeArray
        VK_FALSE,  // independentBlend
        VK_FALSE,  // geometryShader
        VK_FALSE,  // tessellationShader
        VK_FALSE,  // sampleRateShading
        VK_FALSE,  // dualSrcBlend
        VK_FALSE,  // logicOp
        VK_FALSE,  // multiDrawIndirect
        VK_FALSE,  // drawIndirectFirstInstance
        VK_FALSE,  // depthClamp
        VK_FALSE,  // depthBiasClamp
        VK_FALSE,  // fillModeNonSolid
        VK_FALSE,  // depthBounds
        VK_FALSE,  // wideLines
        VK_FALSE,  // largePoints
        VK_FALSE,  // alphaToOne
        VK_FALSE,  // multiViewport
        VK_FALSE,  // samplerAnisotropy
        VK_FALSE,  // textureCompressionETC2
        VK_FALSE,  // textureCompressionASTC_LDR
        VK_FALSE,  // textureCompressionBC
        VK_FALSE,  // occlusionQueryPrecise
        VK_FALSE,  // pipelineStatisticsQuery
        VK_FALSE,  // vertexPipelineStoresAndAtomics
        VK_FALSE,  // fragmentStoresAndAtomics
        VK_FALSE,  // shaderTessellationAndGeometryPointSize
        VK_FALSE,  // shaderImageGatherExtended
        VK_FALSE,  // shaderStorageImageExtendedFormats
        VK_FALSE,  // shaderStorageImageMultisample
        VK_FALSE,  // shaderStorageImageReadWithoutFormat
        VK_FALSE,  // shaderStorageImageWriteWithoutFormat
        VK_FALSE,  // shaderUniformBufferArrayDynamicIndexing
        VK_FALSE,  // shaderSampledImageArrayDynamicIndexing
        VK_FALSE,  // shaderStorageBufferArrayDynamicIndexing
        VK_FALSE,  // shaderStorageImageArrayDynamicIndexing
        VK_FALSE,  // shaderClipDistance
        VK_FALSE,  // shaderCullDistance
        VK_FALSE,  // shaderFloat64
        VK_FALSE,  // shaderInt64
        VK_FALSE,  // shaderInt16
        VK_FALSE,  // shaderResourceResidency
        VK_FALSE,  // shaderResourceMinLod
        VK_FALSE,  // sparseBinding
        VK_FALSE,  // sparseResidencyBuffer
        VK_FALSE,  // sparseResidencyImage2D
        VK_FALSE,  // sparseResidencyImage3D
        VK_FALSE,  // sparseResidency2Samples
        VK_FALSE,  // sparseResidency4Samples
        VK_FALSE,  // sparseResidency8Samples
        VK_FALSE,  // sparseResidency16Samples
        VK_FALSE,  // sparseResidencyAliased
        VK_FALSE,  // variableMultisampleRate
        VK_FALSE,  // inheritedQueries
    };
}
606
607 // -----------------------------------------------------------------------------
608 // Device
609
CreateDevice(VkPhysicalDevice physical_device,const VkDeviceCreateInfo * create_info,const VkAllocationCallbacks * allocator,VkDevice * out_device)610 VkResult CreateDevice(VkPhysicalDevice physical_device,
611 const VkDeviceCreateInfo* create_info,
612 const VkAllocationCallbacks* allocator,
613 VkDevice* out_device) {
614 VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device);
615 if (!allocator)
616 allocator = &instance->allocator;
617 VkDevice_T* device = static_cast<VkDevice_T*>(allocator->pfnAllocation(
618 allocator->pUserData, sizeof(VkDevice_T), alignof(VkDevice_T),
619 VK_SYSTEM_ALLOCATION_SCOPE_DEVICE));
620 if (!device)
621 return VK_ERROR_OUT_OF_HOST_MEMORY;
622
623 device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
624 device->allocator = *allocator;
625 device->instance = instance;
626 device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
627 std::fill(device->next_handle.begin(), device->next_handle.end(),
628 UINT64_C(0));
629
630 for (uint32_t i = 0; i < create_info->enabledExtensionCount; i++) {
631 if (strcmp(create_info->ppEnabledExtensionNames[i],
632 VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME) == 0) {
633 ALOGV("Enabling " VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME);
634 }
635 }
636
637 *out_device = device;
638 return VK_SUCCESS;
639 }
640
DestroyDevice(VkDevice device,const VkAllocationCallbacks *)641 void DestroyDevice(VkDevice device,
642 const VkAllocationCallbacks* /*allocator*/) {
643 if (!device)
644 return;
645 device->allocator.pfnFree(device->allocator.pUserData, device);
646 }
647
// vkGetDeviceQueue: there is exactly one queue family with one queue, so
// the family and index arguments are ignored and the embedded queue is
// returned.
void GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) {
    *queue = &device->queue;
}
651
652 // -----------------------------------------------------------------------------
653 // CommandPool
654
// Driver-side state for VkCommandPool: just the allocator (captured at
// creation) used to carve out this pool's command buffers.
struct CommandPool {
    typedef VkCommandPool HandleType;
    VkAllocationCallbacks allocator;
};
DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool)
660
661 VkResult CreateCommandPool(VkDevice device,
662 const VkCommandPoolCreateInfo* /*create_info*/,
663 const VkAllocationCallbacks* allocator,
664 VkCommandPool* cmd_pool) {
665 if (!allocator)
666 allocator = &device->allocator;
667 CommandPool* pool = static_cast<CommandPool*>(allocator->pfnAllocation(
668 allocator->pUserData, sizeof(CommandPool), alignof(CommandPool),
669 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
670 if (!pool)
671 return VK_ERROR_OUT_OF_HOST_MEMORY;
672 pool->allocator = *allocator;
673 *cmd_pool = GetHandleToCommandPool(pool);
674 return VK_SUCCESS;
675 }
676
DestroyCommandPool(VkDevice,VkCommandPool cmd_pool,const VkAllocationCallbacks *)677 void DestroyCommandPool(VkDevice /*device*/,
678 VkCommandPool cmd_pool,
679 const VkAllocationCallbacks* /*allocator*/) {
680 CommandPool* pool = GetCommandPoolFromHandle(cmd_pool);
681 pool->allocator.pfnFree(pool->allocator.pUserData, pool);
682 }
683
684 // -----------------------------------------------------------------------------
685 // CmdBuffer
686
// vkAllocateCommandBuffers: carves commandBufferCount dispatchable command
// buffer objects out of the pool's allocator. All-or-nothing: on any
// failure every buffer allocated so far is released and the call fails.
VkResult AllocateCommandBuffers(VkDevice /*device*/,
                                const VkCommandBufferAllocateInfo* alloc_info,
                                VkCommandBuffer* cmdbufs) {
    VkResult result = VK_SUCCESS;
    CommandPool& pool = *GetCommandPoolFromHandle(alloc_info->commandPool);
    // Pre-null the output array so the rollback loop below can tell which
    // entries were successfully allocated.
    std::fill(cmdbufs, cmdbufs + alloc_info->commandBufferCount, nullptr);
    for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
        cmdbufs[i] =
            static_cast<VkCommandBuffer_T*>(pool.allocator.pfnAllocation(
                pool.allocator.pUserData, sizeof(VkCommandBuffer_T),
                alignof(VkCommandBuffer_T), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
        if (!cmdbufs[i]) {
            result = VK_ERROR_OUT_OF_HOST_MEMORY;
            break;
        }
        cmdbufs[i]->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    }
    if (result != VK_SUCCESS) {
        // Roll back: the first null entry marks where allocation stopped.
        for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
            if (!cmdbufs[i])
                break;
            pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
        }
    }
    return result;
}
713
FreeCommandBuffers(VkDevice,VkCommandPool cmd_pool,uint32_t count,const VkCommandBuffer * cmdbufs)714 void FreeCommandBuffers(VkDevice /*device*/,
715 VkCommandPool cmd_pool,
716 uint32_t count,
717 const VkCommandBuffer* cmdbufs) {
718 CommandPool& pool = *GetCommandPoolFromHandle(cmd_pool);
719 for (uint32_t i = 0; i < count; i++)
720 pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
721 }
722
723 // -----------------------------------------------------------------------------
724 // DeviceMemory
725
// Driver-side state for VkDeviceMemory: a small header followed by the
// mappable payload. 'data' is a flexible-array-style tail; 16-byte
// alignment matches the alignment reported by Get*MemoryRequirements.
struct DeviceMemory {
    typedef VkDeviceMemory HandleType;
    VkDeviceSize size;
    alignas(16) uint8_t data[0];
};
DEFINE_OBJECT_HANDLE_CONVERSION(DeviceMemory)
732
// vkAllocateMemory: "device" memory is plain host memory. Allocates the
// DeviceMemory header plus allocationSize payload bytes in one block.
VkResult AllocateMemory(VkDevice device,
                        const VkMemoryAllocateInfo* alloc_info,
                        const VkAllocationCallbacks* allocator,
                        VkDeviceMemory* mem_handle) {
    // Reject sizes that would overflow size_t once the header is added
    // (allocationSize is 64-bit even on 32-bit builds).
    if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    if (!allocator)
        allocator = &device->allocator;

    size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize);
    DeviceMemory* mem = static_cast<DeviceMemory*>(allocator->pfnAllocation(
        allocator->pUserData, size, alignof(DeviceMemory),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!mem)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    // NOTE(review): 'size' includes the header, so mem->size records
    // slightly more than the requested allocationSize.
    mem->size = size;
    *mem_handle = GetHandleToDeviceMemory(mem);
    return VK_SUCCESS;
}
752
FreeMemory(VkDevice device,VkDeviceMemory mem_handle,const VkAllocationCallbacks * allocator)753 void FreeMemory(VkDevice device,
754 VkDeviceMemory mem_handle,
755 const VkAllocationCallbacks* allocator) {
756 if (!allocator)
757 allocator = &device->allocator;
758 DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
759 allocator->pfnFree(allocator->pUserData, mem);
760 }
761
// vkMapMemory: the backing store is host memory, so mapping is pointer
// arithmetic into the payload; the length and flags arguments are ignored.
VkResult MapMemory(VkDevice,
                   VkDeviceMemory mem_handle,
                   VkDeviceSize offset,
                   VkDeviceSize,
                   VkMemoryMapFlags,
                   void** out_ptr) {
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    *out_ptr = &mem->data[0] + offset;
    return VK_SUCCESS;
}
772
773 // -----------------------------------------------------------------------------
774 // Buffer
775
// Driver-side state for VkBuffer: only the creation size is recorded; no
// storage exists until memory is bound.
struct Buffer {
    typedef VkBuffer HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Buffer)
781
782 VkResult CreateBuffer(VkDevice device,
783 const VkBufferCreateInfo* create_info,
784 const VkAllocationCallbacks* allocator,
785 VkBuffer* buffer_handle) {
786 ALOGW_IF(create_info->size > kMaxDeviceMemory,
787 "CreateBuffer: requested size 0x%" PRIx64
788 " exceeds max device memory size 0x%" PRIx64,
789 create_info->size, kMaxDeviceMemory);
790 if (!allocator)
791 allocator = &device->allocator;
792 Buffer* buffer = static_cast<Buffer*>(allocator->pfnAllocation(
793 allocator->pUserData, sizeof(Buffer), alignof(Buffer),
794 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
795 if (!buffer)
796 return VK_ERROR_OUT_OF_HOST_MEMORY;
797 buffer->size = create_info->size;
798 *buffer_handle = GetHandleToBuffer(buffer);
799 return VK_SUCCESS;
800 }
801
GetBufferMemoryRequirements(VkDevice,VkBuffer buffer_handle,VkMemoryRequirements * requirements)802 void GetBufferMemoryRequirements(VkDevice,
803 VkBuffer buffer_handle,
804 VkMemoryRequirements* requirements) {
805 Buffer* buffer = GetBufferFromHandle(buffer_handle);
806 requirements->size = buffer->size;
807 requirements->alignment = 16; // allow fast Neon/SSE memcpy
808 requirements->memoryTypeBits = 0x1;
809 }
810
DestroyBuffer(VkDevice device,VkBuffer buffer_handle,const VkAllocationCallbacks * allocator)811 void DestroyBuffer(VkDevice device,
812 VkBuffer buffer_handle,
813 const VkAllocationCallbacks* allocator) {
814 if (!allocator)
815 allocator = &device->allocator;
816 Buffer* buffer = GetBufferFromHandle(buffer_handle);
817 allocator->pfnFree(allocator->pUserData, buffer);
818 }
819
820 // -----------------------------------------------------------------------------
821 // Image
822
// Driver-side state for VkImage: only the computed linear size is kept,
// for GetImageMemoryRequirements.
struct Image {
    typedef VkImage HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Image)
828
829 VkResult CreateImage(VkDevice device,
830 const VkImageCreateInfo* create_info,
831 const VkAllocationCallbacks* allocator,
832 VkImage* image_handle) {
833 if (create_info->imageType != VK_IMAGE_TYPE_2D ||
834 create_info->format != VK_FORMAT_R8G8B8A8_UNORM ||
835 create_info->mipLevels != 1) {
836 ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u",
837 create_info->imageType, create_info->format,
838 create_info->mipLevels);
839 return VK_ERROR_OUT_OF_HOST_MEMORY;
840 }
841
842 VkDeviceSize size =
843 VkDeviceSize(create_info->extent.width * create_info->extent.height) *
844 create_info->arrayLayers * create_info->samples * 4u;
845 ALOGW_IF(size > kMaxDeviceMemory,
846 "CreateImage: image size 0x%" PRIx64
847 " exceeds max device memory size 0x%" PRIx64,
848 size, kMaxDeviceMemory);
849
850 if (!allocator)
851 allocator = &device->allocator;
852 Image* image = static_cast<Image*>(allocator->pfnAllocation(
853 allocator->pUserData, sizeof(Image), alignof(Image),
854 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
855 if (!image)
856 return VK_ERROR_OUT_OF_HOST_MEMORY;
857 image->size = size;
858 *image_handle = GetHandleToImage(image);
859 return VK_SUCCESS;
860 }
861
GetImageMemoryRequirements(VkDevice,VkImage image_handle,VkMemoryRequirements * requirements)862 void GetImageMemoryRequirements(VkDevice,
863 VkImage image_handle,
864 VkMemoryRequirements* requirements) {
865 Image* image = GetImageFromHandle(image_handle);
866 requirements->size = image->size;
867 requirements->alignment = 16; // allow fast Neon/SSE memcpy
868 requirements->memoryTypeBits = 0x1;
869 }
870
DestroyImage(VkDevice device,VkImage image_handle,const VkAllocationCallbacks * allocator)871 void DestroyImage(VkDevice device,
872 VkImage image_handle,
873 const VkAllocationCallbacks* allocator) {
874 if (!allocator)
875 allocator = &device->allocator;
876 Image* image = GetImageFromHandle(image_handle);
877 allocator->pfnFree(allocator->pUserData, image);
878 }
879
// Report the gralloc usage bits this driver needs for swapchain buffers.
VkResult GetSwapchainGrallocUsageANDROID(VkDevice,
                                         VkFormat,
                                         VkImageUsageFlags,
                                         int* grallocUsage) {
    // The null driver never reads or writes the gralloc buffer
    *grallocUsage = 0;
    return VK_SUCCESS;
}
888
AcquireImageANDROID(VkDevice,VkImage,int fence,VkSemaphore,VkFence)889 VkResult AcquireImageANDROID(VkDevice,
890 VkImage,
891 int fence,
892 VkSemaphore,
893 VkFence) {
894 close(fence);
895 return VK_SUCCESS;
896 }
897
// Return the release fence for a presented image. -1 means "already
// signaled": the null driver does no GPU work, so the image is ready
// immediately.
VkResult QueueSignalReleaseImageANDROID(VkQueue,
                                        uint32_t,
                                        const VkSemaphore*,
                                        VkImage,
                                        int* fence) {
    *fence = -1;
    return VK_SUCCESS;
}
906
907 // -----------------------------------------------------------------------------
908 // No-op types
909
// -----------------------------------------------------------------------
// The creation functions below are for object types the driver keeps no
// per-object state for. Each one just mints a fresh tagged handle via
// AllocHandle (see the handle-encoding comment at the top of the file)
// and reports success; create-info contents and allocation callbacks are
// ignored since nothing is heap-allocated.
// -----------------------------------------------------------------------

VkResult CreateBufferView(VkDevice device,
                          const VkBufferViewCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkBufferView* view) {
    *view = AllocHandle<VkBufferView>(device, HandleType::kBufferView);
    return VK_SUCCESS;
}

VkResult CreateDescriptorPool(VkDevice device,
                              const VkDescriptorPoolCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkDescriptorPool* pool) {
    *pool = AllocHandle<VkDescriptorPool>(device, HandleType::kDescriptorPool);
    return VK_SUCCESS;
}

VkResult AllocateDescriptorSets(VkDevice device,
                                const VkDescriptorSetAllocateInfo* alloc_info,
                                VkDescriptorSet* descriptor_sets) {
    // One fresh handle per requested set; the pool is not consulted.
    for (uint32_t i = 0; i < alloc_info->descriptorSetCount; i++)
        descriptor_sets[i] =
            AllocHandle<VkDescriptorSet>(device, HandleType::kDescriptorSet);
    return VK_SUCCESS;
}

VkResult CreateDescriptorSetLayout(VkDevice device,
                                   const VkDescriptorSetLayoutCreateInfo*,
                                   const VkAllocationCallbacks* /*allocator*/,
                                   VkDescriptorSetLayout* layout) {
    *layout = AllocHandle<VkDescriptorSetLayout>(
        device, HandleType::kDescriptorSetLayout);
    return VK_SUCCESS;
}

VkResult CreateEvent(VkDevice device,
                     const VkEventCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkEvent* event) {
    *event = AllocHandle<VkEvent>(device, HandleType::kEvent);
    return VK_SUCCESS;
}

VkResult CreateFence(VkDevice device,
                     const VkFenceCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkFence* fence) {
    *fence = AllocHandle<VkFence>(device, HandleType::kFence);
    return VK_SUCCESS;
}

VkResult CreateFramebuffer(VkDevice device,
                           const VkFramebufferCreateInfo*,
                           const VkAllocationCallbacks* /*allocator*/,
                           VkFramebuffer* framebuffer) {
    *framebuffer = AllocHandle<VkFramebuffer>(device, HandleType::kFramebuffer);
    return VK_SUCCESS;
}

VkResult CreateImageView(VkDevice device,
                         const VkImageViewCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkImageView* view) {
    *view = AllocHandle<VkImageView>(device, HandleType::kImageView);
    return VK_SUCCESS;
}

VkResult CreateGraphicsPipelines(VkDevice device,
                                 VkPipelineCache,
                                 uint32_t count,
                                 const VkGraphicsPipelineCreateInfo*,
                                 const VkAllocationCallbacks* /*allocator*/,
                                 VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreateComputePipelines(VkDevice device,
                                VkPipelineCache,
                                uint32_t count,
                                const VkComputePipelineCreateInfo*,
                                const VkAllocationCallbacks* /*allocator*/,
                                VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreatePipelineCache(VkDevice device,
                             const VkPipelineCacheCreateInfo*,
                             const VkAllocationCallbacks* /*allocator*/,
                             VkPipelineCache* cache) {
    *cache = AllocHandle<VkPipelineCache>(device, HandleType::kPipelineCache);
    return VK_SUCCESS;
}

VkResult CreatePipelineLayout(VkDevice device,
                              const VkPipelineLayoutCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkPipelineLayout* layout) {
    *layout =
        AllocHandle<VkPipelineLayout>(device, HandleType::kPipelineLayout);
    return VK_SUCCESS;
}

VkResult CreateQueryPool(VkDevice device,
                         const VkQueryPoolCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkQueryPool* pool) {
    *pool = AllocHandle<VkQueryPool>(device, HandleType::kQueryPool);
    return VK_SUCCESS;
}

VkResult CreateRenderPass(VkDevice device,
                          const VkRenderPassCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkRenderPass* renderpass) {
    *renderpass = AllocHandle<VkRenderPass>(device, HandleType::kRenderPass);
    return VK_SUCCESS;
}

VkResult CreateSampler(VkDevice device,
                       const VkSamplerCreateInfo*,
                       const VkAllocationCallbacks* /*allocator*/,
                       VkSampler* sampler) {
    *sampler = AllocHandle<VkSampler>(device, HandleType::kSampler);
    return VK_SUCCESS;
}

VkResult CreateSemaphore(VkDevice device,
                         const VkSemaphoreCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkSemaphore* semaphore) {
    *semaphore = AllocHandle<VkSemaphore>(device, HandleType::kSemaphore);
    return VK_SUCCESS;
}

VkResult CreateShaderModule(VkDevice device,
                            const VkShaderModuleCreateInfo*,
                            const VkAllocationCallbacks* /*allocator*/,
                            VkShaderModule* module) {
    *module = AllocHandle<VkShaderModule>(device, HandleType::kShaderModule);
    return VK_SUCCESS;
}

// Note: this one is an instance-level object, so the handle counter lives
// on the VkInstance rather than on a device.
VkResult CreateDebugReportCallbackEXT(VkInstance instance,
                                      const VkDebugReportCallbackCreateInfoEXT*,
                                      const VkAllocationCallbacks*,
                                      VkDebugReportCallbackEXT* callback) {
    *callback = AllocHandle<VkDebugReportCallbackEXT>(
        instance, HandleType::kDebugReportCallbackEXT);
    return VK_SUCCESS;
}
1063
1064 // -----------------------------------------------------------------------------
1065 // No-op entrypoints
1066
1067 // clang-format off
1068 #pragma clang diagnostic push
1069 #pragma clang diagnostic ignored "-Wunused-parameter"
1070
// No-op entrypoints: device queries, queue/device waits, and memory
// binding. Bodies marked with an ALOGV("TODO") are unimplemented stubs
// that log once at verbose level and report success; the rest silently
// succeed because the null driver has no GPU work to wait on or bind.

void GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Submission is accepted and discarded; there is no GPU to execute it.
VkResult QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmitInfo, VkFence fence) {
    return VK_SUCCESS;
}

VkResult QueueWaitIdle(VkQueue queue) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DeviceWaitIdle(VkDevice device) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void UnmapMemory(VkDevice device, VkDeviceMemory mem) {
}

VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

// Binding succeeds without recording anything; no translation tables exist.
VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

void GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
1136
// No-op entrypoints: destruction, status, and lifecycle of the stateless
// object types created above. Destroy* bodies are empty because the
// corresponding Create* functions allocate nothing (handles are just
// counters). Stubs with ALOGV("TODO") are unimplemented; Wait/Get status
// functions report immediate success since nothing is ever pending.

void DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* allocator) {
}

VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {
    return VK_SUCCESS;
}

VkResult GetFenceStatus(VkDevice device, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {
    return VK_SUCCESS;
}

void DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* allocator) {
}

void DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* allocator) {
}

VkResult GetEventStatus(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult SetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult ResetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* allocator) {
}

VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* allocator) {
}

void GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* allocator) {
}

void DestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* allocator) {
}

VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* allocator) {
}

void DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* allocator) {
}

VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* allocator) {
}

void DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* allocator) {
}

void GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult ResetCommandPool(VkDevice device, VkCommandPool cmdPool, VkCommandPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Command buffer recording succeeds but records nothing.
VkResult BeginCommandBuffer(VkCommandBuffer cmdBuffer, const VkCommandBufferBeginInfo* pBeginInfo) {
    return VK_SUCCESS;
}

VkResult EndCommandBuffer(VkCommandBuffer cmdBuffer) {
    return VK_SUCCESS;
}

VkResult ResetCommandBuffer(VkCommandBuffer cmdBuffer, VkCommandBufferResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
1264
CmdBindPipeline(VkCommandBuffer cmdBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipeline pipeline)1265 void CmdBindPipeline(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
1266 }
1267
CmdSetViewport(VkCommandBuffer cmdBuffer,uint32_t firstViewport,uint32_t viewportCount,const VkViewport * pViewports)1268 void CmdSetViewport(VkCommandBuffer cmdBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) {
1269 }
1270
CmdSetScissor(VkCommandBuffer cmdBuffer,uint32_t firstScissor,uint32_t scissorCount,const VkRect2D * pScissors)1271 void CmdSetScissor(VkCommandBuffer cmdBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) {
1272 }
1273
CmdSetLineWidth(VkCommandBuffer cmdBuffer,float lineWidth)1274 void CmdSetLineWidth(VkCommandBuffer cmdBuffer, float lineWidth) {
1275 }
1276
CmdSetDepthBias(VkCommandBuffer cmdBuffer,float depthBias,float depthBiasClamp,float slopeScaledDepthBias)1277 void CmdSetDepthBias(VkCommandBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
1278 }
1279
CmdSetBlendConstants(VkCommandBuffer cmdBuffer,const float blendConst[4])1280 void CmdSetBlendConstants(VkCommandBuffer cmdBuffer, const float blendConst[4]) {
1281 }
1282
CmdSetDepthBounds(VkCommandBuffer cmdBuffer,float minDepthBounds,float maxDepthBounds)1283 void CmdSetDepthBounds(VkCommandBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
1284 }
1285
CmdSetStencilCompareMask(VkCommandBuffer cmdBuffer,VkStencilFaceFlags faceMask,uint32_t stencilCompareMask)1286 void CmdSetStencilCompareMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
1287 }
1288
CmdSetStencilWriteMask(VkCommandBuffer cmdBuffer,VkStencilFaceFlags faceMask,uint32_t stencilWriteMask)1289 void CmdSetStencilWriteMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
1290 }
1291
CmdSetStencilReference(VkCommandBuffer cmdBuffer,VkStencilFaceFlags faceMask,uint32_t stencilReference)1292 void CmdSetStencilReference(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
1293 }
1294
CmdBindDescriptorSets(VkCommandBuffer cmdBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipelineLayout layout,uint32_t firstSet,uint32_t setCount,const VkDescriptorSet * pDescriptorSets,uint32_t dynamicOffsetCount,const uint32_t * pDynamicOffsets)1295 void CmdBindDescriptorSets(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
1296 }
1297
CmdBindIndexBuffer(VkCommandBuffer cmdBuffer,VkBuffer buffer,VkDeviceSize offset,VkIndexType indexType)1298 void CmdBindIndexBuffer(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
1299 }
1300
CmdBindVertexBuffers(VkCommandBuffer cmdBuffer,uint32_t startBinding,uint32_t bindingCount,const VkBuffer * pBuffers,const VkDeviceSize * pOffsets)1301 void CmdBindVertexBuffers(VkCommandBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
1302 }
1303
CmdDraw(VkCommandBuffer cmdBuffer,uint32_t vertexCount,uint32_t instanceCount,uint32_t firstVertex,uint32_t firstInstance)1304 void CmdDraw(VkCommandBuffer cmdBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
1305 }
1306
CmdDrawIndexed(VkCommandBuffer cmdBuffer,uint32_t indexCount,uint32_t instanceCount,uint32_t firstIndex,int32_t vertexOffset,uint32_t firstInstance)1307 void CmdDrawIndexed(VkCommandBuffer cmdBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
1308 }
1309
CmdDrawIndirect(VkCommandBuffer cmdBuffer,VkBuffer buffer,VkDeviceSize offset,uint32_t count,uint32_t stride)1310 void CmdDrawIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
1311 }
1312
CmdDrawIndexedIndirect(VkCommandBuffer cmdBuffer,VkBuffer buffer,VkDeviceSize offset,uint32_t count,uint32_t stride)1313 void CmdDrawIndexedIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
1314 }
1315
CmdDispatch(VkCommandBuffer cmdBuffer,uint32_t x,uint32_t y,uint32_t z)1316 void CmdDispatch(VkCommandBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
1317 }
1318
CmdDispatchIndirect(VkCommandBuffer cmdBuffer,VkBuffer buffer,VkDeviceSize offset)1319 void CmdDispatchIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
1320 }
1321
CmdCopyBuffer(VkCommandBuffer cmdBuffer,VkBuffer srcBuffer,VkBuffer destBuffer,uint32_t regionCount,const VkBufferCopy * pRegions)1322 void CmdCopyBuffer(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
1323 }
1324
CmdCopyImage(VkCommandBuffer cmdBuffer,VkImage srcImage,VkImageLayout srcImageLayout,VkImage destImage,VkImageLayout destImageLayout,uint32_t regionCount,const VkImageCopy * pRegions)1325 void CmdCopyImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
1326 }
1327
CmdBlitImage(VkCommandBuffer cmdBuffer,VkImage srcImage,VkImageLayout srcImageLayout,VkImage destImage,VkImageLayout destImageLayout,uint32_t regionCount,const VkImageBlit * pRegions,VkFilter filter)1328 void CmdBlitImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {
1329 }
1330
CmdCopyBufferToImage(VkCommandBuffer cmdBuffer,VkBuffer srcBuffer,VkImage destImage,VkImageLayout destImageLayout,uint32_t regionCount,const VkBufferImageCopy * pRegions)1331 void CmdCopyBufferToImage(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
1332 }
1333
CmdCopyImageToBuffer(VkCommandBuffer cmdBuffer,VkImage srcImage,VkImageLayout srcImageLayout,VkBuffer destBuffer,uint32_t regionCount,const VkBufferImageCopy * pRegions)1334 void CmdCopyImageToBuffer(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
1335 }
1336
CmdUpdateBuffer(VkCommandBuffer cmdBuffer,VkBuffer destBuffer,VkDeviceSize destOffset,VkDeviceSize dataSize,const uint32_t * pData)1337 void CmdUpdateBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const uint32_t* pData) {
1338 }
1339
CmdFillBuffer(VkCommandBuffer cmdBuffer,VkBuffer destBuffer,VkDeviceSize destOffset,VkDeviceSize fillSize,uint32_t data)1340 void CmdFillBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
1341 }
1342
CmdClearColorImage(VkCommandBuffer cmdBuffer,VkImage image,VkImageLayout imageLayout,const VkClearColorValue * pColor,uint32_t rangeCount,const VkImageSubresourceRange * pRanges)1343 void CmdClearColorImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
1344 }
1345
CmdClearDepthStencilImage(VkCommandBuffer cmdBuffer,VkImage image,VkImageLayout imageLayout,const VkClearDepthStencilValue * pDepthStencil,uint32_t rangeCount,const VkImageSubresourceRange * pRanges)1346 void CmdClearDepthStencilImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
1347 }
1348
CmdClearAttachments(VkCommandBuffer cmdBuffer,uint32_t attachmentCount,const VkClearAttachment * pAttachments,uint32_t rectCount,const VkClearRect * pRects)1349 void CmdClearAttachments(VkCommandBuffer cmdBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {
1350 }
1351
CmdResolveImage(VkCommandBuffer cmdBuffer,VkImage srcImage,VkImageLayout srcImageLayout,VkImage destImage,VkImageLayout destImageLayout,uint32_t regionCount,const VkImageResolve * pRegions)1352 void CmdResolveImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
1353 }
1354
CmdSetEvent(VkCommandBuffer cmdBuffer,VkEvent event,VkPipelineStageFlags stageMask)1355 void CmdSetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
1356 }
1357
CmdResetEvent(VkCommandBuffer cmdBuffer,VkEvent event,VkPipelineStageFlags stageMask)1358 void CmdResetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
1359 }
1360
CmdWaitEvents(VkCommandBuffer commandBuffer,uint32_t eventCount,const VkEvent * pEvents,VkPipelineStageFlags srcStageMask,VkPipelineStageFlags dstStageMask,uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers)1361 void CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
1362 }
1363
// No-op: no execution or memory dependencies exist in a driver that performs
// no work, so all barrier parameters are ignored.
void CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}
1366
// No-op: the null driver gathers no query data; beginning a query records
// nothing.
void CmdBeginQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
}
1369
// No-op: matching CmdBeginQuery is also a no-op; no query results are ever
// produced.
void CmdEndQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
}
1372
// No-op: there is no query-pool state to reset in the null driver.
void CmdResetQueryPool(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount) {
}
1375
// No-op: no timestamp is written since the null driver executes no GPU work
// and keeps no query-pool storage.
void CmdWriteTimestamp(VkCommandBuffer cmdBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t slot) {
}
1378
// No-op: there are no query results to copy; the destination buffer is left
// untouched.
void CmdCopyQueryPoolResults(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
}
1381
// No-op: push-constant data is discarded; the null driver has no pipeline
// state to feed it into.
void CmdPushConstants(VkCommandBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
}
1384
// No-op: render passes are not executed; clear values and framebuffer info in
// pRenderPassBegin are ignored.
void CmdBeginRenderPass(VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {
}
1387
// No-op: subpass transitions carry no state in the null driver.
void CmdNextSubpass(VkCommandBuffer cmdBuffer, VkSubpassContents contents) {
}
1390
// No-op: matching CmdBeginRenderPass is also a no-op; there is nothing to
// resolve or finalize.
void CmdEndRenderPass(VkCommandBuffer cmdBuffer) {
}
1393
// No-op: secondary command buffers contain no recorded work in the null
// driver, so there is nothing to execute.
void CmdExecuteCommands(VkCommandBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCommandBuffer* pCmdBuffers) {
}
1396
// No-op destroy for VK_EXT_debug_report callbacks. NOTE(review): callback
// handles appear to be allocated from the instance's counter (see the handle
// scheme comment at the top of the file) and are never reclaimed, so there is
// nothing to free here — confirm against the corresponding Create function,
// which is outside this view.
void DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) {
}
1399
// No-op: the null driver does not dispatch injected debug-report messages to
// registered callbacks; the message is dropped.
void DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) {
}
1402
1403 #pragma clang diagnostic pop
1404 // clang-format on
1405
1406 } // namespace null_driver
1407