/*
 * Copyright © 2020 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "vk_device.h"

#include "vk_common_entrypoints.h"
#include "vk_instance.h"
#include "vk_log.h"
#include "vk_physical_device.h"
#include "vk_queue.h"
#include "vk_sync.h"
#include "vk_sync_timeline.h"
#include "vk_util.h"
#include "util/u_debug.h"
#include "util/hash_table.h"
#include "util/perf/cpu_trace.h"
#include "util/ralloc.h"

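/* Determine how timeline semaphores will be handled for this device, based
 * on the sync types advertised by the physical device.
 */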
static enum vk_device_timeline_mode
get_timeline_mode(struct vk_physical_device *physical_device)
{
   if (physical_device->supported_sync_types == NULL)
      return VK_DEVICE_TIMELINE_MODE_NONE;

   const struct vk_sync_type *timeline_type = NULL;
   for (const struct vk_sync_type *const *t =
        physical_device->supported_sync_types; *t; t++) {
      if ((*t)->features & VK_SYNC_FEATURE_TIMELINE) {
         /* We can only have one timeline mode */
         assert(timeline_type == NULL);
         timeline_type = *t;
      }
   }

   if (timeline_type == NULL)
      return VK_DEVICE_TIMELINE_MODE_NONE;

   if (vk_sync_type_is_vk_sync_timeline(timeline_type))
      return VK_DEVICE_TIMELINE_MODE_EMULATED;

   if (timeline_type->features & VK_SYNC_FEATURE_WAIT_BEFORE_SIGNAL)
      return VK_DEVICE_TIMELINE_MODE_NATIVE;

   /* For assisted mode, we require a few additional things of all sync types
    * which may be used as semaphores.
    */
   for (const struct vk_sync_type *const *t =
        physical_device->supported_sync_types; *t; t++) {
      if ((*t)->features & VK_SYNC_FEATURE_GPU_WAIT) {
         assert((*t)->features & VK_SYNC_FEATURE_WAIT_PENDING);
         if ((*t)->features & VK_SYNC_FEATURE_BINARY)
            assert((*t)->features & VK_SYNC_FEATURE_CPU_RESET);
      }
   }

   return VK_DEVICE_TIMELINE_MODE_ASSISTED;
}

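/* Record the features enabled at device creation, from both the legacy
 * pEnabledFeatures struct and any feature structs chained into pNext.
 */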
static void
collect_enabled_features(struct vk_device *device,
                         const VkDeviceCreateInfo *pCreateInfo)
{
   if (pCreateInfo->pEnabledFeatures)
      vk_set_physical_device_features_1_0(&device->enabled_features, pCreateInfo->pEnabledFeatures);
   vk_set_physical_device_features(&device->enabled_features, pCreateInfo->pNext);
}

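/* Initialize the common device state: dispatch table, enabled extensions and
 * features, queue list, and the timeline/submit mode derived from the
 * physical device's sync types.
 */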
VkResult
vk_device_init(struct vk_device *device,
               struct vk_physical_device *physical_device,
               const struct vk_device_dispatch_table *dispatch_table,
               const VkDeviceCreateInfo *pCreateInfo,
               const VkAllocationCallbacks *alloc)
{
   memset(device, 0, sizeof(*device));
   vk_object_base_init(device, &device->base, VK_OBJECT_TYPE_DEVICE);
   if (alloc != NULL)
      device->alloc = *alloc;
   else
      device->alloc = physical_device->instance->alloc;

   device->physical = physical_device;

   if (dispatch_table) {
      device->dispatch_table = *dispatch_table;

      /* Add common entrypoints without overwriting driver-provided ones. */
      vk_device_dispatch_table_from_entrypoints(
         &device->dispatch_table, &vk_common_device_entrypoints, false);
   }

   for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
      int idx;
      for (idx = 0; idx < VK_DEVICE_EXTENSION_COUNT; idx++) {
         if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
                    vk_device_extensions[idx].extensionName) == 0)
            break;
      }

      if (idx >= VK_DEVICE_EXTENSION_COUNT)
         return vk_errorf(physical_device, VK_ERROR_EXTENSION_NOT_PRESENT,
                          "%s not supported",
                          pCreateInfo->ppEnabledExtensionNames[i]);

      if (!physical_device->supported_extensions.extensions[idx])
         return vk_errorf(physical_device, VK_ERROR_EXTENSION_NOT_PRESENT,
                          "%s not supported",
                          pCreateInfo->ppEnabledExtensionNames[i]);

#ifdef ANDROID
      if (!vk_android_allowed_device_extensions.extensions[idx])
         return vk_errorf(physical_device, VK_ERROR_EXTENSION_NOT_PRESENT,
                          "%s not supported",
                          pCreateInfo->ppEnabledExtensionNames[i]);
#endif

      device->enabled_extensions.extensions[idx] = true;
   }

   VkResult result =
      vk_physical_device_check_device_features(physical_device,
                                               pCreateInfo);
   if (result != VK_SUCCESS)
      return result;

   collect_enabled_features(device, pCreateInfo);

   p_atomic_set(&device->private_data_next_index, 0);

   list_inithead(&device->queues);

   device->drm_fd = -1;

   device->timeline_mode = get_timeline_mode(physical_device);

   switch (device->timeline_mode) {
   case VK_DEVICE_TIMELINE_MODE_NONE:
   case VK_DEVICE_TIMELINE_MODE_NATIVE:
      device->submit_mode = VK_QUEUE_SUBMIT_MODE_IMMEDIATE;
      break;

   case VK_DEVICE_TIMELINE_MODE_EMULATED:
      device->submit_mode = VK_QUEUE_SUBMIT_MODE_DEFERRED;
      break;

   case VK_DEVICE_TIMELINE_MODE_ASSISTED:
      if (debug_get_bool_option("MESA_VK_ENABLE_SUBMIT_THREAD", false)) {
         device->submit_mode = VK_QUEUE_SUBMIT_MODE_THREADED;
      } else {
         device->submit_mode = VK_QUEUE_SUBMIT_MODE_THREADED_ON_DEMAND;
      }
      break;

   default:
      unreachable("Invalid timeline mode");
   }

#ifdef ANDROID
   mtx_init(&device->swapchain_private_mtx, mtx_plain);
   device->swapchain_private = NULL;
#endif /* ANDROID */

   simple_mtx_init(&device->trace_mtx, mtx_plain);

   return VK_SUCCESS;
}

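/* Tear down the common device state initialized by vk_device_init(). */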
void
vk_device_finish(struct vk_device *device)
{
   /* Drivers should tear down their own queues */
   assert(list_is_empty(&device->queues));

   vk_memory_trace_finish(device);

#ifdef ANDROID
   if (device->swapchain_private) {
      hash_table_foreach(device->swapchain_private, entry)
         util_sparse_array_finish(entry->data);
      ralloc_free(device->swapchain_private);
   }
#endif /* ANDROID */

   simple_mtx_destroy(&device->trace_mtx);

   vk_object_base_finish(&device->base);
}

void
vk_device_enable_threaded_submit(struct vk_device *device)
{
   /* This must be called before any queues are created */
   assert(list_is_empty(&device->queues));

   /* In order to use threaded submit, we need every sync type that can be
    * used as a wait fence for vkQueueSubmit() to support WAIT_PENDING.
    * It's required for cross-thread/process submit re-ordering.
    */
   for (const struct vk_sync_type *const *t =
        device->physical->supported_sync_types; *t; t++) {
      if ((*t)->features & VK_SYNC_FEATURE_GPU_WAIT)
         assert((*t)->features & VK_SYNC_FEATURE_WAIT_PENDING);
   }

   /* Any binary vk_sync types which will be used as permanent semaphore
    * payloads also need to support vk_sync_type::move, but that's a lot
    * harder to assert since it only applies to permanent semaphore payloads.
    */

   if (device->submit_mode != VK_QUEUE_SUBMIT_MODE_THREADED)
      device->submit_mode = VK_QUEUE_SUBMIT_MODE_THREADED_ON_DEMAND;
}

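/* In deferred submit mode, drain every queue's deferred submissions, looping
 * until no queue makes further progress.
 */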
VkResult
vk_device_flush(struct vk_device *device)
{
   if (device->submit_mode != VK_QUEUE_SUBMIT_MODE_DEFERRED)
      return VK_SUCCESS;

   bool progress;
   do {
      progress = false;

      vk_foreach_queue(queue, device) {
         uint32_t queue_submit_count;
         VkResult result = vk_queue_flush(queue, &queue_submit_count);
         if (unlikely(result != VK_SUCCESS))
            return result;

         if (queue_submit_count)
            progress = true;
      }
   } while (progress);

   return VK_SUCCESS;
}

static const char *
timeline_mode_str(struct vk_device *device)
{
   switch (device->timeline_mode) {
#define CASE(X) case VK_DEVICE_TIMELINE_MODE_##X: return #X;
   CASE(NONE)
   CASE(EMULATED)
   CASE(ASSISTED)
   CASE(NATIVE)
#undef CASE
   default: return "UNKNOWN";
   }
}

void
_vk_device_report_lost(struct vk_device *device)
{
   assert(p_atomic_read(&device->_lost.lost) > 0);

   device->_lost.reported = true;

   vk_foreach_queue(queue, device) {
      if (queue->_lost.lost) {
         __vk_errorf(queue, VK_ERROR_DEVICE_LOST,
                     queue->_lost.error_file, queue->_lost.error_line,
                     "%s", queue->_lost.error_msg);
      }
   }

   vk_logd(VK_LOG_OBJS(device), "Timeline mode is %s.",
           timeline_mode_str(device));
}

VkResult
_vk_device_set_lost(struct vk_device *device,
                    const char *file, int line,
                    const char *msg, ...)
{
   /* This flushes out any per-queue device lost messages */
   if (vk_device_is_lost(device))
      return VK_ERROR_DEVICE_LOST;

   p_atomic_inc(&device->_lost.lost);
   device->_lost.reported = true;

   va_list ap;
   va_start(ap, msg);
   __vk_errorv(device, VK_ERROR_DEVICE_LOST, file, line, msg, ap);
   va_end(ap);

   vk_logd(VK_LOG_OBJS(device), "Timeline mode is %s.",
           timeline_mode_str(device));

   if (debug_get_bool_option("MESA_VK_ABORT_ON_DEVICE_LOSS", false))
      abort();

   return VK_ERROR_DEVICE_LOST;
}

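/* Resolve a device-level entrypoint by name, returning NULL when it is not
 * supported for the instance's API version and the enabled extensions.
 */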
PFN_vkVoidFunction
vk_device_get_proc_addr(const struct vk_device *device,
                        const char *name)
{
   if (device == NULL || name == NULL)
      return NULL;

   struct vk_instance *instance = device->physical->instance;
   return vk_device_dispatch_table_get_if_supported(&device->dispatch_table,
                                                    name,
                                                    instance->app_info.api_version,
                                                    &instance->enabled_extensions,
                                                    &device->enabled_extensions);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
vk_common_GetDeviceProcAddr(VkDevice _device,
                            const char *pName)
{
   VK_FROM_HANDLE(vk_device, device, _device);
   return vk_device_get_proc_addr(device, pName);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetDeviceQueue(VkDevice _device,
                         uint32_t queueFamilyIndex,
                         uint32_t queueIndex,
                         VkQueue *pQueue)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   const VkDeviceQueueInfo2 info = {
      .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
      .pNext = NULL,
      /* flags = 0 because (Vulkan spec 1.2.170 - vkGetDeviceQueue):
       *
       *    "vkGetDeviceQueue must only be used to get queues that were
       *    created with the flags parameter of VkDeviceQueueCreateInfo set
       *    to zero. To get queues that were created with a non-zero flags
       *    parameter use vkGetDeviceQueue2."
       */
      .flags = 0,
      .queueFamilyIndex = queueFamilyIndex,
      .queueIndex = queueIndex,
   };

   device->dispatch_table.GetDeviceQueue2(_device, &info, pQueue);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetDeviceQueue2(VkDevice _device,
                          const VkDeviceQueueInfo2 *pQueueInfo,
                          VkQueue *pQueue)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   struct vk_queue *queue = NULL;
   vk_foreach_queue(iter, device) {
      if (iter->queue_family_index == pQueueInfo->queueFamilyIndex &&
          iter->index_in_family == pQueueInfo->queueIndex) {
         queue = iter;
         break;
      }
   }

   /* From the Vulkan 1.1.70 spec:
    *
    *    "The queue returned by vkGetDeviceQueue2 must have the same flags
    *    value from this structure as that used at device creation time in a
    *    VkDeviceQueueCreateInfo instance. If no matching flags were specified
    *    at device creation time then pQueue will return VK_NULL_HANDLE."
    */
   if (queue && queue->flags == pQueueInfo->flags)
      *pQueue = vk_queue_to_handle(queue);
   else
      *pQueue = VK_NULL_HANDLE;
}

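/* The Vulkan 1.0 map/unmap entrypoints below are implemented in terms of the
 * VK_KHR_map_memory2 entrypoints in the device dispatch table.
 */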
VKAPI_ATTR VkResult VKAPI_CALL
vk_common_MapMemory(VkDevice _device,
                    VkDeviceMemory memory,
                    VkDeviceSize offset,
                    VkDeviceSize size,
                    VkMemoryMapFlags flags,
                    void **ppData)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   const VkMemoryMapInfoKHR info = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR,
      .flags = flags,
      .memory = memory,
      .offset = offset,
      .size = size,
   };

   return device->dispatch_table.MapMemory2KHR(_device, &info, ppData);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_UnmapMemory(VkDevice _device,
                      VkDeviceMemory memory)
{
   VK_FROM_HANDLE(vk_device, device, _device);
   ASSERTED VkResult result;

   const VkMemoryUnmapInfoKHR info = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR,
      .memory = memory,
   };

   result = device->dispatch_table.UnmapMemory2KHR(_device, &info);
   assert(result == VK_SUCCESS);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetDeviceGroupPeerMemoryFeatures(
   VkDevice device,
   uint32_t heapIndex,
   uint32_t localDeviceIndex,
   uint32_t remoteDeviceIndex,
   VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
{
   assert(localDeviceIndex == 0 && remoteDeviceIndex == 0);
   *pPeerMemoryFeatures = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT |
                          VK_PEER_MEMORY_FEATURE_COPY_DST_BIT |
                          VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT |
                          VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT;
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetImageMemoryRequirements(VkDevice _device,
                                     VkImage image,
                                     VkMemoryRequirements *pMemoryRequirements)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   VkImageMemoryRequirementsInfo2 info = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
      .image = image,
   };
   VkMemoryRequirements2 reqs = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
   };
   device->dispatch_table.GetImageMemoryRequirements2(_device, &info, &reqs);

   *pMemoryRequirements = reqs.memoryRequirements;
}

VKAPI_ATTR VkResult VKAPI_CALL
vk_common_BindImageMemory(VkDevice _device,
                          VkImage image,
                          VkDeviceMemory memory,
                          VkDeviceSize memoryOffset)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   VkBindImageMemoryInfo bind = {
      .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
      .image = image,
      .memory = memory,
      .memoryOffset = memoryOffset,
   };

   return device->dispatch_table.BindImageMemory2(_device, 1, &bind);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetImageSparseMemoryRequirements(VkDevice _device,
                                           VkImage image,
                                           uint32_t *pSparseMemoryRequirementCount,
                                           VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   VkImageSparseMemoryRequirementsInfo2 info = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
      .image = image,
   };

   if (!pSparseMemoryRequirements) {
      device->dispatch_table.GetImageSparseMemoryRequirements2(_device,
                                                               &info,
                                                               pSparseMemoryRequirementCount,
                                                               NULL);
      return;
   }

   STACK_ARRAY(VkSparseImageMemoryRequirements2, mem_reqs2, *pSparseMemoryRequirementCount);

   for (unsigned i = 0; i < *pSparseMemoryRequirementCount; ++i) {
      mem_reqs2[i].sType = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2;
      mem_reqs2[i].pNext = NULL;
   }

   device->dispatch_table.GetImageSparseMemoryRequirements2(_device,
                                                            &info,
                                                            pSparseMemoryRequirementCount,
                                                            mem_reqs2);

   for (unsigned i = 0; i < *pSparseMemoryRequirementCount; ++i)
      pSparseMemoryRequirements[i] = mem_reqs2[i].memoryRequirements;

   STACK_ARRAY_FINISH(mem_reqs2);
}

VKAPI_ATTR VkResult VKAPI_CALL
vk_common_DeviceWaitIdle(VkDevice _device)
{
   MESA_TRACE_FUNC();

   VK_FROM_HANDLE(vk_device, device, _device);
   const struct vk_device_dispatch_table *disp = &device->dispatch_table;

   vk_foreach_queue(queue, device) {
      VkResult result = disp->QueueWaitIdle(vk_queue_to_handle(queue));
      if (result != VK_SUCCESS)
         return result;
   }

   return VK_SUCCESS;
}

#ifndef _WIN32

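/* Sample the given clock and return the time in nanoseconds.  Falls back
 * from CLOCK_MONOTONIC_RAW to CLOCK_MONOTONIC if the former fails, and
 * returns 0 if the clock cannot be read at all.
 */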
uint64_t
vk_clock_gettime(clockid_t clock_id)
{
   struct timespec current;
   int ret;

   ret = clock_gettime(clock_id, &current);
#ifdef CLOCK_MONOTONIC_RAW
   if (ret < 0 && clock_id == CLOCK_MONOTONIC_RAW)
      ret = clock_gettime(CLOCK_MONOTONIC, &current);
#endif
   if (ret < 0)
      return 0;

   return (uint64_t)current.tv_sec * 1000000000ULL + current.tv_nsec;
}

#endif //!_WIN32

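/* CORE_FEATURE copies one feature bit from the core Vulkan 1.x feature
 * struct ("core") into the extension feature struct being filled out
 * ("features").  The helpers below use it to answer per-extension feature
 * queries from the corresponding core feature structs.
 */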
#define CORE_FEATURE(feature) features->feature = core->feature

bool
vk_get_physical_device_core_1_1_feature_ext(struct VkBaseOutStructure *ext,
                                            const VkPhysicalDeviceVulkan11Features *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: {
      VkPhysicalDevice16BitStorageFeatures *features = (void *)ext;
      CORE_FEATURE(storageBuffer16BitAccess);
      CORE_FEATURE(uniformAndStorageBuffer16BitAccess);
      CORE_FEATURE(storagePushConstant16);
      CORE_FEATURE(storageInputOutput16);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES: {
      VkPhysicalDeviceMultiviewFeatures *features = (void *)ext;
      CORE_FEATURE(multiview);
      CORE_FEATURE(multiviewGeometryShader);
      CORE_FEATURE(multiviewTessellationShader);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: {
      VkPhysicalDeviceProtectedMemoryFeatures *features = (void *)ext;
      CORE_FEATURE(protectedMemory);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: {
      VkPhysicalDeviceSamplerYcbcrConversionFeatures *features = (void *)ext;
      CORE_FEATURE(samplerYcbcrConversion);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES: {
      VkPhysicalDeviceShaderDrawParametersFeatures *features = (void *)ext;
      CORE_FEATURE(shaderDrawParameters);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES: {
      VkPhysicalDeviceVariablePointersFeatures *features = (void *)ext;
      CORE_FEATURE(variablePointersStorageBuffer);
      CORE_FEATURE(variablePointers);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES:
      vk_copy_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

bool
vk_get_physical_device_core_1_2_feature_ext(struct VkBaseOutStructure *ext,
                                            const VkPhysicalDeviceVulkan12Features *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES: {
      VkPhysicalDevice8BitStorageFeatures *features = (void *)ext;
      CORE_FEATURE(storageBuffer8BitAccess);
      CORE_FEATURE(uniformAndStorageBuffer8BitAccess);
      CORE_FEATURE(storagePushConstant8);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES: {
      VkPhysicalDeviceBufferDeviceAddressFeatures *features = (void *)ext;
      CORE_FEATURE(bufferDeviceAddress);
      CORE_FEATURE(bufferDeviceAddressCaptureReplay);
      CORE_FEATURE(bufferDeviceAddressMultiDevice);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES: {
      VkPhysicalDeviceDescriptorIndexingFeatures *features = (void *)ext;
      CORE_FEATURE(shaderInputAttachmentArrayDynamicIndexing);
      CORE_FEATURE(shaderUniformTexelBufferArrayDynamicIndexing);
      CORE_FEATURE(shaderStorageTexelBufferArrayDynamicIndexing);
      CORE_FEATURE(shaderUniformBufferArrayNonUniformIndexing);
      CORE_FEATURE(shaderSampledImageArrayNonUniformIndexing);
      CORE_FEATURE(shaderStorageBufferArrayNonUniformIndexing);
      CORE_FEATURE(shaderStorageImageArrayNonUniformIndexing);
      CORE_FEATURE(shaderInputAttachmentArrayNonUniformIndexing);
      CORE_FEATURE(shaderUniformTexelBufferArrayNonUniformIndexing);
      CORE_FEATURE(shaderStorageTexelBufferArrayNonUniformIndexing);
      CORE_FEATURE(descriptorBindingUniformBufferUpdateAfterBind);
      CORE_FEATURE(descriptorBindingSampledImageUpdateAfterBind);
      CORE_FEATURE(descriptorBindingStorageImageUpdateAfterBind);
      CORE_FEATURE(descriptorBindingStorageBufferUpdateAfterBind);
      CORE_FEATURE(descriptorBindingUniformTexelBufferUpdateAfterBind);
      CORE_FEATURE(descriptorBindingStorageTexelBufferUpdateAfterBind);
      CORE_FEATURE(descriptorBindingUpdateUnusedWhilePending);
      CORE_FEATURE(descriptorBindingPartiallyBound);
      CORE_FEATURE(descriptorBindingVariableDescriptorCount);
      CORE_FEATURE(runtimeDescriptorArray);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES: {
      VkPhysicalDeviceShaderFloat16Int8Features *features = (void *)ext;
      CORE_FEATURE(shaderFloat16);
      CORE_FEATURE(shaderInt8);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES: {
      VkPhysicalDeviceHostQueryResetFeatures *features = (void *)ext;
      CORE_FEATURE(hostQueryReset);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES: {
      VkPhysicalDeviceImagelessFramebufferFeatures *features = (void *)ext;
      CORE_FEATURE(imagelessFramebuffer);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES: {
      VkPhysicalDeviceScalarBlockLayoutFeatures *features = (void *)ext;
      CORE_FEATURE(scalarBlockLayout);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES: {
      VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *features = (void *)ext;
      CORE_FEATURE(separateDepthStencilLayouts);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES: {
      VkPhysicalDeviceShaderAtomicInt64Features *features = (void *)ext;
      CORE_FEATURE(shaderBufferInt64Atomics);
      CORE_FEATURE(shaderSharedInt64Atomics);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES: {
      VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *features = (void *)ext;
      CORE_FEATURE(shaderSubgroupExtendedTypes);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES: {
      VkPhysicalDeviceTimelineSemaphoreFeatures *features = (void *)ext;
      CORE_FEATURE(timelineSemaphore);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES: {
      VkPhysicalDeviceUniformBufferStandardLayoutFeatures *features = (void *)ext;
      CORE_FEATURE(uniformBufferStandardLayout);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES: {
      VkPhysicalDeviceVulkanMemoryModelFeatures *features = (void *)ext;
      CORE_FEATURE(vulkanMemoryModel);
      CORE_FEATURE(vulkanMemoryModelDeviceScope);
      CORE_FEATURE(vulkanMemoryModelAvailabilityVisibilityChains);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES:
      vk_copy_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

bool
vk_get_physical_device_core_1_3_feature_ext(struct VkBaseOutStructure *ext,
                                            const VkPhysicalDeviceVulkan13Features *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES: {
      VkPhysicalDeviceDynamicRenderingFeatures *features = (void *)ext;
      CORE_FEATURE(dynamicRendering);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES: {
      VkPhysicalDeviceImageRobustnessFeatures *features = (void *)ext;
      CORE_FEATURE(robustImageAccess);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES: {
      VkPhysicalDeviceInlineUniformBlockFeatures *features = (void *)ext;
      CORE_FEATURE(inlineUniformBlock);
      CORE_FEATURE(descriptorBindingInlineUniformBlockUpdateAfterBind);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES: {
      VkPhysicalDeviceMaintenance4Features *features = (void *)ext;
      CORE_FEATURE(maintenance4);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES: {
      VkPhysicalDevicePipelineCreationCacheControlFeatures *features = (void *)ext;
      CORE_FEATURE(pipelineCreationCacheControl);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES: {
      VkPhysicalDevicePrivateDataFeatures *features = (void *)ext;
      CORE_FEATURE(privateData);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES: {
      VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *features = (void *)ext;
      CORE_FEATURE(shaderDemoteToHelperInvocation);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES: {
      VkPhysicalDeviceShaderIntegerDotProductFeatures *features = (void *)ext;
      CORE_FEATURE(shaderIntegerDotProduct);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES: {
      VkPhysicalDeviceShaderTerminateInvocationFeatures *features = (void *)ext;
      CORE_FEATURE(shaderTerminateInvocation);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES: {
      VkPhysicalDeviceSubgroupSizeControlFeatures *features = (void *)ext;
      CORE_FEATURE(subgroupSizeControl);
      CORE_FEATURE(computeFullSubgroups);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES: {
      VkPhysicalDeviceSynchronization2Features *features = (void *)ext;
      CORE_FEATURE(synchronization2);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES: {
      VkPhysicalDeviceTextureCompressionASTCHDRFeatures *features = (void *)ext;
      CORE_FEATURE(textureCompressionASTC_HDR);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES: {
      VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *features = (void *)ext;
      CORE_FEATURE(shaderZeroInitializeWorkgroupMemory);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES:
      vk_copy_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

#undef CORE_FEATURE

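/* CORE_PROPERTY and CORE_RENAMED_PROPERTY copy a property (possibly under a
 * different name) from the core Vulkan 1.x properties struct ("core") into
 * the extension properties struct being filled out ("properties").  The
 * helpers below use them to answer per-extension property queries from the
 * corresponding core property structs.
 */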
#define CORE_RENAMED_PROPERTY(ext_property, core_property) \
   memcpy(&properties->ext_property, &core->core_property, sizeof(core->core_property))

#define CORE_PROPERTY(property) CORE_RENAMED_PROPERTY(property, property)

bool
vk_get_physical_device_core_1_1_property_ext(struct VkBaseOutStructure *ext,
                                             const VkPhysicalDeviceVulkan11Properties *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES: {
      VkPhysicalDeviceIDProperties *properties = (void *)ext;
      CORE_PROPERTY(deviceUUID);
      CORE_PROPERTY(driverUUID);
      CORE_PROPERTY(deviceLUID);
      CORE_PROPERTY(deviceNodeMask);
      CORE_PROPERTY(deviceLUIDValid);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES: {
      VkPhysicalDeviceMaintenance3Properties *properties = (void *)ext;
      CORE_PROPERTY(maxPerSetDescriptors);
      CORE_PROPERTY(maxMemoryAllocationSize);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES: {
      VkPhysicalDeviceMultiviewProperties *properties = (void *)ext;
      CORE_PROPERTY(maxMultiviewViewCount);
      CORE_PROPERTY(maxMultiviewInstanceIndex);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: {
      VkPhysicalDevicePointClippingProperties *properties = (void *)ext;
      CORE_PROPERTY(pointClippingBehavior);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES: {
      VkPhysicalDeviceProtectedMemoryProperties *properties = (void *)ext;
      CORE_PROPERTY(protectedNoFault);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES: {
      VkPhysicalDeviceSubgroupProperties *properties = (void *)ext;
      CORE_PROPERTY(subgroupSize);
      CORE_RENAMED_PROPERTY(supportedStages,
                            subgroupSupportedStages);
      CORE_RENAMED_PROPERTY(supportedOperations,
                            subgroupSupportedOperations);
      CORE_RENAMED_PROPERTY(quadOperationsInAllStages,
                            subgroupQuadOperationsInAllStages);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES:
      vk_copy_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

bool
vk_get_physical_device_core_1_2_property_ext(struct VkBaseOutStructure *ext,
                                             const VkPhysicalDeviceVulkan12Properties *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES: {
      VkPhysicalDeviceDepthStencilResolveProperties *properties = (void *)ext;
      CORE_PROPERTY(supportedDepthResolveModes);
      CORE_PROPERTY(supportedStencilResolveModes);
      CORE_PROPERTY(independentResolveNone);
      CORE_PROPERTY(independentResolve);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES: {
      VkPhysicalDeviceDescriptorIndexingProperties *properties = (void *)ext;
      CORE_PROPERTY(maxUpdateAfterBindDescriptorsInAllPools);
      CORE_PROPERTY(shaderUniformBufferArrayNonUniformIndexingNative);
      CORE_PROPERTY(shaderSampledImageArrayNonUniformIndexingNative);
      CORE_PROPERTY(shaderStorageBufferArrayNonUniformIndexingNative);
      CORE_PROPERTY(shaderStorageImageArrayNonUniformIndexingNative);
      CORE_PROPERTY(shaderInputAttachmentArrayNonUniformIndexingNative);
      CORE_PROPERTY(robustBufferAccessUpdateAfterBind);
      CORE_PROPERTY(quadDivergentImplicitLod);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindSamplers);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindUniformBuffers);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindStorageBuffers);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindSampledImages);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindStorageImages);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindInputAttachments);
      CORE_PROPERTY(maxPerStageUpdateAfterBindResources);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindSamplers);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindUniformBuffers);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindUniformBuffersDynamic);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindStorageBuffers);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindStorageBuffersDynamic);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindSampledImages);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindStorageImages);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindInputAttachments);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES: {
      VkPhysicalDeviceDriverProperties *properties = (void *)ext;
      CORE_PROPERTY(driverID);
      CORE_PROPERTY(driverName);
      CORE_PROPERTY(driverInfo);
      CORE_PROPERTY(conformanceVersion);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES: {
      VkPhysicalDeviceSamplerFilterMinmaxProperties *properties = (void *)ext;
      CORE_PROPERTY(filterMinmaxImageComponentMapping);
      CORE_PROPERTY(filterMinmaxSingleComponentFormats);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES: {
      VkPhysicalDeviceFloatControlsProperties *properties = (void *)ext;
      CORE_PROPERTY(denormBehaviorIndependence);
      CORE_PROPERTY(roundingModeIndependence);
      CORE_PROPERTY(shaderDenormFlushToZeroFloat16);
      CORE_PROPERTY(shaderDenormPreserveFloat16);
      CORE_PROPERTY(shaderRoundingModeRTEFloat16);
      CORE_PROPERTY(shaderRoundingModeRTZFloat16);
      CORE_PROPERTY(shaderSignedZeroInfNanPreserveFloat16);
      CORE_PROPERTY(shaderDenormFlushToZeroFloat32);
      CORE_PROPERTY(shaderDenormPreserveFloat32);
      CORE_PROPERTY(shaderRoundingModeRTEFloat32);
      CORE_PROPERTY(shaderRoundingModeRTZFloat32);
      CORE_PROPERTY(shaderSignedZeroInfNanPreserveFloat32);
      CORE_PROPERTY(shaderDenormFlushToZeroFloat64);
      CORE_PROPERTY(shaderDenormPreserveFloat64);
      CORE_PROPERTY(shaderRoundingModeRTEFloat64);
      CORE_PROPERTY(shaderRoundingModeRTZFloat64);
      CORE_PROPERTY(shaderSignedZeroInfNanPreserveFloat64);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES: {
      VkPhysicalDeviceTimelineSemaphoreProperties *properties = (void *)ext;
      CORE_PROPERTY(maxTimelineSemaphoreValueDifference);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES:
      vk_copy_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

bool
vk_get_physical_device_core_1_3_property_ext(struct VkBaseOutStructure *ext,
                                             const VkPhysicalDeviceVulkan13Properties *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES: {
      VkPhysicalDeviceInlineUniformBlockProperties *properties = (void *)ext;
      CORE_PROPERTY(maxInlineUniformBlockSize);
      CORE_PROPERTY(maxPerStageDescriptorInlineUniformBlocks);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks);
      CORE_PROPERTY(maxDescriptorSetInlineUniformBlocks);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindInlineUniformBlocks);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES: {
      VkPhysicalDeviceMaintenance4Properties *properties = (void *)ext;
      CORE_PROPERTY(maxBufferSize);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES: {
      VkPhysicalDeviceShaderIntegerDotProductProperties *properties = (void *)ext;

#define IDP_PROPERTY(x) CORE_PROPERTY(integerDotProduct##x)
      IDP_PROPERTY(8BitUnsignedAccelerated);
      IDP_PROPERTY(8BitSignedAccelerated);
      IDP_PROPERTY(8BitMixedSignednessAccelerated);
      IDP_PROPERTY(4x8BitPackedUnsignedAccelerated);
      IDP_PROPERTY(4x8BitPackedSignedAccelerated);
      IDP_PROPERTY(4x8BitPackedMixedSignednessAccelerated);
      IDP_PROPERTY(16BitUnsignedAccelerated);
      IDP_PROPERTY(16BitSignedAccelerated);
      IDP_PROPERTY(16BitMixedSignednessAccelerated);
      IDP_PROPERTY(32BitUnsignedAccelerated);
      IDP_PROPERTY(32BitSignedAccelerated);
      IDP_PROPERTY(32BitMixedSignednessAccelerated);
      IDP_PROPERTY(64BitUnsignedAccelerated);
      IDP_PROPERTY(64BitSignedAccelerated);
      IDP_PROPERTY(64BitMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating8BitUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating8BitSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating8BitMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating4x8BitPackedUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating4x8BitPackedSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating4x8BitPackedMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating16BitUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating16BitSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating16BitMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating32BitUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating32BitSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating32BitMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating64BitUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating64BitSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating64BitMixedSignednessAccelerated);
#undef IDP_PROPERTY
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES: {
      VkPhysicalDeviceSubgroupSizeControlProperties *properties = (void *)ext;
      CORE_PROPERTY(minSubgroupSize);
      CORE_PROPERTY(maxSubgroupSize);
      CORE_PROPERTY(maxComputeWorkgroupSubgroups);
      CORE_PROPERTY(requiredSubgroupSizeStages);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES: {
      VkPhysicalDeviceTexelBufferAlignmentProperties *properties = (void *)ext;
      CORE_PROPERTY(storageTexelBufferOffsetAlignmentBytes);
      CORE_PROPERTY(storageTexelBufferOffsetSingleTexelAlignment);
      CORE_PROPERTY(uniformTexelBufferOffsetAlignmentBytes);
      CORE_PROPERTY(uniformTexelBufferOffsetSingleTexelAlignment);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES:
      vk_copy_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

#undef CORE_RENAMED_PROPERTY
#undef CORE_PROPERTY