/*
 * Copyright © 2022 Collabora Ltd
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "vk_meta_private.h"

#include "vk_command_buffer.h"
#include "vk_device.h"
#include "vk_pipeline.h"
#include "vk_util.h"

#include "util/hash_table.h"

#include <string.h>

struct cache_key {
   VkObjectType obj_type;
   uint32_t key_size;
   const void *key_data;
};

static struct cache_key *
cache_key_create(VkObjectType obj_type, const void *key_data, size_t key_size)
{
   assert(key_size <= UINT32_MAX);

   struct cache_key *key = malloc(sizeof(*key) + key_size);
   *key = (struct cache_key) {
      .obj_type = obj_type,
      .key_size = key_size,
      .key_data = key + 1,
   };
   memcpy(key + 1, key_data, key_size);

   return key;
}

static uint32_t
cache_key_hash(const void *_key)
{
   const struct cache_key *key = _key;

   assert(sizeof(key->obj_type) == 4);
   uint32_t hash = _mesa_hash_u32(&key->obj_type);
   return _mesa_hash_data_with_seed(key->key_data, key->key_size, hash);
}

static bool
cache_key_equal(const void *_a, const void *_b)
{
   const struct cache_key *a = _a, *b = _b;
   if (a->obj_type != b->obj_type || a->key_size != b->key_size)
      return false;

   return memcmp(a->key_data, b->key_data, a->key_size) == 0;
}

static void
destroy_object(struct vk_device *device, struct vk_object_base *obj)
{
   const struct vk_device_dispatch_table *disp = &device->dispatch_table;
   VkDevice _device = vk_device_to_handle(device);

   switch (obj->type) {
   case VK_OBJECT_TYPE_BUFFER:
      disp->DestroyBuffer(_device, (VkBuffer)(uintptr_t)obj, NULL);
      break;
   case VK_OBJECT_TYPE_IMAGE_VIEW:
      disp->DestroyImageView(_device, (VkImageView)(uintptr_t)obj, NULL);
      break;
   case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
      disp->DestroyDescriptorSetLayout(_device,
                                       (VkDescriptorSetLayout)(uintptr_t)obj,
                                       NULL);
      break;
   case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
      disp->DestroyPipelineLayout(_device, (VkPipelineLayout)(uintptr_t)obj,
                                  NULL);
      break;
   case VK_OBJECT_TYPE_PIPELINE:
      disp->DestroyPipeline(_device, (VkPipeline)(uintptr_t)obj, NULL);
      break;
   case VK_OBJECT_TYPE_SAMPLER:
      disp->DestroySampler(_device, (VkSampler)(uintptr_t)obj, NULL);
      break;
   default:
      unreachable("Unsupported object type");
   }
}

VkResult
vk_meta_device_init(struct vk_device *device,
                    struct vk_meta_device *meta)
{
   memset(meta, 0, sizeof(*meta));

   meta->cache = _mesa_hash_table_create(NULL, cache_key_hash,
                                         cache_key_equal);
   simple_mtx_init(&meta->cache_mtx, mtx_plain);

   meta->cmd_draw_rects = vk_meta_draw_rects;
   meta->cmd_draw_volume = vk_meta_draw_volume;

   return VK_SUCCESS;
}

void
vk_meta_device_finish(struct vk_device *device,
                      struct vk_meta_device *meta)
{
   hash_table_foreach(meta->cache, entry) {
      free((void *)entry->key);
      destroy_object(device, entry->data);
   }
   _mesa_hash_table_destroy(meta->cache, NULL);
   simple_mtx_destroy(&meta->cache_mtx);
}
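
/*
 * Example (hypothetical driver code, not part of this file): a driver
 * typically embeds a struct vk_meta_device in its device object, calls
 * vk_meta_device_init() at device-creation time, and calls
 * vk_meta_device_finish() before the Vulkan device is destroyed so that
 * cached meta objects are freed while the dispatch table is still usable.
 * The "dev", "dev->vk", and "dev->meta" names below are made up for
 * illustration.
 *
 *    VkResult result = vk_meta_device_init(&dev->vk, &dev->meta);
 *    if (result != VK_SUCCESS)
 *       return result;
 *
 *    ... record and submit meta operations ...
 *
 *    vk_meta_device_finish(&dev->vk, &dev->meta);
 */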

uint64_t
vk_meta_lookup_object(struct vk_meta_device *meta,
                      VkObjectType obj_type,
                      const void *key_data, size_t key_size)
{
   assert(key_size >= sizeof(enum vk_meta_object_key_type));
   assert(*(enum vk_meta_object_key_type *)key_data !=
          VK_META_OBJECT_KEY_TYPE_INVALID);

   struct cache_key key = {
      .obj_type = obj_type,
      .key_size = key_size,
      .key_data = key_data,
   };

   uint32_t hash = cache_key_hash(&key);

   simple_mtx_lock(&meta->cache_mtx);
   struct hash_entry *entry =
      _mesa_hash_table_search_pre_hashed(meta->cache, hash, &key);
   simple_mtx_unlock(&meta->cache_mtx);

   if (entry == NULL)
      return 0;

   struct vk_object_base *obj = entry->data;
   assert(obj->type == obj_type);

   return (uint64_t)(uintptr_t)obj;
}

uint64_t
vk_meta_cache_object(struct vk_device *device,
                     struct vk_meta_device *meta,
                     const void *key_data, size_t key_size,
                     VkObjectType obj_type,
                     uint64_t handle)
{
   assert(key_size >= sizeof(enum vk_meta_object_key_type));
   assert(*(enum vk_meta_object_key_type *)key_data !=
          VK_META_OBJECT_KEY_TYPE_INVALID);

   struct cache_key *key = cache_key_create(obj_type, key_data, key_size);
   struct vk_object_base *obj =
      vk_object_base_from_u64_handle(handle, obj_type);

   uint32_t hash = cache_key_hash(key);

   simple_mtx_lock(&meta->cache_mtx);
   struct hash_entry *entry =
      _mesa_hash_table_search_pre_hashed(meta->cache, hash, key);
   if (entry == NULL)
      _mesa_hash_table_insert_pre_hashed(meta->cache, hash, key, obj);
   simple_mtx_unlock(&meta->cache_mtx);

   if (entry != NULL) {
      /* We raced and the object is already in the cache */
      free(key);
      destroy_object(device, obj);
      return (uint64_t)(uintptr_t)entry->data;
   } else {
      /* Return the newly inserted object */
      return (uint64_t)(uintptr_t)obj;
   }
}
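
/*
 * Example (illustrative sketch, not taken from a real driver): cache keys
 * are arbitrary blobs whose first field is an enum vk_meta_object_key_type
 * other than the invalid value.  A meta operation usually tries
 * vk_meta_lookup_object() first and only creates the object on a miss;
 * vk_meta_cache_object() then either inserts the new object or, if another
 * thread won the race, destroys it and returns the cached one.  The key
 * struct, key type, and sampler_info below are hypothetical.
 *
 *    struct blit_sampler_key {
 *       enum vk_meta_object_key_type key_type;
 *       VkFilter filter;
 *    } key = { .key_type = MY_DRIVER_KEY_BLIT_SAMPLER, .filter = filter };
 *
 *    VkSampler sampler = (VkSampler)
 *       vk_meta_lookup_object(meta, VK_OBJECT_TYPE_SAMPLER,
 *                             &key, sizeof(key));
 *    if (sampler == VK_NULL_HANDLE) {
 *       VkResult result =
 *          vk_meta_create_sampler(device, meta, &sampler_info,
 *                                 &key, sizeof(key), &sampler);
 *       if (result != VK_SUCCESS)
 *          return result;
 *    }
 */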

VkResult
vk_meta_create_sampler(struct vk_device *device,
                       struct vk_meta_device *meta,
                       const VkSamplerCreateInfo *info,
                       const void *key_data, size_t key_size,
                       VkSampler *sampler_out)
{
   const struct vk_device_dispatch_table *disp = &device->dispatch_table;
   VkDevice _device = vk_device_to_handle(device);

   VkSampler sampler;
   VkResult result = disp->CreateSampler(_device, info, NULL, &sampler);
   if (result != VK_SUCCESS)
      return result;

   *sampler_out = (VkSampler)
      vk_meta_cache_object(device, meta, key_data, key_size,
                           VK_OBJECT_TYPE_SAMPLER,
                           (uint64_t)sampler);
   return VK_SUCCESS;
}

VkResult
vk_meta_create_descriptor_set_layout(struct vk_device *device,
                                     struct vk_meta_device *meta,
                                     const VkDescriptorSetLayoutCreateInfo *info,
                                     const void *key_data, size_t key_size,
                                     VkDescriptorSetLayout *layout_out)
{
   const struct vk_device_dispatch_table *disp = &device->dispatch_table;
   VkDevice _device = vk_device_to_handle(device);

   VkDescriptorSetLayout layout;
   VkResult result = disp->CreateDescriptorSetLayout(_device, info,
                                                     NULL, &layout);
   if (result != VK_SUCCESS)
      return result;

   *layout_out = (VkDescriptorSetLayout)
      vk_meta_cache_object(device, meta, key_data, key_size,
                           VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,
                           (uint64_t)layout);
   return VK_SUCCESS;
}

static VkResult
vk_meta_get_descriptor_set_layout(struct vk_device *device,
                                  struct vk_meta_device *meta,
                                  const VkDescriptorSetLayoutCreateInfo *info,
                                  const void *key_data, size_t key_size,
                                  VkDescriptorSetLayout *layout_out)
{
   VkDescriptorSetLayout cached =
      vk_meta_lookup_descriptor_set_layout(meta, key_data, key_size);
   if (cached != VK_NULL_HANDLE) {
      *layout_out = cached;
      return VK_SUCCESS;
   }

   return vk_meta_create_descriptor_set_layout(device, meta, info,
                                               key_data, key_size,
                                               layout_out);
}

VkResult
vk_meta_create_pipeline_layout(struct vk_device *device,
                               struct vk_meta_device *meta,
                               const VkPipelineLayoutCreateInfo *info,
                               const void *key_data, size_t key_size,
                               VkPipelineLayout *layout_out)
{
   const struct vk_device_dispatch_table *disp = &device->dispatch_table;
   VkDevice _device = vk_device_to_handle(device);

   VkPipelineLayout layout;
   VkResult result = disp->CreatePipelineLayout(_device, info, NULL, &layout);
   if (result != VK_SUCCESS)
      return result;

   *layout_out = (VkPipelineLayout)
      vk_meta_cache_object(device, meta, key_data, key_size,
                           VK_OBJECT_TYPE_PIPELINE_LAYOUT,
                           (uint64_t)layout);
   return VK_SUCCESS;
}

VkResult
vk_meta_get_pipeline_layout(struct vk_device *device,
                            struct vk_meta_device *meta,
                            const VkDescriptorSetLayoutCreateInfo *desc_info,
                            const VkPushConstantRange *push_range,
                            const void *key_data, size_t key_size,
                            VkPipelineLayout *layout_out)
{
   VkPipelineLayout cached =
      vk_meta_lookup_pipeline_layout(meta, key_data, key_size);
   if (cached != VK_NULL_HANDLE) {
      *layout_out = cached;
      return VK_SUCCESS;
   }

   VkDescriptorSetLayout set_layout = VK_NULL_HANDLE;
   if (desc_info != NULL) {
      VkResult result =
         vk_meta_get_descriptor_set_layout(device, meta, desc_info,
                                           key_data, key_size, &set_layout);
      if (result != VK_SUCCESS)
         return result;
   }

   const VkPipelineLayoutCreateInfo layout_info = {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
      .setLayoutCount = set_layout != VK_NULL_HANDLE ? 1 : 0,
      .pSetLayouts = &set_layout,
      .pushConstantRangeCount = push_range != NULL ? 1 : 0,
      .pPushConstantRanges = push_range,
   };

   return vk_meta_create_pipeline_layout(device, meta, &layout_info,
                                         key_data, key_size, layout_out);
}
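
/*
 * Example (hypothetical usage): a meta fragment shader that samples a
 * single combined image/sampler and takes a 16-byte push-constant block
 * could obtain its layout as below.  The key, binding layout, and sizes
 * are placeholders chosen for illustration only.
 *
 *    const VkDescriptorSetLayoutBinding binding = {
 *       .binding = 0,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
 *       .descriptorCount = 1,
 *       .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT,
 *    };
 *    const VkDescriptorSetLayoutCreateInfo desc_info = {
 *       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
 *       .bindingCount = 1,
 *       .pBindings = &binding,
 *    };
 *    const VkPushConstantRange push_range = {
 *       .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT,
 *       .offset = 0,
 *       .size = 16,
 *    };
 *
 *    VkPipelineLayout layout;
 *    VkResult result = vk_meta_get_pipeline_layout(device, meta, &desc_info,
 *                                                  &push_range, &key,
 *                                                  sizeof(key), &layout);
 */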

static VkResult
create_rect_list_pipeline(struct vk_device *device,
                          struct vk_meta_device *meta,
                          const VkGraphicsPipelineCreateInfo *info,
                          VkPipeline *pipeline_out)
{
   const struct vk_device_dispatch_table *disp = &device->dispatch_table;
   VkDevice _device = vk_device_to_handle(device);

   VkGraphicsPipelineCreateInfo info_local = *info;

   /* We always configure for layered rendering for now */
   bool use_gs = meta->use_gs_for_layer;

   STACK_ARRAY(VkPipelineShaderStageCreateInfo, stages,
               info->stageCount + 1 + use_gs);
   uint32_t stage_count = 0;

   VkPipelineShaderStageNirCreateInfoMESA vs_nir_info = {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NIR_CREATE_INFO_MESA,
      .nir = vk_meta_draw_rects_vs_nir(meta, use_gs),
   };
   stages[stage_count++] = (VkPipelineShaderStageCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
      .pNext = &vs_nir_info,
      .stage = VK_SHADER_STAGE_VERTEX_BIT,
      .pName = "main",
   };

   VkPipelineShaderStageNirCreateInfoMESA gs_nir_info;
   if (use_gs) {
      gs_nir_info = (VkPipelineShaderStageNirCreateInfoMESA) {
         .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NIR_CREATE_INFO_MESA,
         .nir = vk_meta_draw_rects_gs_nir(meta),
      };
      stages[stage_count++] = (VkPipelineShaderStageCreateInfo) {
         .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
         .pNext = &gs_nir_info,
         .stage = VK_SHADER_STAGE_GEOMETRY_BIT,
         .pName = "main",
      };
   }

   for (uint32_t i = 0; i < info->stageCount; i++) {
      assert(info->pStages[i].stage != VK_SHADER_STAGE_VERTEX_BIT);
      if (use_gs)
         assert(info->pStages[i].stage != VK_SHADER_STAGE_GEOMETRY_BIT);
      stages[stage_count++] = info->pStages[i];
   }

   info_local.stageCount = stage_count;
   info_local.pStages = stages;
   info_local.pVertexInputState = &vk_meta_draw_rects_vi_state;
   info_local.pViewportState = &vk_meta_draw_rects_vs_state;

   uint32_t dyn_count = info->pDynamicState != NULL ?
                        info->pDynamicState->dynamicStateCount : 0;

   STACK_ARRAY(VkDynamicState, dyn_state, dyn_count + 2);
   for (uint32_t i = 0; i < dyn_count; i++)
      dyn_state[i] = info->pDynamicState->pDynamicStates[i];

   dyn_state[dyn_count + 0] = VK_DYNAMIC_STATE_VIEWPORT;
   dyn_state[dyn_count + 1] = VK_DYNAMIC_STATE_SCISSOR;

   const VkPipelineDynamicStateCreateInfo dyn_info = {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
      .dynamicStateCount = dyn_count + 2,
      .pDynamicStates = dyn_state,
   };

   info_local.pDynamicState = &dyn_info;

   VkResult result = disp->CreateGraphicsPipelines(_device, VK_NULL_HANDLE,
                                                   1, &info_local, NULL,
                                                   pipeline_out);

   STACK_ARRAY_FINISH(stages);
   STACK_ARRAY_FINISH(dyn_state);

   return result;
}

static const VkPipelineRasterizationStateCreateInfo default_rs_info = {
   .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
   .depthClampEnable = false,
   .depthBiasEnable = false,
   .polygonMode = VK_POLYGON_MODE_FILL,
   .cullMode = VK_CULL_MODE_NONE,
   .frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE,
};

static const VkPipelineDepthStencilStateCreateInfo default_ds_info = {
   .sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
   .depthTestEnable = false,
   .depthBoundsTestEnable = false,
   .stencilTestEnable = false,
};

VkResult
vk_meta_create_graphics_pipeline(struct vk_device *device,
                                 struct vk_meta_device *meta,
                                 const VkGraphicsPipelineCreateInfo *info,
                                 const struct vk_meta_rendering_info *render,
                                 const void *key_data, size_t key_size,
                                 VkPipeline *pipeline_out)
{
   const struct vk_device_dispatch_table *disp = &device->dispatch_table;
   VkDevice _device = vk_device_to_handle(device);
   VkResult result;

   VkGraphicsPipelineCreateInfo info_local = *info;

   /* Add in the rendering info */
   VkPipelineRenderingCreateInfo r_info = {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO,
      .viewMask = render->view_mask,
      .colorAttachmentCount = render->color_attachment_count,
      .pColorAttachmentFormats = render->color_attachment_formats,
      .depthAttachmentFormat = render->depth_attachment_format,
      .stencilAttachmentFormat = render->stencil_attachment_format,
   };
   __vk_append_struct(&info_local, &r_info);

   /* Assume rectangle pipelines */
   if (info_local.pInputAssemblyState == NULL)
      info_local.pInputAssemblyState = &vk_meta_draw_rects_ia_state;

   if (info_local.pRasterizationState == NULL)
      info_local.pRasterizationState = &default_rs_info;

   VkPipelineMultisampleStateCreateInfo ms_info;
   if (info_local.pMultisampleState == NULL) {
      ms_info = (VkPipelineMultisampleStateCreateInfo) {
         .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
         .rasterizationSamples = render->samples,
      };
      info_local.pMultisampleState = &ms_info;
   }

   if (info_local.pDepthStencilState == NULL)
      info_local.pDepthStencilState = &default_ds_info;

   VkPipelineColorBlendStateCreateInfo cb_info;
   VkPipelineColorBlendAttachmentState cb_att[MESA_VK_MAX_COLOR_ATTACHMENTS];
   if (info_local.pColorBlendState == NULL) {
      for (uint32_t i = 0; i < render->color_attachment_count; i++) {
         cb_att[i] = (VkPipelineColorBlendAttachmentState) {
            .blendEnable = false,
            .colorWriteMask = VK_COLOR_COMPONENT_R_BIT |
                              VK_COLOR_COMPONENT_G_BIT |
                              VK_COLOR_COMPONENT_B_BIT |
                              VK_COLOR_COMPONENT_A_BIT,
         };
      }
      cb_info = (VkPipelineColorBlendStateCreateInfo) {
         .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
         .attachmentCount = render->color_attachment_count,
         .pAttachments = cb_att,
      };
      info_local.pColorBlendState = &cb_info;
   }

   VkPipeline pipeline;
   if (info_local.pInputAssemblyState->topology ==
       VK_PRIMITIVE_TOPOLOGY_META_RECT_LIST_MESA) {
      result = create_rect_list_pipeline(device, meta,
                                         &info_local,
                                         &pipeline);
   } else {
      result = disp->CreateGraphicsPipelines(_device, VK_NULL_HANDLE,
                                             1, &info_local,
                                             NULL, &pipeline);
   }
   if (unlikely(result != VK_SUCCESS))
      return result;

   *pipeline_out = (VkPipeline)vk_meta_cache_object(device, meta,
                                                    key_data, key_size,
                                                    VK_OBJECT_TYPE_PIPELINE,
                                                    (uint64_t)pipeline);
   return VK_SUCCESS;
}

VkResult
vk_meta_create_compute_pipeline(struct vk_device *device,
                                struct vk_meta_device *meta,
                                const VkComputePipelineCreateInfo *info,
                                const void *key_data, size_t key_size,
                                VkPipeline *pipeline_out)
{
   const struct vk_device_dispatch_table *disp = &device->dispatch_table;
   VkDevice _device = vk_device_to_handle(device);

   VkPipeline pipeline;
   VkResult result = disp->CreateComputePipelines(_device, VK_NULL_HANDLE,
                                                  1, info, NULL, &pipeline);
   if (result != VK_SUCCESS)
      return result;

   *pipeline_out = (VkPipeline)vk_meta_cache_object(device, meta,
                                                    key_data, key_size,
                                                    VK_OBJECT_TYPE_PIPELINE,
                                                    (uint64_t)pipeline);
   return VK_SUCCESS;
}

void
vk_meta_object_list_init(struct vk_meta_object_list *mol)
{
   util_dynarray_init(&mol->arr, NULL);
}

void
vk_meta_object_list_reset(struct vk_device *device,
                          struct vk_meta_object_list *mol)
{
   util_dynarray_foreach(&mol->arr, struct vk_object_base *, obj)
      destroy_object(device, *obj);

   util_dynarray_clear(&mol->arr);
}

void
vk_meta_object_list_finish(struct vk_device *device,
                           struct vk_meta_object_list *mol)
{
   vk_meta_object_list_reset(device, mol);
   util_dynarray_fini(&mol->arr);
}

VkResult
vk_meta_create_buffer(struct vk_command_buffer *cmd,
                      struct vk_meta_device *meta,
                      const VkBufferCreateInfo *info,
                      VkBuffer *buffer_out)
{
   struct vk_device *device = cmd->base.device;
   const struct vk_device_dispatch_table *disp = &device->dispatch_table;
   VkDevice _device = vk_device_to_handle(device);

   VkResult result = disp->CreateBuffer(_device, info, NULL, buffer_out);
   if (unlikely(result != VK_SUCCESS))
      return result;

   vk_meta_object_list_add_handle(&cmd->meta_objects,
                                  VK_OBJECT_TYPE_BUFFER,
                                  (uint64_t)*buffer_out);
   return VK_SUCCESS;
}

VkResult
vk_meta_create_image_view(struct vk_command_buffer *cmd,
                          struct vk_meta_device *meta,
                          const VkImageViewCreateInfo *info,
                          VkImageView *image_view_out)
{
   struct vk_device *device = cmd->base.device;
   const struct vk_device_dispatch_table *disp = &device->dispatch_table;
   VkDevice _device = vk_device_to_handle(device);

   VkResult result = disp->CreateImageView(_device, info, NULL, image_view_out);
   if (unlikely(result != VK_SUCCESS))
      return result;

   vk_meta_object_list_add_handle(&cmd->meta_objects,
                                  VK_OBJECT_TYPE_IMAGE_VIEW,
                                  (uint64_t)*image_view_out);
   return VK_SUCCESS;
}
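
/*
 * Example (hypothetical): unlike the keyed device-level cache above,
 * buffers and image views created through these helpers are recorded on
 * cmd->meta_objects and share the command buffer's lifetime; the
 * command-buffer reset/teardown path is expected to clean them up via
 * vk_meta_object_list_reset() or vk_meta_object_list_finish().  The
 * view_info variable below is a placeholder.
 *
 *    VkImageView view;
 *    VkResult result = vk_meta_create_image_view(cmd, meta, &view_info,
 *                                                &view);
 *    if (result != VK_SUCCESS)
 *       return result;
 *
 * No explicit DestroyImageView call is needed: the view is destroyed when
 * the command buffer's meta object list is reset or finished.
 */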