/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "util/mesa-sha1.h"
#include "vk_util.h"

#include "anv_private.h"

/*
 * Descriptor set layouts.
 */

static enum anv_descriptor_data
anv_descriptor_data_for_type(const struct anv_physical_device *device,
                             VkDescriptorType type)
{
   enum anv_descriptor_data data = 0;

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      data = ANV_DESCRIPTOR_SAMPLER_STATE;
      if (device->has_bindless_samplers)
         data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      data = ANV_DESCRIPTOR_SURFACE_STATE |
             ANV_DESCRIPTOR_SAMPLER_STATE;
      if (device->has_bindless_images || device->has_bindless_samplers)
         data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      data = ANV_DESCRIPTOR_SURFACE_STATE;
      if (device->has_bindless_images)
         data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      data = ANV_DESCRIPTOR_SURFACE_STATE;
      break;

   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      data = ANV_DESCRIPTOR_SURFACE_STATE;
      if (device->info.gen < 9)
         data |= ANV_DESCRIPTOR_IMAGE_PARAM;
      if (device->has_bindless_images)
         data |= ANV_DESCRIPTOR_STORAGE_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      data = ANV_DESCRIPTOR_SURFACE_STATE |
             ANV_DESCRIPTOR_BUFFER_VIEW;
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      data = ANV_DESCRIPTOR_SURFACE_STATE;
      break;

   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
      data = ANV_DESCRIPTOR_INLINE_UNIFORM;
      break;

   default:
      unreachable("Unsupported descriptor type");
   }

   /* On gen8 and above, when we have softpin enabled, we also need to push
    * SSBO address ranges so that we can use A64 messages in the shader.
    */
   if (device->has_a64_buffer_access &&
       (type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ||
        type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC))
      data |= ANV_DESCRIPTOR_ADDRESS_RANGE;

   /* On Ivy Bridge and Bay Trail, we need to swizzle textures in the shader.
    * We do not handle VK_DESCRIPTOR_TYPE_STORAGE_IMAGE and
    * VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT because they already must have an
    * identity swizzle.
    */
   if (device->info.gen == 7 && !device->info.is_haswell &&
       (type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
        type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER))
      data |= ANV_DESCRIPTOR_TEXTURE_SWIZZLE;

   return data;
}

static unsigned
anv_descriptor_data_size(enum anv_descriptor_data data)
{
   unsigned size = 0;

   if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE)
      size += sizeof(struct anv_sampled_image_descriptor);

   if (data & ANV_DESCRIPTOR_STORAGE_IMAGE)
      size += sizeof(struct anv_storage_image_descriptor);

   if (data & ANV_DESCRIPTOR_IMAGE_PARAM)
      size += BRW_IMAGE_PARAM_SIZE * 4;

   if (data & ANV_DESCRIPTOR_ADDRESS_RANGE)
      size += sizeof(struct anv_address_range_descriptor);

   if (data & ANV_DESCRIPTOR_TEXTURE_SWIZZLE)
      size += sizeof(struct anv_texture_swizzle_descriptor);

   return size;
}
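
/* Worked example (editor's sketch, not in the original source; "pdevice" is
 * a hypothetical anv_physical_device with has_a64_buffer_access set):
 *
 *    enum anv_descriptor_data data =
 *       anv_descriptor_data_for_type(pdevice, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
 *
 * yields ANV_DESCRIPTOR_SURFACE_STATE | ANV_DESCRIPTOR_BUFFER_VIEW |
 * ANV_DESCRIPTOR_ADDRESS_RANGE, and anv_descriptor_data_size(data) returns
 * sizeof(struct anv_address_range_descriptor): only the bindless payload
 * flags contribute descriptor-buffer bytes; the binding-table flags are
 * free.
 */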

static bool
anv_needs_descriptor_buffer(VkDescriptorType desc_type,
                            enum anv_descriptor_data desc_data)
{
   if (desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT ||
       anv_descriptor_data_size(desc_data) > 0)
      return true;
   return false;
}

/** Returns the size in bytes of each descriptor with the given layout */
unsigned
anv_descriptor_size(const struct anv_descriptor_set_binding_layout *layout)
{
   if (layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM) {
      assert(layout->data == ANV_DESCRIPTOR_INLINE_UNIFORM);
      return layout->array_size;
   }

   unsigned size = anv_descriptor_data_size(layout->data);

   /* For multi-planar bindings, we make every descriptor consume the maximum
    * number of planes so we don't have to bother with walking arrays and
    * adding things up every time.  Fortunately, YCbCr samplers aren't all
    * that common and likely won't be in the middle of big arrays.
    */
   if (layout->max_plane_count > 1)
      size *= layout->max_plane_count;

   return size;
}
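
/* Worked example (editor's illustration): a COMBINED_IMAGE_SAMPLER binding
 * whose immutable YCbCr sampler reports n_planes == 3 gets
 * max_plane_count == 3, so every element of the binding consumes
 * 3 * anv_descriptor_data_size(layout->data) bytes, even elements that only
 * ever hold single-plane images.  The payoff is a trivial addressing rule:
 * element i always lives at descriptor_offset + i * anv_descriptor_size().
 */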

/** Returns the size in bytes of each descriptor of the given type
 *
 * This version of the function does not have access to the entire layout so
 * it may only work on certain descriptor types where the descriptor size is
 * entirely determined by the descriptor type.  Whenever possible, code should
 * use anv_descriptor_size() instead.
 */
unsigned
anv_descriptor_type_size(const struct anv_physical_device *pdevice,
                         VkDescriptorType type)
{
   assert(type != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT &&
          type != VK_DESCRIPTOR_TYPE_SAMPLER &&
          type != VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE &&
          type != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);

   return anv_descriptor_data_size(anv_descriptor_data_for_type(pdevice, type));
}

static bool
anv_descriptor_data_supports_bindless(const struct anv_physical_device *pdevice,
                                      enum anv_descriptor_data data,
                                      bool sampler)
{
   if (data & ANV_DESCRIPTOR_ADDRESS_RANGE) {
      assert(pdevice->has_a64_buffer_access);
      return true;
   }

   if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
      assert(pdevice->has_bindless_images || pdevice->has_bindless_samplers);
      return sampler ? pdevice->has_bindless_samplers :
                       pdevice->has_bindless_images;
   }

   if (data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
      assert(pdevice->has_bindless_images);
      return true;
   }

   return false;
}

bool
anv_descriptor_supports_bindless(const struct anv_physical_device *pdevice,
                                 const struct anv_descriptor_set_binding_layout *binding,
                                 bool sampler)
{
   return anv_descriptor_data_supports_bindless(pdevice, binding->data,
                                                sampler);
}

bool
anv_descriptor_requires_bindless(const struct anv_physical_device *pdevice,
                                 const struct anv_descriptor_set_binding_layout *binding,
                                 bool sampler)
{
   if (pdevice->always_use_bindless)
      return anv_descriptor_supports_bindless(pdevice, binding, sampler);

   static const VkDescriptorBindingFlagBitsEXT flags_requiring_bindless =
      VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT |
      VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT |
      VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;

   return (binding->flags & flags_requiring_bindless) != 0;
}

void anv_GetDescriptorSetLayoutSupport(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    VkDescriptorSetLayoutSupport*               pSupport)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   const struct anv_physical_device *pdevice = device->physical;

   uint32_t surface_count[MESA_SHADER_STAGES] = { 0, };
   VkDescriptorType varying_desc_type = VK_DESCRIPTOR_TYPE_MAX_ENUM;
   bool needs_descriptor_buffer = false;

   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);

   for (uint32_t b = 0; b < pCreateInfo->bindingCount; b++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[b];

      VkDescriptorBindingFlags flags = 0;
      if (binding_flags_info && binding_flags_info->bindingCount > 0) {
         assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
         flags = binding_flags_info->pBindingFlags[b];
      }

      enum anv_descriptor_data desc_data =
         anv_descriptor_data_for_type(pdevice, binding->descriptorType);

      if (anv_needs_descriptor_buffer(binding->descriptorType, desc_data))
         needs_descriptor_buffer = true;

      if (flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)
         varying_desc_type = binding->descriptorType;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         /* There is no real limit on samplers */
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
         /* Inline uniforms don't use a binding */
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         if (anv_descriptor_data_supports_bindless(pdevice, desc_data, false))
            break;

         if (binding->pImmutableSamplers) {
            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               ANV_FROM_HANDLE(anv_sampler, sampler,
                               binding->pImmutableSamplers[i]);
               anv_foreach_stage(s, binding->stageFlags)
                  surface_count[s] += sampler->n_planes;
            }
         } else {
            anv_foreach_stage(s, binding->stageFlags)
               surface_count[s] += binding->descriptorCount;
         }
         break;

      default:
         if (anv_descriptor_data_supports_bindless(pdevice, desc_data, false))
            break;

         anv_foreach_stage(s, binding->stageFlags)
            surface_count[s] += binding->descriptorCount;
         break;
      }
   }

   for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
      if (needs_descriptor_buffer)
         surface_count[s] += 1;
   }

   VkDescriptorSetVariableDescriptorCountLayoutSupport *vdcls =
      vk_find_struct(pSupport->pNext,
                     DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT);
   if (vdcls != NULL) {
      if (varying_desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
         vdcls->maxVariableDescriptorCount = MAX_INLINE_UNIFORM_BLOCK_SIZE;
      } else if (varying_desc_type != VK_DESCRIPTOR_TYPE_MAX_ENUM) {
         vdcls->maxVariableDescriptorCount = UINT16_MAX;
      } else {
         vdcls->maxVariableDescriptorCount = 0;
      }
   }

   bool supported = true;
   for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
      /* Our maximum binding table size is 240 and we need to reserve 8 for
       * render targets.
       */
      if (surface_count[s] > MAX_BINDING_TABLE_SIZE - MAX_RTS)
         supported = false;
   }

   pSupport->supported = supported;
}

VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t max_binding = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);

      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...]  If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored.
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
      if ((desc_type == VK_DESCRIPTOR_TYPE_SAMPLER ||
           desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
          pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   struct anv_descriptor_set_layout *set_layout;
   struct anv_descriptor_set_binding_layout *bindings;
   struct anv_sampler **samplers;

   /* We need to allocate descriptor set layouts off the device allocator
    * with DEVICE scope because they are reference counted and may not be
    * destroyed when vkDestroyDescriptorSetLayout is called.
    */
   ANV_MULTIALLOC(ma);
   anv_multialloc_add(&ma, &set_layout, 1);
   anv_multialloc_add(&ma, &bindings, max_binding + 1);
   anv_multialloc_add(&ma, &samplers, immutable_sampler_count);

   if (!anv_multialloc_alloc(&ma, &device->vk.alloc,
                             VK_SYSTEM_ALLOCATION_SCOPE_DEVICE))
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   memset(set_layout, 0, sizeof(*set_layout));
   vk_object_base_init(&device->vk, &set_layout->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT);
   set_layout->ref_cnt = 1;
   set_layout->binding_count = max_binding + 1;

   for (uint32_t b = 0; b <= max_binding; b++) {
      /* Initialize all binding_layout entries to -1 */
      memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));

      set_layout->binding[b].flags = 0;
      set_layout->binding[b].data = 0;
      set_layout->binding[b].max_plane_count = 0;
      set_layout->binding[b].array_size = 0;
      set_layout->binding[b].immutable_samplers = NULL;
   }

   /* Initialize all samplers to 0 */
   memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));

   uint32_t buffer_view_count = 0;
   uint32_t dynamic_offset_count = 0;
   uint32_t descriptor_buffer_size = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;
      /* We temporarily store the pCreateInfo->pBindings[] index (plus one) in
       * the immutable_samplers pointer.  This provides us with a
       * quick-and-dirty way to sort the bindings by binding number.
       */
      set_layout->binding[b].immutable_samplers = (void *)(uintptr_t)(j + 1);
   }
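
   /* Sketch of the trick above (editor's note, hypothetical input): if the
    * application passes pBindings[] describing bindings {2, 0} in that
    * order, the loop stores (void *)1 in binding 2 and (void *)2 in
    * binding 0.  The second pass below walks bindings 0..max_binding in
    * order, recovers info_idx from the stashed value, and skips binding 1,
    * which was never written and is still NULL (an empty binding).
    */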

   const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *binding_flags_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);

   for (uint32_t b = 0; b <= max_binding; b++) {
      /* We stashed the pCreateInfo->pBindings[] index (plus one) in the
       * immutable_samplers pointer.  Check for NULL (empty binding) and then
       * reset it and compute the index.
       */
      if (set_layout->binding[b].immutable_samplers == NULL)
         continue;
      const uint32_t info_idx =
         (uintptr_t)(void *)set_layout->binding[b].immutable_samplers - 1;
      set_layout->binding[b].immutable_samplers = NULL;

      const VkDescriptorSetLayoutBinding *binding =
         &pCreateInfo->pBindings[info_idx];

      if (binding->descriptorCount == 0)
         continue;

      set_layout->binding[b].type = binding->descriptorType;

      if (binding_flags_info && binding_flags_info->bindingCount > 0) {
         assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
         set_layout->binding[b].flags =
            binding_flags_info->pBindingFlags[info_idx];

         /* From the Vulkan spec:
          *
          *    "If VkDescriptorSetLayoutCreateInfo::flags includes
          *    VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, then
          *    all elements of pBindingFlags must not include
          *    VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
          *    VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, or
          *    VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT"
          */
         if (pCreateInfo->flags &
             VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR) {
            assert(!(set_layout->binding[b].flags &
               (VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT |
                VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |
                VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)));
         }
      }

      set_layout->binding[b].data =
         anv_descriptor_data_for_type(device->physical,
                                      binding->descriptorType);
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->descriptor_count;
      set_layout->descriptor_count += binding->descriptorCount;

      if (set_layout->binding[b].data & ANV_DESCRIPTOR_BUFFER_VIEW) {
         set_layout->binding[b].buffer_view_index = buffer_view_count;
         buffer_view_count += binding->descriptorCount;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         set_layout->binding[b].max_plane_count = 1;
         if (binding->pImmutableSamplers) {
            set_layout->binding[b].immutable_samplers = samplers;
            samplers += binding->descriptorCount;

            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               ANV_FROM_HANDLE(anv_sampler, sampler,
                               binding->pImmutableSamplers[i]);

               set_layout->binding[b].immutable_samplers[i] = sampler;
               if (set_layout->binding[b].max_plane_count < sampler->n_planes)
                  set_layout->binding[b].max_plane_count = sampler->n_planes;
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         set_layout->binding[b].max_plane_count = 1;
         break;

      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
         set_layout->dynamic_offset_stages[dynamic_offset_count] = binding->stageFlags;
         dynamic_offset_count += binding->descriptorCount;
         assert(dynamic_offset_count < MAX_DYNAMIC_BUFFERS);
         break;

      default:
         break;
      }

      if (binding->descriptorType ==
          VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
         /* Inline uniform blocks are specified to use the descriptor array
          * size as the size in bytes of the block.
          */
         descriptor_buffer_size = align_u32(descriptor_buffer_size, 32);
         set_layout->binding[b].descriptor_offset = descriptor_buffer_size;
         descriptor_buffer_size += binding->descriptorCount;
      } else {
         set_layout->binding[b].descriptor_offset = descriptor_buffer_size;
         descriptor_buffer_size += anv_descriptor_size(&set_layout->binding[b]) *
                                   binding->descriptorCount;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   set_layout->buffer_view_count = buffer_view_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;
   set_layout->descriptor_buffer_size = descriptor_buffer_size;

   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

void
anv_descriptor_set_layout_destroy(struct anv_device *device,
                                  struct anv_descriptor_set_layout *layout)
{
   assert(layout->ref_cnt == 0);
   vk_object_base_finish(&layout->base);
   vk_free(&device->vk.alloc, layout);
}

static const struct anv_descriptor_set_binding_layout *
set_layout_dynamic_binding(const struct anv_descriptor_set_layout *set_layout)
{
   if (set_layout->binding_count == 0)
      return NULL;

   const struct anv_descriptor_set_binding_layout *last_binding =
      &set_layout->binding[set_layout->binding_count - 1];
   if (!(last_binding->flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT))
      return NULL;

   return last_binding;
}

static uint32_t
set_layout_descriptor_count(const struct anv_descriptor_set_layout *set_layout,
                            uint32_t var_desc_count)
{
   const struct anv_descriptor_set_binding_layout *dynamic_binding =
      set_layout_dynamic_binding(set_layout);
   if (dynamic_binding == NULL)
      return set_layout->descriptor_count;

   assert(var_desc_count <= dynamic_binding->array_size);
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;

   if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
      return set_layout->descriptor_count;

   return set_layout->descriptor_count - shrink;
}
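
/* Worked example (editor's illustration, made-up counts): suppose the last
 * binding of a layout has VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT
 * with array_size == 1000 and the layout's total descriptor_count is 1010.
 * Allocating a set with var_desc_count == 100 gives shrink == 900, so the
 * set only needs room for 1010 - 900 == 110 descriptors.  Inline uniform
 * blocks keep the full count here because their variable count is a size in
 * bytes; set_layout_descriptor_buffer_size() applies the shrink for them
 * instead.
 */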

static uint32_t
set_layout_buffer_view_count(const struct anv_descriptor_set_layout *set_layout,
                             uint32_t var_desc_count)
{
   const struct anv_descriptor_set_binding_layout *dynamic_binding =
      set_layout_dynamic_binding(set_layout);
   if (dynamic_binding == NULL)
      return set_layout->buffer_view_count;

   assert(var_desc_count <= dynamic_binding->array_size);
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;

   if (!(dynamic_binding->data & ANV_DESCRIPTOR_BUFFER_VIEW))
      return set_layout->buffer_view_count;

   return set_layout->buffer_view_count - shrink;
}

static uint32_t
set_layout_descriptor_buffer_size(const struct anv_descriptor_set_layout *set_layout,
                                  uint32_t var_desc_count)
{
   const struct anv_descriptor_set_binding_layout *dynamic_binding =
      set_layout_dynamic_binding(set_layout);
   if (dynamic_binding == NULL)
      return set_layout->descriptor_buffer_size;

   assert(var_desc_count <= dynamic_binding->array_size);
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;

   if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
      /* Inline uniform blocks are specified to use the descriptor array
       * size as the size in bytes of the block.
       */
      return set_layout->descriptor_buffer_size - shrink;
   } else {
      return set_layout->descriptor_buffer_size -
             shrink * anv_descriptor_size(dynamic_binding);
   }
}

void anv_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   anv_descriptor_set_layout_unref(device, set_layout);
}

#define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x));

static void
sha1_update_immutable_sampler(struct mesa_sha1 *ctx,
                              const struct anv_sampler *sampler)
{
   if (!sampler->conversion)
      return;

   /* The only thing that affects the shader is ycbcr conversion */
   _mesa_sha1_update(ctx, sampler->conversion,
                     sizeof(*sampler->conversion));
}

static void
sha1_update_descriptor_set_binding_layout(struct mesa_sha1 *ctx,
   const struct anv_descriptor_set_binding_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->flags);
   SHA1_UPDATE_VALUE(ctx, layout->data);
   SHA1_UPDATE_VALUE(ctx, layout->max_plane_count);
   SHA1_UPDATE_VALUE(ctx, layout->array_size);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_index);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_index);
   SHA1_UPDATE_VALUE(ctx, layout->buffer_view_index);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_offset);

   if (layout->immutable_samplers) {
      for (uint16_t i = 0; i < layout->array_size; i++)
         sha1_update_immutable_sampler(ctx, layout->immutable_samplers[i]);
   }
}

static void
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
                                  const struct anv_descriptor_set_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->binding_count);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_count);
   SHA1_UPDATE_VALUE(ctx, layout->shader_stages);
   SHA1_UPDATE_VALUE(ctx, layout->buffer_view_count);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_count);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_buffer_size);

   for (uint16_t i = 0; i < layout->binding_count; i++)
      sha1_update_descriptor_set_binding_layout(ctx, &layout->binding[i]);
}

/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */

VkResult anv_CreatePipelineLayout(
    VkDevice                                    _device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_pipeline_layout *layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*layout), 8,
                       VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &layout->base,
                       VK_OBJECT_TYPE_PIPELINE_LAYOUT);
   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
                      pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;
      anv_descriptor_set_layout_ref(set_layout);

      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         if (set_layout->binding[b].dynamic_offset_index < 0)
            continue;

         dynamic_offset_count += set_layout->binding[b].array_size;
      }
   }
   assert(dynamic_offset_count < MAX_DYNAMIC_BUFFERS);

   struct mesa_sha1 ctx;
   _mesa_sha1_init(&ctx);
   for (unsigned s = 0; s < layout->num_sets; s++) {
      sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
      _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
                        sizeof(layout->set[s].dynamic_offset_start));
   }
   _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
   _mesa_sha1_final(&ctx, layout->sha1);

   *pPipelineLayout = anv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void anv_DestroyPipelineLayout(
    VkDevice                                    _device,
    VkPipelineLayout                            _pipelineLayout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;

   for (uint32_t i = 0; i < pipeline_layout->num_sets; i++)
      anv_descriptor_set_layout_unref(device, pipeline_layout->set[i].layout);

   vk_object_base_finish(&pipeline_layout->base);
   vk_free2(&device->vk.alloc, pAllocator, pipeline_layout);
}

/*
 * Descriptor pools.
 *
 * These are implemented using a big pool of memory and a free-list for the
 * host memory allocations, plus a state_stream and a free list for the
 * buffer view surface state.  The spec allows us to fail to allocate due to
 * fragmentation in all cases but two: 1) after pool reset, allocating up
 * until the pool size with no freeing must succeed and 2) allocating and
 * freeing only descriptor sets with the same layout.  Case 1) is easy
 * enough, and the free lists let us recycle blocks for case 2).
 */

/* The vma heap reserves 0 to mean NULL; we have to offset by some amount to
 * ensure we can allocate the entire BO without hitting zero.  The actual
 * amount doesn't matter.
 */
#define POOL_HEAP_OFFSET 64

#define EMPTY 1

VkResult anv_CreateDescriptorPool(
    VkDevice                                    _device,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorPool*                           pDescriptorPool)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_pool *pool;

   const VkDescriptorPoolInlineUniformBlockCreateInfoEXT *inline_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT);

   uint32_t descriptor_count = 0;
   uint32_t buffer_view_count = 0;
   uint32_t descriptor_bo_size = 0;
   for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
      enum anv_descriptor_data desc_data =
         anv_descriptor_data_for_type(device->physical,
                                      pCreateInfo->pPoolSizes[i].type);

      if (desc_data & ANV_DESCRIPTOR_BUFFER_VIEW)
         buffer_view_count += pCreateInfo->pPoolSizes[i].descriptorCount;

      unsigned desc_data_size = anv_descriptor_data_size(desc_data) *
                                pCreateInfo->pPoolSizes[i].descriptorCount;

      /* Combined image sampler descriptors can take up to 3 slots if they
       * hold a YCbCr image.
       */
      if (pCreateInfo->pPoolSizes[i].type ==
          VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
         desc_data_size *= 3;

      if (pCreateInfo->pPoolSizes[i].type ==
          VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
         /* Inline uniform blocks are specified to use the descriptor array
          * size as the size in bytes of the block.
          */
         assert(inline_info);
         desc_data_size += pCreateInfo->pPoolSizes[i].descriptorCount;
      }

      descriptor_bo_size += desc_data_size;

      descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
   }
   /* We have to align descriptor buffer allocations to 32B so that we can
    * push descriptor buffers.  This means that each descriptor buffer
    * allocated may burn up to 32B of extra space to get the right alignment.
    * (Technically, it's at most 28B because we're always going to start at
    * least 4B aligned but we're being conservative here.)  Allocate enough
    * extra space that we can chop it into maxSets pieces and align each one
    * of them to 32B.
    */
   descriptor_bo_size += 32 * pCreateInfo->maxSets;
   /* We align inline uniform blocks to 32B */
   if (inline_info)
      descriptor_bo_size += 32 * inline_info->maxInlineUniformBlockBindings;
   descriptor_bo_size = ALIGN(descriptor_bo_size, 4096);
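
   /* Worked example of the sizing above (editor's illustration, made-up
    * numbers): a pool with maxSets == 8 and one pool size of 64
    * STORAGE_BUFFER descriptors on an A64-capable device needs
    * 64 * sizeof(struct anv_address_range_descriptor) bytes of payload plus
    * 32 * 8 bytes of per-set alignment slack, all rounded up to the next
    * 4096-byte boundary for the BO allocation.
    */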

   const size_t pool_size =
      pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
      descriptor_count * sizeof(struct anv_descriptor) +
      buffer_view_count * sizeof(struct anv_buffer_view);
   const size_t total_size = sizeof(*pool) + pool_size;

   pool = vk_alloc2(&device->vk.alloc, pAllocator, total_size, 8,
                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &pool->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_POOL);
   pool->size = pool_size;
   pool->next = 0;
   pool->free_list = EMPTY;

   if (descriptor_bo_size > 0) {
      VkResult result = anv_device_alloc_bo(device,
                                            descriptor_bo_size,
                                            ANV_BO_ALLOC_MAPPED |
                                            ANV_BO_ALLOC_SNOOPED,
                                            0 /* explicit_address */,
                                            &pool->bo);
      if (result != VK_SUCCESS) {
         vk_free2(&device->vk.alloc, pAllocator, pool);
         return result;
      }

      util_vma_heap_init(&pool->bo_heap, POOL_HEAP_OFFSET, descriptor_bo_size);
   } else {
      pool->bo = NULL;
   }

   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   list_inithead(&pool->desc_sets);

   *pDescriptorPool = anv_descriptor_pool_to_handle(pool);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   list_for_each_entry_safe(struct anv_descriptor_set, set,
                            &pool->desc_sets, pool_link) {
      anv_descriptor_set_layout_unref(device, set->layout);
   }

   if (pool->bo) {
      util_vma_heap_finish(&pool->bo_heap);
      anv_device_release_bo(device, pool->bo);
   }
   anv_state_stream_finish(&pool->surface_state_stream);

   vk_object_base_finish(&pool->base);
   vk_free2(&device->vk.alloc, pAllocator, pool);
}

VkResult anv_ResetDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    VkDescriptorPoolResetFlags                  flags)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   list_for_each_entry_safe(struct anv_descriptor_set, set,
                            &pool->desc_sets, pool_link) {
      anv_descriptor_set_layout_unref(device, set->layout);
   }
   list_inithead(&pool->desc_sets);

   pool->next = 0;
   pool->free_list = EMPTY;

   if (pool->bo) {
      util_vma_heap_finish(&pool->bo_heap);
      util_vma_heap_init(&pool->bo_heap, POOL_HEAP_OFFSET, pool->bo->size);
   }

   anv_state_stream_finish(&pool->surface_state_stream);
   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   return VK_SUCCESS;
}

struct pool_free_list_entry {
   uint32_t next;
   uint32_t size;
};

static VkResult
anv_descriptor_pool_alloc_set(struct anv_descriptor_pool *pool,
                              uint32_t size,
                              struct anv_descriptor_set **set)
{
   if (size <= pool->size - pool->next) {
      *set = (struct anv_descriptor_set *) (pool->data + pool->next);
      (*set)->size = size;
      pool->next += size;
      return VK_SUCCESS;
   } else {
      struct pool_free_list_entry *entry;
      uint32_t *link = &pool->free_list;
      for (uint32_t f = pool->free_list; f != EMPTY; f = entry->next) {
         entry = (struct pool_free_list_entry *) (pool->data + f);
         if (size <= entry->size) {
            *link = entry->next;
            *set = (struct anv_descriptor_set *) entry;
            (*set)->size = entry->size;
            return VK_SUCCESS;
         }
         link = &entry->next;
      }

      if (pool->free_list != EMPTY) {
         return vk_error(VK_ERROR_FRAGMENTED_POOL);
      } else {
         return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY);
      }
   }
}

static void
anv_descriptor_pool_free_set(struct anv_descriptor_pool *pool,
                             struct anv_descriptor_set *set)
{
   /* Put the descriptor set allocation back on the free list. */
   const uint32_t index = (char *) set - pool->data;
   if (index + set->size == pool->next) {
      pool->next = index;
   } else {
      struct pool_free_list_entry *entry = (struct pool_free_list_entry *) set;
      entry->next = pool->free_list;
      entry->size = set->size;
      pool->free_list = (char *) entry - pool->data;
   }
}
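
/* How allocation and freeing compose (editor's note): allocation first
 * bump-allocates from the untouched tail at pool->next and only then does a
 * first-fit walk of the free list.  Freeing the most recent allocation just
 * rewinds pool->next; any other freed set becomes a free-list entry overlaid
 * on its own memory.  This is what backs the two guarantees mentioned above:
 * a freshly reset pool bump-allocates until full, and a freed block is
 * always an exact fit for the next set with the same layout and size.
 */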

struct surface_state_free_list_entry {
   void *next;
   struct anv_state state;
};

static struct anv_state
anv_descriptor_pool_alloc_state(struct anv_descriptor_pool *pool)
{
   struct surface_state_free_list_entry *entry =
      pool->surface_state_free_list;

   if (entry) {
      struct anv_state state = entry->state;
      pool->surface_state_free_list = entry->next;
      assert(state.alloc_size == 64);
      return state;
   } else {
      return anv_state_stream_alloc(&pool->surface_state_stream, 64, 64);
   }
}

static void
anv_descriptor_pool_free_state(struct anv_descriptor_pool *pool,
                               struct anv_state state)
{
   /* Put the buffer view surface state back on the free list. */
   struct surface_state_free_list_entry *entry = state.map;
   entry->next = pool->surface_state_free_list;
   entry->state = state;
   pool->surface_state_free_list = entry;
}

size_t
anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout,
                               uint32_t var_desc_count)
{
   const uint32_t descriptor_count =
      set_layout_descriptor_count(layout, var_desc_count);
   const uint32_t buffer_view_count =
      set_layout_buffer_view_count(layout, var_desc_count);

   return sizeof(struct anv_descriptor_set) +
          descriptor_count * sizeof(struct anv_descriptor) +
          buffer_view_count * sizeof(struct anv_buffer_view);
}
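
/* Sketch of the host-side layout this size covers (editor's illustration):
 * the set is one contiguous allocation of
 *
 *    struct anv_descriptor_set   header, followed by
 *    struct anv_descriptor       x set_layout_descriptor_count(), then
 *    struct anv_buffer_view      x set_layout_buffer_view_count().
 *
 * anv_descriptor_set_create() relies on this ordering when it points
 * set->buffer_views just past set->descriptors[set->descriptor_count].
 */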

VkResult
anv_descriptor_set_create(struct anv_device *device,
                          struct anv_descriptor_pool *pool,
                          struct anv_descriptor_set_layout *layout,
                          uint32_t var_desc_count,
                          struct anv_descriptor_set **out_set)
{
   struct anv_descriptor_set *set;
   const size_t size = anv_descriptor_set_layout_size(layout, var_desc_count);

   VkResult result = anv_descriptor_pool_alloc_set(pool, size, &set);
   if (result != VK_SUCCESS)
      return result;

   uint32_t descriptor_buffer_size =
      set_layout_descriptor_buffer_size(layout, var_desc_count);
   if (descriptor_buffer_size) {
      /* Align the size to 32 so that alignment gaps don't cause extra holes
       * in the heap which can lead to bad performance.
       */
      uint32_t set_buffer_size = ALIGN(descriptor_buffer_size, 32);
      uint64_t pool_vma_offset =
         util_vma_heap_alloc(&pool->bo_heap, set_buffer_size, 32);
      if (pool_vma_offset == 0) {
         anv_descriptor_pool_free_set(pool, set);
         return vk_error(VK_ERROR_FRAGMENTED_POOL);
      }
      assert(pool_vma_offset >= POOL_HEAP_OFFSET &&
             pool_vma_offset - POOL_HEAP_OFFSET <= INT32_MAX);
      set->desc_mem.offset = pool_vma_offset - POOL_HEAP_OFFSET;
      set->desc_mem.alloc_size = set_buffer_size;
      set->desc_mem.map = pool->bo->map + set->desc_mem.offset;

      enum isl_format format =
         anv_isl_format_for_descriptor_type(device,
                                            VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);

      set->desc_surface_state = anv_descriptor_pool_alloc_state(pool);
      anv_fill_buffer_surface_state(device, set->desc_surface_state, format,
                                    ISL_SURF_USAGE_CONSTANT_BUFFER_BIT,
                                    (struct anv_address) {
                                       .bo = pool->bo,
                                       .offset = set->desc_mem.offset,
                                    },
                                    descriptor_buffer_size, 1);
   } else {
      set->desc_mem = ANV_STATE_NULL;
      set->desc_surface_state = ANV_STATE_NULL;
   }

   vk_object_base_init(&device->vk, &set->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_SET);
   set->pool = pool;
   set->layout = layout;
   anv_descriptor_set_layout_ref(layout);

   set->buffer_view_count =
      set_layout_buffer_view_count(layout, var_desc_count);
   set->descriptor_count =
      set_layout_descriptor_count(layout, var_desc_count);

   set->buffer_views =
      (struct anv_buffer_view *) &set->descriptors[set->descriptor_count];

   /* By defining the descriptors to be zero now, we can later verify that
    * a descriptor has not been populated with user data.
    */
   memset(set->descriptors, 0,
          sizeof(struct anv_descriptor) * set->descriptor_count);

   /* Go through and fill out immutable samplers if we have any */
   struct anv_descriptor *desc = set->descriptors;
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers) {
         for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
            /* The type will get changed to COMBINED_IMAGE_SAMPLER in
             * UpdateDescriptorSets if needed.  However, if the descriptor
             * set has an immutable sampler, UpdateDescriptorSets may never
             * touch it, so we need to make sure it's 100% valid now.
             *
             * We don't need to actually provide a sampler because the helper
             * will always write in the immutable sampler regardless of what
             * is in the sampler parameter.
             */
            VkDescriptorImageInfo info = { };
            anv_descriptor_set_write_image_view(device, set, &info,
                                                VK_DESCRIPTOR_TYPE_SAMPLER,
                                                b, i);
         }
      }
      desc += layout->binding[b].array_size;
   }

   /* Allocate surface state for the buffer views. */
   for (uint32_t b = 0; b < set->buffer_view_count; b++) {
      set->buffer_views[b].surface_state =
         anv_descriptor_pool_alloc_state(pool);
   }

   list_addtail(&set->pool_link, &pool->desc_sets);

   *out_set = set;

   return VK_SUCCESS;
}

void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_pool *pool,
                           struct anv_descriptor_set *set)
{
   anv_descriptor_set_layout_unref(device, set->layout);

   if (set->desc_mem.alloc_size) {
      util_vma_heap_free(&pool->bo_heap,
                         (uint64_t)set->desc_mem.offset + POOL_HEAP_OFFSET,
                         set->desc_mem.alloc_size);
      anv_descriptor_pool_free_state(pool, set->desc_surface_state);
   }

   for (uint32_t b = 0; b < set->buffer_view_count; b++)
      anv_descriptor_pool_free_state(pool, set->buffer_views[b].surface_state);

   list_del(&set->pool_link);

   vk_object_base_finish(&set->base);
   anv_descriptor_pool_free_set(pool, set);
}

VkResult anv_AllocateDescriptorSets(
    VkDevice                                    _device,
    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
    VkDescriptorSet*                            pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   struct anv_descriptor_set *set;
   uint32_t i;

   const VkDescriptorSetVariableDescriptorCountAllocateInfo *vdcai =
      vk_find_struct_const(pAllocateInfo->pNext,
                           DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO);

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      uint32_t var_desc_count = 0;
      if (vdcai != NULL && vdcai->descriptorSetCount > 0) {
         assert(vdcai->descriptorSetCount == pAllocateInfo->descriptorSetCount);
         var_desc_count = vdcai->pDescriptorCounts[i];
      }

      result = anv_descriptor_set_create(device, pool, layout,
                                         var_desc_count, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS)
      anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}

VkResult anv_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;

      anv_descriptor_set_destroy(device, pool, set);
   }

   return VK_SUCCESS;
}

static void
anv_descriptor_set_write_image_param(uint32_t *param_desc_map,
                                     const struct brw_image_param *param)
{
#define WRITE_PARAM_FIELD(field, FIELD) \
   for (unsigned i = 0; i < ARRAY_SIZE(param->field); i++) \
      param_desc_map[BRW_IMAGE_PARAM_##FIELD##_OFFSET + i] = param->field[i]

   WRITE_PARAM_FIELD(offset, OFFSET);
   WRITE_PARAM_FIELD(size, SIZE);
   WRITE_PARAM_FIELD(stride, STRIDE);
   WRITE_PARAM_FIELD(tiling, TILING);
   WRITE_PARAM_FIELD(swizzling, SWIZZLING);

#undef WRITE_PARAM_FIELD
}

static uint32_t
anv_surface_state_to_handle(struct anv_state state)
{
   /* Bits 31:12 of the bindless surface offset in the extended message
    * descriptor are bits 25:6 of the byte-based address.
    */
   assert(state.offset >= 0);
   uint32_t offset = state.offset;
   assert((offset & 0x3f) == 0 && offset < (1 << 26));
   return offset << 6;
}
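
/* Worked bit-math example (editor's illustration): for state.offset ==
 * 0x1000, the handle is 0x1000 << 6 == 0x40000.  Bits 31:12 of the handle
 * are 0x40000 >> 12 == 0x40, which matches bits 25:6 of the byte address,
 * 0x1000 >> 6 == 0x40.  The asserts encode the preconditions: surface
 * states are 64-byte aligned and their offsets fit in 26 bits.
 */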

void
anv_descriptor_set_write_image_view(struct anv_device *device,
                                    struct anv_descriptor_set *set,
                                    const VkDescriptorImageInfo * const info,
                                    VkDescriptorType type,
                                    uint32_t binding,
                                    uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];
   struct anv_image_view *image_view = NULL;
   struct anv_sampler *sampler = NULL;

   /* We get called with just VK_DESCRIPTOR_TYPE_SAMPLER as part of descriptor
    * set initialization to set the bindless samplers.
    */
   assert(type == bind_layout->type ||
          type == VK_DESCRIPTOR_TYPE_SAMPLER);

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      sampler = bind_layout->immutable_samplers ?
                bind_layout->immutable_samplers[element] :
                anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      image_view = anv_image_view_from_handle(info->imageView);
      sampler = bind_layout->immutable_samplers ?
                bind_layout->immutable_samplers[element] :
                anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      image_view = anv_image_view_from_handle(info->imageView);
      break;

   default:
      unreachable("invalid descriptor type");
   }

   *desc = (struct anv_descriptor) {
      .type = type,
      .layout = info->imageLayout,
      .image_view = image_view,
      .sampler = sampler,
   };

   void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
                    element * anv_descriptor_size(bind_layout);
   memset(desc_map, 0, anv_descriptor_size(bind_layout));

   if (bind_layout->data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
      struct anv_sampled_image_descriptor desc_data[3];
      memset(desc_data, 0, sizeof(desc_data));

      if (image_view) {
         for (unsigned p = 0; p < image_view->n_planes; p++) {
            struct anv_surface_state sstate =
               (desc->layout == VK_IMAGE_LAYOUT_GENERAL) ?
               image_view->planes[p].general_sampler_surface_state :
               image_view->planes[p].optimal_sampler_surface_state;
            desc_data[p].image = anv_surface_state_to_handle(sstate.state);
         }
      }

      if (sampler) {
         for (unsigned p = 0; p < sampler->n_planes; p++)
            desc_data[p].sampler = sampler->bindless_state.offset + p * 32;
      }

      /* We may have max_plane_count == 0 if this isn't a sampled image, but
       * it can be no more than the size of our array of handles.
       */
      assert(bind_layout->max_plane_count <= ARRAY_SIZE(desc_data));
      memcpy(desc_map, desc_data,
             MAX2(1, bind_layout->max_plane_count) * sizeof(desc_data[0]));
   }

   if (image_view == NULL)
      return;

   if (bind_layout->data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
      assert(!(bind_layout->data & ANV_DESCRIPTOR_IMAGE_PARAM));
      assert(image_view->n_planes == 1);
      struct anv_storage_image_descriptor desc_data = {
         .read_write = anv_surface_state_to_handle(
                           image_view->planes[0].storage_surface_state.state),
         .write_only = anv_surface_state_to_handle(
                           image_view->planes[0].writeonly_storage_surface_state.state),
      };
      memcpy(desc_map, &desc_data, sizeof(desc_data));
   }

   if (bind_layout->data & ANV_DESCRIPTOR_IMAGE_PARAM) {
      /* Storage images can only ever have one plane */
      assert(image_view->n_planes == 1);
      const struct brw_image_param *image_param =
         &image_view->planes[0].storage_image_param;

      anv_descriptor_set_write_image_param(desc_map, image_param);
   }

   if (bind_layout->data & ANV_DESCRIPTOR_TEXTURE_SWIZZLE) {
      assert(!(bind_layout->data & ANV_DESCRIPTOR_SAMPLED_IMAGE));
      assert(image_view);
      struct anv_texture_swizzle_descriptor desc_data[3];
      memset(desc_data, 0, sizeof(desc_data));

      for (unsigned p = 0; p < image_view->n_planes; p++) {
         desc_data[p] = (struct anv_texture_swizzle_descriptor) {
            .swizzle = {
               (uint8_t)image_view->planes[p].isl.swizzle.r,
               (uint8_t)image_view->planes[p].isl.swizzle.g,
               (uint8_t)image_view->planes[p].isl.swizzle.b,
               (uint8_t)image_view->planes[p].isl.swizzle.a,
            },
         };
      }
      memcpy(desc_map, desc_data,
             MAX2(1, bind_layout->max_plane_count) * sizeof(desc_data[0]));
   }
}
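
/* Layout note (derived from the writes above): for a sampled-image binding,
 * the descriptor memory holds one anv_sampled_image_descriptor per plane,
 * each pairing a bindless surface handle with a bindless sampler offset; a
 * multi-planar YCbCr view simply fills one entry per plane, up to the
 * binding's max_plane_count.
 */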

void
anv_descriptor_set_write_buffer_view(struct anv_device *device,
                                     struct anv_descriptor_set *set,
                                     VkDescriptorType type,
                                     struct anv_buffer_view *buffer_view,
                                     uint32_t binding,
                                     uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
                    element * anv_descriptor_size(bind_layout);

   if (buffer_view == NULL) {
      *desc = (struct anv_descriptor) { .type = type, };
      memset(desc_map, 0, anv_descriptor_size(bind_layout));
      return;
   }

   *desc = (struct anv_descriptor) {
      .type = type,
      .buffer_view = buffer_view,
   };

   if (bind_layout->data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
      struct anv_sampled_image_descriptor desc_data = {
         .image = anv_surface_state_to_handle(buffer_view->surface_state),
      };
      memcpy(desc_map, &desc_data, sizeof(desc_data));
   }

   if (bind_layout->data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
      assert(!(bind_layout->data & ANV_DESCRIPTOR_IMAGE_PARAM));
      struct anv_storage_image_descriptor desc_data = {
         .read_write = anv_surface_state_to_handle(
                           buffer_view->storage_surface_state),
         .write_only = anv_surface_state_to_handle(
                           buffer_view->writeonly_storage_surface_state),
      };
      memcpy(desc_map, &desc_data, sizeof(desc_data));
   }

   if (bind_layout->data & ANV_DESCRIPTOR_IMAGE_PARAM) {
      anv_descriptor_set_write_image_param(desc_map,
                                           &buffer_view->storage_image_param);
   }
}

void
anv_descriptor_set_write_buffer(struct anv_device *device,
                                struct anv_descriptor_set *set,
                                struct anv_state_stream *alloc_stream,
                                VkDescriptorType type,
                                struct anv_buffer *buffer,
                                uint32_t binding,
                                uint32_t element,
                                VkDeviceSize offset,
                                VkDeviceSize range)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
                    element * anv_descriptor_size(bind_layout);

   if (buffer == NULL) {
      *desc = (struct anv_descriptor) { .type = type, };
      memset(desc_map, 0, anv_descriptor_size(bind_layout));
      return;
   }

   struct anv_address bind_addr = anv_address_add(buffer->address, offset);
   uint64_t bind_range = anv_buffer_get_range(buffer, offset, range);

   /* We report a bounds checking alignment of 32B for the sake of block
    * messages which read an entire register worth at a time.
    */
   if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
       type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)
      bind_range = align_u64(bind_range, ANV_UBO_ALIGNMENT);

   if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
       type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer = buffer,
         .offset = offset,
         .range = range,
      };
   } else {
      assert(bind_layout->data & ANV_DESCRIPTOR_BUFFER_VIEW);
      struct anv_buffer_view *bview =
         &set->buffer_views[bind_layout->buffer_view_index + element];

      bview->format = anv_isl_format_for_descriptor_type(device, type);
      bview->range = bind_range;
      bview->address = bind_addr;

      /* If we're writing descriptors through a push command, we need to
       * allocate the surface state from the command buffer. Otherwise it will
       * be allocated by the descriptor pool when calling
       * vkAllocateDescriptorSets.
       */
      if (alloc_stream)
         bview->surface_state = anv_state_stream_alloc(alloc_stream, 64, 64);

      isl_surf_usage_flags_t usage =
         (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
          type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ?
         ISL_SURF_USAGE_CONSTANT_BUFFER_BIT :
         ISL_SURF_USAGE_STORAGE_BIT;

      anv_fill_buffer_surface_state(device, bview->surface_state,
                                    bview->format, usage,
                                    bind_addr, bind_range, 1);

      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer_view = bview,
      };
   }

   if (bind_layout->data & ANV_DESCRIPTOR_ADDRESS_RANGE) {
      struct anv_address_range_descriptor desc_data = {
         .address = anv_address_physical(bind_addr),
         .range = bind_range,
      };
      memcpy(desc_map, &desc_data, sizeof(desc_data));
   }
}
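
/* Worked example (illustrative; assumes ANV_UBO_ALIGNMENT is the 32B noted
 * above): a uniform buffer bound with range 68 gets a reported bind_range of
 * align_u64(68, 32) = 96, so a block load that reads a whole register past
 * the end of the user-visible range still passes the bounds check.
 */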

void
anv_descriptor_set_write_inline_uniform_data(struct anv_device *device,
                                             struct anv_descriptor_set *set,
                                             uint32_t binding,
                                             const void *data,
                                             size_t offset,
                                             size_t size)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];

   assert(bind_layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM);

   void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset;

   memcpy(desc_map + offset, data, size);
}
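
/* Note: for inline uniform blocks, Vulkan repurposes dstArrayElement as a
 * byte offset and descriptorCount as a byte size, which is why the callers
 * below pass them straight through as "offset" and "size".
 */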

void anv_UpdateDescriptorSets(
    VkDevice                                    _device,
    uint32_t                                    descriptorWriteCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    descriptorCopyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      ANV_FROM_HANDLE(anv_descriptor_set, set, write->dstSet);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            anv_descriptor_set_write_image_view(device, set,
                                                write->pImageInfo + j,
                                                write->descriptorType,
                                                write->dstBinding,
                                                write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pTexelBufferView[j]);

            anv_descriptor_set_write_buffer_view(device, set,
                                                 write->descriptorType,
                                                 bview,
                                                 write->dstBinding,
                                                 write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);

            anv_descriptor_set_write_buffer(device, set,
                                            NULL,
                                            write->descriptorType,
                                            buffer,
                                            write->dstBinding,
                                            write->dstArrayElement + j,
                                            write->pBufferInfo[j].offset,
                                            write->pBufferInfo[j].range);
         }
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
         const VkWriteDescriptorSetInlineUniformBlockEXT *inline_write =
            vk_find_struct_const(write->pNext,
                                 WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT);
         assert(inline_write->dataSize == write->descriptorCount);
         anv_descriptor_set_write_inline_uniform_data(device, set,
                                                      write->dstBinding,
                                                      inline_write->pData,
                                                      write->dstArrayElement,
                                                      inline_write->dataSize);
         break;
      }

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
      ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);

      const struct anv_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct anv_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];
      src_desc += copy->srcArrayElement;

      const struct anv_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct anv_descriptor *dst_desc =
         &dst->descriptors[dst_layout->descriptor_index];
      dst_desc += copy->dstArrayElement;

      if (src_layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM) {
         assert(src_layout->data == ANV_DESCRIPTOR_INLINE_UNIFORM);
         memcpy(dst->desc_mem.map + dst_layout->descriptor_offset +
                                    copy->dstArrayElement,
                src->desc_mem.map + src_layout->descriptor_offset +
                                    copy->srcArrayElement,
                copy->descriptorCount);
      } else {
         for (uint32_t j = 0; j < copy->descriptorCount; j++)
            dst_desc[j] = src_desc[j];

         unsigned desc_size = anv_descriptor_size(src_layout);
         if (desc_size > 0) {
            assert(desc_size == anv_descriptor_size(dst_layout));
            memcpy(dst->desc_mem.map + dst_layout->descriptor_offset +
                                       copy->dstArrayElement * desc_size,
                   src->desc_mem.map + src_layout->descriptor_offset +
                                       copy->srcArrayElement * desc_size,
                   copy->descriptorCount * desc_size);
         }
      }
   }
}
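
/* A minimal application-side sketch of the write path above (illustrative
 * only; my_set and my_buffer stand in for handles created elsewhere):
 *
 *    VkDescriptorBufferInfo info = {
 *       .buffer = my_buffer,
 *       .offset = 0,
 *       .range = VK_WHOLE_SIZE,
 *    };
 *    VkWriteDescriptorSet write = {
 *       .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
 *       .dstSet = my_set,
 *       .dstBinding = 0,
 *       .dstArrayElement = 0,
 *       .descriptorCount = 1,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .pBufferInfo = &info,
 *    };
 *    vkUpdateDescriptorSets(device, 1, &write, 0, NULL);
 *
 * This lands in the UNIFORM_BUFFER case above and ends up in
 * anv_descriptor_set_write_buffer with alloc_stream == NULL.
 */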

/*
 * Descriptor update templates.
 */

void
anv_descriptor_set_write_template(struct anv_device *device,
                                  struct anv_descriptor_set *set,
                                  struct anv_state_stream *alloc_stream,
                                  const struct anv_descriptor_update_template *template,
                                  const void *data)
{
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct anv_descriptor_template_entry *entry =
         &template->entries[i];

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;
            anv_descriptor_set_write_image_view(device, set,
                                                info, entry->type,
                                                entry->binding,
                                                entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);

            anv_descriptor_set_write_buffer_view(device, set,
                                                 entry->type,
                                                 bview,
                                                 entry->binding,
                                                 entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);

            anv_descriptor_set_write_buffer(device, set,
                                            alloc_stream,
                                            entry->type,
                                            buffer,
                                            entry->binding,
                                            entry->array_element + j,
                                            info->offset, info->range);
         }
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
         anv_descriptor_set_write_inline_uniform_data(device, set,
                                                      entry->binding,
                                                      data + entry->offset,
                                                      entry->array_element,
                                                      entry->array_count);
         break;

      default:
         break;
      }
   }
}
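
/* Addressing example (derived from the loops above): an entry with
 * offset 16 and stride 24 reads array element j from data + 16 + j * 24,
 * so applications can lay template data out as an array of structs with
 * each entry pointing at one member.
 */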

VkResult anv_CreateDescriptorUpdateTemplate(
    VkDevice                                    _device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_update_template *template;

   size_t size = sizeof(*template) +
      pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);
   template = vk_alloc2(&device->vk.alloc, pAllocator, size, 8,
                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (template == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &template->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);
   template->bind_point = pCreateInfo->pipelineBindPoint;

   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
      template->set = pCreateInfo->set;

   template->entry_count = pCreateInfo->descriptorUpdateEntryCount;
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const VkDescriptorUpdateTemplateEntry *pEntry =
         &pCreateInfo->pDescriptorUpdateEntries[i];

      template->entries[i] = (struct anv_descriptor_template_entry) {
         .type = pEntry->descriptorType,
         .binding = pEntry->dstBinding,
         .array_element = pEntry->dstArrayElement,
         .array_count = pEntry->descriptorCount,
         .offset = pEntry->offset,
         .stride = pEntry->stride,
      };
   }

   *pDescriptorUpdateTemplate =
      anv_descriptor_update_template_to_handle(template);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorUpdateTemplate(
    VkDevice                                    _device,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   vk_object_base_finish(&template->base);
   vk_free2(&device->vk.alloc, pAllocator, template);
}

void anv_UpdateDescriptorSetWithTemplate(
    VkDevice                                    _device,
    VkDescriptorSet                             descriptorSet,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const void*                                 pData)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   anv_descriptor_set_write_template(device, set, NULL, template, pData);
}
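
/* A minimal application-side sketch of the template path (illustrative only;
 * my_set_layout, my_set, and my_buffer stand in for handles created
 * elsewhere):
 *
 *    VkDescriptorUpdateTemplateEntry entry = {
 *       .dstBinding = 0,
 *       .dstArrayElement = 0,
 *       .descriptorCount = 1,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .offset = 0,
 *       .stride = sizeof(VkDescriptorBufferInfo),
 *    };
 *    VkDescriptorUpdateTemplateCreateInfo info = {
 *       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
 *       .descriptorUpdateEntryCount = 1,
 *       .pDescriptorUpdateEntries = &entry,
 *       .templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
 *       .descriptorSetLayout = my_set_layout,
 *       .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
 *    };
 *    VkDescriptorUpdateTemplate tmpl;
 *    vkCreateDescriptorUpdateTemplate(device, &info, NULL, &tmpl);
 *
 *    VkDescriptorBufferInfo data = { my_buffer, 0, VK_WHOLE_SIZE };
 *    vkUpdateDescriptorSetWithTemplate(device, my_set, tmpl, &data);
 *
 * The final call reaches anv_descriptor_set_write_template above with
 * alloc_stream == NULL.
 */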