/*
 * Copyright © 2019 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "lvp_private.h"
#include "nir.h"
#include "nir_builder.h"
#include "lvp_lower_vulkan_resource.h"

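/* Filter callback for nir_shader_lower_instructions(): match the four
 * descriptor-related intrinsics and every tex instruction, since those are
 * the only instructions this pass rewrites.
 */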
static bool
lower_vulkan_resource_index(const nir_instr *instr, const void *data_cb)
{
   if (instr->type == nir_instr_type_intrinsic) {
      nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
      switch (intrin->intrinsic) {
      case nir_intrinsic_vulkan_resource_index:
      case nir_intrinsic_vulkan_resource_reindex:
      case nir_intrinsic_load_vulkan_descriptor:
      case nir_intrinsic_get_ssbo_size:
         return true;
      default:
         return false;
      }
   }
   if (instr->type == nir_instr_type_tex) {
      return true;
   }
   return false;
}

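/* Lower vulkan_resource_index to a flat per-stage UBO/SSBO slot: sum the
 * buffer counts of all descriptor sets preceding this one, then add the
 * binding's compacted index within its own set.
 */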
static nir_ssa_def *lower_vri_intrin_vri(struct nir_builder *b,
                                         nir_instr *instr, void *data_cb)
{
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   unsigned desc_set_idx = nir_intrinsic_desc_set(intrin);
   unsigned binding_idx = nir_intrinsic_binding(intrin);
   struct lvp_pipeline_layout *layout = data_cb;
   struct lvp_descriptor_set_binding_layout *binding = &layout->set[desc_set_idx].layout->binding[binding_idx];
   int value = 0;
   bool is_ubo = (binding->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
                  binding->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);

   for (unsigned s = 0; s < desc_set_idx; s++) {
      if (is_ubo)
         value += layout->set[s].layout->stage[b->shader->info.stage].const_buffer_count;
      else
         value += layout->set[s].layout->stage[b->shader->info.stage].shader_buffer_count;
   }
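   /* UBO slots are biased by one, leaving const buffer 0 free (the driver
    * binds the push constant block there).
    */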
   if (is_ubo)
      value += binding->stage[b->shader->info.stage].const_buffer_index + 1;
   else
      value += binding->stage[b->shader->info.stage].shader_buffer_index;

   /* The SSA size for indices is the same as for pointers.  We use
    * nir_addr_format_32bit_index_offset so we need a vec2.  We don't need all
    * that data so just stuff a 0 in the second component.
    */
   if (nir_src_is_const(intrin->src[0])) {
      value += nir_src_comp_as_int(intrin->src[0], 0);
      return nir_imm_ivec2(b, value, 0);
   } else
      return nir_vec2(b, nir_iadd_imm(b, intrin->src[0].ssa, value),
                         nir_imm_int(b, 0));
}

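/* Lower vulkan_resource_reindex: the base source has already been lowered
 * to an (index, 0) vec2, so just add the delta to the index component.
 */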
static nir_ssa_def *lower_vri_intrin_vrri(struct nir_builder *b,
                                          nir_instr *instr, void *data_cb)
{
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   nir_ssa_def *old_index = nir_ssa_for_src(b, intrin->src[0], 1);
   nir_ssa_def *delta = nir_ssa_for_src(b, intrin->src[1], 1);
   return nir_vec2(b, nir_iadd(b, old_index, delta),
                      nir_imm_int(b, 0));
}

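/* Lower load_vulkan_descriptor: with nir_addr_format_32bit_index_offset the
 * descriptor is the (index, 0) vec2 itself, so pass the index through.
 */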
static nir_ssa_def *lower_vri_intrin_lvd(struct nir_builder *b,
                                         nir_instr *instr, void *data_cb)
{
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   nir_ssa_def *index = nir_ssa_for_src(b, intrin->src[0], 1);
   return nir_vec2(b, index, nir_imm_int(b, 0));
}

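/* Replace a sampler or texture deref source on a tex instruction with a
 * flat per-stage index.  A constant array index is folded into the flat
 * index; a dynamic one is turned into a sampler/texture offset source.
 * Returns the flat index, or -1 if the tex instruction has no source of
 * the requested deref type.
 */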
static int lower_vri_instr_tex_deref(nir_tex_instr *tex,
                                     nir_tex_src_type deref_src_type,
                                     gl_shader_stage stage,
                                     struct lvp_pipeline_layout *layout)
{
   int deref_src_idx = nir_tex_instr_src_index(tex, deref_src_type);

   if (deref_src_idx < 0)
      return -1;

   nir_deref_instr *deref_instr = nir_src_as_deref(tex->src[deref_src_idx].src);
   nir_variable *var = nir_deref_instr_get_variable(deref_instr);
   unsigned desc_set_idx = var->data.descriptor_set;
   unsigned binding_idx = var->data.binding;
   int value = 0;
   struct lvp_descriptor_set_binding_layout *binding = &layout->set[desc_set_idx].layout->binding[binding_idx];
   nir_tex_instr_remove_src(tex, deref_src_idx);
   for (unsigned s = 0; s < desc_set_idx; s++) {
      if (deref_src_type == nir_tex_src_sampler_deref)
         value += layout->set[s].layout->stage[stage].sampler_count;
      else
         value += layout->set[s].layout->stage[stage].sampler_view_count;
   }
   if (deref_src_type == nir_tex_src_sampler_deref)
      value += binding->stage[stage].sampler_index;
   else
      value += binding->stage[stage].sampler_view_index;

   if (deref_instr->deref_type == nir_deref_type_array) {
      if (nir_src_is_const(deref_instr->arr.index))
         value += nir_src_as_uint(deref_instr->arr.index);
      else {
         if (deref_src_type == nir_tex_src_sampler_deref)
            nir_tex_instr_add_src(tex, nir_tex_src_sampler_offset, deref_instr->arr.index);
         else
            nir_tex_instr_add_src(tex, nir_tex_src_texture_offset, deref_instr->arr.index);
      }
   }
   if (deref_src_type == nir_tex_src_sampler_deref)
      tex->sampler_index = value;
   else
      tex->texture_index = value;
   return value;
}

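/* Rewrite both the sampler and texture derefs on a tex instruction and
 * record the flattened texture index in the shader's textures_used mask.
 */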
static void lower_vri_instr_tex(struct nir_builder *b,
                                nir_tex_instr *tex, void *data_cb)
{
   struct lvp_pipeline_layout *layout = data_cb;
   int tex_value = 0;

   lower_vri_instr_tex_deref(tex, nir_tex_src_sampler_deref, b->shader->info.stage, layout);
   tex_value = lower_vri_instr_tex_deref(tex, nir_tex_src_texture_deref, b->shader->info.stage, layout);
   if (tex_value >= 0)
      b->shader->info.textures_used |= (1u << tex_value);
}

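/* Lowering callback: dispatch each descriptor intrinsic to its handler.
 * Returning NULL keeps the original instruction, which the get_ssbo_size
 * and tex paths modify in place instead of replacing.
 */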
static nir_ssa_def *lower_vri_instr(struct nir_builder *b,
                                    nir_instr *instr, void *data_cb)
{
   if (instr->type == nir_instr_type_intrinsic) {
      nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
      switch (intrin->intrinsic) {
      case nir_intrinsic_vulkan_resource_index:
         return lower_vri_intrin_vri(b, instr, data_cb);

      case nir_intrinsic_vulkan_resource_reindex:
         return lower_vri_intrin_vrri(b, instr, data_cb);

      case nir_intrinsic_load_vulkan_descriptor:
         return lower_vri_intrin_lvd(b, instr, data_cb);

      case nir_intrinsic_get_ssbo_size: {
         /* The result of the load_vulkan_descriptor is a vec2(index, offset)
          * but we only want the index in get_ssbo_size.
          */
         b->cursor = nir_before_instr(&intrin->instr);
         nir_ssa_def *index = nir_ssa_for_src(b, intrin->src[0], 1);
         nir_instr_rewrite_src(&intrin->instr, &intrin->src[0],
                               nir_src_for_ssa(index));
         return NULL;
      }

      default:
         return NULL;
      }
   }
   if (instr->type == nir_instr_type_tex)
      lower_vri_instr_tex(b, nir_instr_as_tex(instr), data_cb);
   return NULL;
}

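/* Entry point: lower all Vulkan descriptor references in the shader to the
 * flat per-stage slots used by gallium, then remap sampler and image
 * uniform variables to their flattened bindings.
 */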
void lvp_lower_pipeline_layout(const struct lvp_device *device,
                               struct lvp_pipeline_layout *layout,
                               nir_shader *shader)
{
   nir_shader_lower_instructions(shader, lower_vulkan_resource_index, lower_vri_instr, layout);
   nir_foreach_uniform_variable(var, shader) {
      const struct glsl_type *type = var->type;
      enum glsl_base_type base_type =
         glsl_get_base_type(glsl_without_array(type));
      unsigned desc_set_idx = var->data.descriptor_set;
      unsigned binding_idx = var->data.binding;
      struct lvp_descriptor_set_binding_layout *binding = &layout->set[desc_set_idx].layout->binding[binding_idx];
      int value = 0;
      var->data.descriptor_set = 0;
      if (base_type == GLSL_TYPE_SAMPLER) {
         if (binding->type == VK_DESCRIPTOR_TYPE_SAMPLER) {
            for (unsigned s = 0; s < desc_set_idx; s++)
               value += layout->set[s].layout->stage[shader->info.stage].sampler_count;
            value += binding->stage[shader->info.stage].sampler_index;
         } else {
            for (unsigned s = 0; s < desc_set_idx; s++)
               value += layout->set[s].layout->stage[shader->info.stage].sampler_view_count;
            value += binding->stage[shader->info.stage].sampler_view_index;
         }
         var->data.binding = value;
      }
      if (base_type == GLSL_TYPE_IMAGE) {
         for (unsigned s = 0; s < desc_set_idx; s++)
            value += layout->set[s].layout->stage[shader->info.stage].image_count;
         value += binding->stage[shader->info.stage].image_index;
         var->data.binding = value;
      }
   }
}