/*
 * Copyright © 2020 Valve Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 */

/*
 * Replaces the make-available/make-visible semantics of scoped barriers
 * with ACCESS_COHERENT on the affected memory loads/stores.
 */
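
/*
 * As an illustrative sketch (pseudo-NIR, not exact nir_print output), a
 * barrier that makes a prior SSBO write available:
 *
 *    intrinsic store_ssbo (...) (access=none)
 *    intrinsic scoped_barrier (mem_semantics=acq_rel|make_available,
 *                              mem_modes=ssbo, ...)
 *
 * is rewritten so that the store itself carries the coherency:
 *
 *    intrinsic store_ssbo (...) (access=coherent)
 *    intrinsic scoped_barrier (mem_semantics=acq_rel, mem_modes=ssbo, ...)
 */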

#include "nir/nir.h"
#include "shader_enums.h"
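
/*
 * Classify a memory intrinsic: report which nir_variable_modes it touches
 * and whether it reads and/or writes them. Returns false for intrinsics
 * this pass does not need to consider.
 */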
32
static bool
get_intrinsic_info(nir_intrinsic_instr *intrin, nir_variable_mode *modes,
                   bool *reads, bool *writes)
{
   switch (intrin->intrinsic) {
   case nir_intrinsic_image_deref_load:
      *modes = nir_src_as_deref(intrin->src[0])->modes;
      *reads = true;
      break;
   case nir_intrinsic_image_deref_store:
      *modes = nir_src_as_deref(intrin->src[0])->modes;
      *writes = true;
      break;
   case nir_intrinsic_image_deref_atomic_add:
   case nir_intrinsic_image_deref_atomic_umin:
   case nir_intrinsic_image_deref_atomic_imin:
   case nir_intrinsic_image_deref_atomic_umax:
   case nir_intrinsic_image_deref_atomic_imax:
   case nir_intrinsic_image_deref_atomic_and:
   case nir_intrinsic_image_deref_atomic_or:
   case nir_intrinsic_image_deref_atomic_xor:
   case nir_intrinsic_image_deref_atomic_exchange:
   case nir_intrinsic_image_deref_atomic_comp_swap:
      *modes = nir_src_as_deref(intrin->src[0])->modes;
      *reads = true;
      *writes = true;
      break;
   case nir_intrinsic_load_ssbo:
      *modes = nir_var_mem_ssbo;
      *reads = true;
      break;
   case nir_intrinsic_store_ssbo:
      *modes = nir_var_mem_ssbo;
      *writes = true;
      break;
   case nir_intrinsic_ssbo_atomic_add:
   case nir_intrinsic_ssbo_atomic_imin:
   case nir_intrinsic_ssbo_atomic_umin:
   case nir_intrinsic_ssbo_atomic_imax:
   case nir_intrinsic_ssbo_atomic_umax:
   case nir_intrinsic_ssbo_atomic_and:
   case nir_intrinsic_ssbo_atomic_or:
   case nir_intrinsic_ssbo_atomic_xor:
   case nir_intrinsic_ssbo_atomic_exchange:
   case nir_intrinsic_ssbo_atomic_comp_swap:
      *modes = nir_var_mem_ssbo;
      *reads = true;
      *writes = true;
      break;
   case nir_intrinsic_load_global:
      *modes = nir_var_mem_global;
      *reads = true;
      break;
   case nir_intrinsic_store_global:
      *modes = nir_var_mem_global;
      *writes = true;
      break;
   case nir_intrinsic_global_atomic_add:
   case nir_intrinsic_global_atomic_imin:
   case nir_intrinsic_global_atomic_umin:
   case nir_intrinsic_global_atomic_imax:
   case nir_intrinsic_global_atomic_umax:
   case nir_intrinsic_global_atomic_and:
   case nir_intrinsic_global_atomic_or:
   case nir_intrinsic_global_atomic_xor:
   case nir_intrinsic_global_atomic_exchange:
   case nir_intrinsic_global_atomic_comp_swap:
      *modes = nir_var_mem_global;
      *reads = true;
      *writes = true;
      break;
   case nir_intrinsic_load_deref:
      *modes = nir_src_as_deref(intrin->src[0])->modes;
      *reads = true;
      break;
   case nir_intrinsic_store_deref:
      *modes = nir_src_as_deref(intrin->src[0])->modes;
      *writes = true;
      break;
   case nir_intrinsic_deref_atomic_add:
   case nir_intrinsic_deref_atomic_imin:
   case nir_intrinsic_deref_atomic_umin:
   case nir_intrinsic_deref_atomic_imax:
   case nir_intrinsic_deref_atomic_umax:
   case nir_intrinsic_deref_atomic_and:
   case nir_intrinsic_deref_atomic_or:
   case nir_intrinsic_deref_atomic_xor:
   case nir_intrinsic_deref_atomic_exchange:
   case nir_intrinsic_deref_atomic_comp_swap:
      *modes = nir_src_as_deref(intrin->src[0])->modes;
      *reads = true;
      *writes = true;
      break;
   default:
      return false;
   }
   return true;
}

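/*
 * If the instruction is a scoped barrier carrying the requested
 * availability/visibility semantics, accumulate its memory modes into
 * *cur_modes and strip those semantics from the barrier. Otherwise, mark a
 * matching memory access ACCESS_COHERENT if one of the pending modes
 * covers it. Returns true on any change.
 */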
static bool
visit_instr(nir_instr *instr, uint32_t *cur_modes, unsigned vis_avail_sem)
{
   if (instr->type != nir_instr_type_intrinsic)
      return false;
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

   if (intrin->intrinsic == nir_intrinsic_scoped_barrier &&
       (nir_intrinsic_memory_semantics(intrin) & vis_avail_sem)) {
      *cur_modes |= nir_intrinsic_memory_modes(intrin);

      unsigned semantics = nir_intrinsic_memory_semantics(intrin);
      nir_intrinsic_set_memory_semantics(
         intrin, semantics & ~vis_avail_sem);
      return true;
   }

   if (!*cur_modes)
      return false; /* early exit */

   nir_variable_mode modes;
   bool reads = false, writes = false;
   if (!get_intrinsic_info(intrin, &modes, &reads, &writes))
      return false;

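   /* Make-visible only affects reads after the barrier and make-available
    * only affects writes before it, so accesses of the wrong kind can be
    * skipped.
    */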
   if (!reads && vis_avail_sem == NIR_MEMORY_MAKE_VISIBLE)
      return false;
   if (!writes && vis_avail_sem == NIR_MEMORY_MAKE_AVAILABLE)
      return false;

   if (!nir_intrinsic_has_access(intrin))
      return false;

   unsigned access = nir_intrinsic_access(intrin);

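   /* Leave the access alone if it is already coherent, may be freely
    * reordered, or is declared non-readable/non-writeable.
    */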
   if (access & (ACCESS_NON_READABLE | ACCESS_NON_WRITEABLE | ACCESS_CAN_REORDER | ACCESS_COHERENT))
      return false;

   if (*cur_modes & modes) {
      nir_intrinsic_set_access(intrin, access | ACCESS_COHERENT);
      return true;
   }

   return false;
}

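/*
 * Make-visible semantics cover reads that follow the barrier, so walk the
 * control flow forward, accumulating in *cur_modes the modes for which a
 * make-visible barrier has been seen. Both branches of an if inherit the
 * incoming modes, and a loop body is re-walked until it stops making
 * progress so that barriers propagate along the back edge.
 */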
static bool
lower_make_visible(nir_cf_node *cf_node, uint32_t *cur_modes)
{
   bool progress = false;
   switch (cf_node->type) {
   case nir_cf_node_block: {
      nir_block *block = nir_cf_node_as_block(cf_node);
      nir_foreach_instr(instr, block)
         progress |= visit_instr(instr, cur_modes, NIR_MEMORY_MAKE_VISIBLE);
      break;
   }
   case nir_cf_node_if: {
      nir_if *nif = nir_cf_node_as_if(cf_node);
      uint32_t cur_modes_then = *cur_modes;
      uint32_t cur_modes_else = *cur_modes;
      foreach_list_typed(nir_cf_node, if_node, node, &nif->then_list)
         progress |= lower_make_visible(if_node, &cur_modes_then);
      foreach_list_typed(nir_cf_node, if_node, node, &nif->else_list)
         progress |= lower_make_visible(if_node, &cur_modes_else);
      *cur_modes |= cur_modes_then | cur_modes_else;
      break;
   }
   case nir_cf_node_loop: {
      nir_loop *loop = nir_cf_node_as_loop(cf_node);
      bool loop_progress;
      do {
         loop_progress = false;
         foreach_list_typed(nir_cf_node, loop_node, node, &loop->body)
            loop_progress |= lower_make_visible(loop_node, cur_modes);
         progress |= loop_progress;
      } while (loop_progress);
      break;
   }
   case nir_cf_node_function:
      unreachable("Invalid cf type");
   }
   return progress;
}

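/*
 * Make-available semantics cover writes that precede the barrier, so this
 * is the mirror image of lower_make_visible: walk the control flow in
 * reverse, propagating modes from each make-available barrier back to the
 * stores that come before it.
 */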
static bool
lower_make_available(nir_cf_node *cf_node, uint32_t *cur_modes)
{
   bool progress = false;
   switch (cf_node->type) {
   case nir_cf_node_block: {
      nir_block *block = nir_cf_node_as_block(cf_node);
      nir_foreach_instr_reverse(instr, block)
         progress |= visit_instr(instr, cur_modes, NIR_MEMORY_MAKE_AVAILABLE);
      break;
   }
   case nir_cf_node_if: {
      nir_if *nif = nir_cf_node_as_if(cf_node);
      uint32_t cur_modes_then = *cur_modes;
      uint32_t cur_modes_else = *cur_modes;
      foreach_list_typed_reverse(nir_cf_node, if_node, node, &nif->then_list)
         progress |= lower_make_available(if_node, &cur_modes_then);
      foreach_list_typed_reverse(nir_cf_node, if_node, node, &nif->else_list)
         progress |= lower_make_available(if_node, &cur_modes_else);
      *cur_modes |= cur_modes_then | cur_modes_else;
      break;
   }
   case nir_cf_node_loop: {
      nir_loop *loop = nir_cf_node_as_loop(cf_node);
      bool loop_progress;
      do {
         loop_progress = false;
         foreach_list_typed_reverse(nir_cf_node, loop_node, node, &loop->body)
            loop_progress |= lower_make_available(loop_node, cur_modes);
         progress |= loop_progress;
      } while (loop_progress);
      break;
   }
   case nir_cf_node_function:
      unreachable("Invalid cf type");
   }
   return progress;
}

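/*
 * Run both lowerings over the entrypoint's body. Intended to be invoked
 * like any other NIR pass; a minimal usage sketch, assuming the usual
 * NIR_PASS helper:
 *
 *    NIR_PASS(progress, shader, nir_lower_memory_model);
 */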
bool
nir_lower_memory_model(nir_shader *shader)
{
   bool progress = false;

   struct exec_list *cf_list = &nir_shader_get_entrypoint(shader)->body;

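   /* Forward walk: push make-visible semantics down to subsequent reads. */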
   uint32_t modes = 0;
   foreach_list_typed(nir_cf_node, cf_node, node, cf_list)
      progress |= lower_make_visible(cf_node, &modes);

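   /* Reverse walk: push make-available semantics back to preceding writes. */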
   modes = 0;
   foreach_list_typed_reverse(nir_cf_node, cf_node, node, cf_list)
      progress |= lower_make_available(cf_node, &modes);

   return progress;
}