/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 */

#include "nir.h"
#include "nir_worklist.h"
#include "nir_vla.h"

/*
 * Basic liveness analysis.  This works only in SSA form.
 *
 * This liveness pass treats phi nodes as being melded into the space between
 * blocks, so that the destination of a phi is in the live-in of the block
 * in which it resides and its sources are in the live-out of the
 * corresponding predecessor blocks.  By formulating the liveness information
 * in this way, we ensure that the definition of any variable dominates its
 * entire live range.  This holds because the only way the definition of an
 * SSA value could fail to dominate one of its uses is if that use is in a
 * phi node; with this formulation, a phi source lives in the live-out of the
 * corresponding predecessor block but not in the live-in of the block
 * containing the phi node.
 */
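/*
 * Viewed as a conventional backward dataflow problem, the fixed point
 * computed below is roughly:
 *
 *    live_out(B) = union over successors S of live_in(S), with phi
 *                  destinations removed and only the phi sources for the
 *                  edge B -> S added (see propagate_across_edge())
 *    live_in(B)  = (live_out(B) - defs(B)) | uses(B)
 *
 * iterated over a worklist of blocks until nothing changes.
 */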

struct live_ssa_defs_state {
   unsigned bitset_words;

   /* Used in propagate_across_edge() */
   BITSET_WORD *tmp_live;

   nir_block_worklist worklist;
};

/* Initialize the liveness data to zero and add the given block to the
 * worklist.
 */
static bool
init_liveness_block(nir_block *block,
                    struct live_ssa_defs_state *state)
{
   block->live_in = reralloc(block, block->live_in, BITSET_WORD,
                             state->bitset_words);
   memset(block->live_in, 0, state->bitset_words * sizeof(BITSET_WORD));

   block->live_out = reralloc(block, block->live_out, BITSET_WORD,
                              state->bitset_words);
   memset(block->live_out, 0, state->bitset_words * sizeof(BITSET_WORD));

   nir_block_worklist_push_head(&state->worklist, block);

   return true;
}

static bool
set_src_live(nir_src *src, void *void_live)
{
   BITSET_WORD *live = void_live;

   if (!src->is_ssa)
      return true;

   if (src->ssa->parent_instr->type == nir_instr_type_ssa_undef)
      return true;   /* undefined variables are never live */

   BITSET_SET(live, src->ssa->index);

   return true;
}

static bool
set_ssa_def_dead(nir_ssa_def *def, void *void_live)
{
   BITSET_WORD *live = void_live;

   BITSET_CLEAR(live, def->index);

   return true;
}
/** Propagates the live-in of succ across the edge to the live-out of pred
 *
 * Phi nodes exist "between" blocks and all the phi nodes at the start of a
 * block act "in parallel".  When we propagate from the live-in of one
 * block to the live-out of the other, we have to kill any writes from phis
 * and make live any sources.
 *
 * Returns true if updating the live-out of pred added anything.
 */
static bool
propagate_across_edge(nir_block *pred, nir_block *succ,
                      struct live_ssa_defs_state *state)
{
   BITSET_WORD *live = state->tmp_live;
   memcpy(live, succ->live_in, state->bitset_words * sizeof *live);

   nir_foreach_instr(instr, succ) {
      if (instr->type != nir_instr_type_phi)
         break;
      nir_phi_instr *phi = nir_instr_as_phi(instr);

      assert(phi->dest.is_ssa);
      set_ssa_def_dead(&phi->dest.ssa, live);
   }

   nir_foreach_instr(instr, succ) {
      if (instr->type != nir_instr_type_phi)
         break;
      nir_phi_instr *phi = nir_instr_as_phi(instr);

      nir_foreach_phi_src(src, phi) {
         if (src->pred == pred) {
            set_src_live(&src->src, live);
            break;
         }
      }
   }

   BITSET_WORD progress = 0;
   for (unsigned i = 0; i < state->bitset_words; ++i) {
      progress |= live[i] & ~pred->live_out[i];
      pred->live_out[i] |= live[i];
   }
   return progress != 0;
}

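/* Computes the block-level live_in/live_out sets for every block in impl.
 *
 * Note: callers typically do not invoke this directly; it is usually run
 * via nir_metadata_require(impl, nir_metadata_live_ssa_defs), which keeps
 * the resulting sets cached until the metadata is next invalidated.
 */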
void
nir_live_ssa_defs_impl(nir_function_impl *impl)
{
   struct live_ssa_defs_state state = {
      .bitset_words = BITSET_WORDS(impl->ssa_alloc),
   };
   state.tmp_live = rzalloc_array(impl, BITSET_WORD, state.bitset_words);

   /* Number the instructions so we can do cheap interference tests using the
    * instruction index.
    */
   nir_metadata_require(impl, nir_metadata_instr_index);

   nir_block_worklist_init(&state.worklist, impl->num_blocks, NULL);

   /* Allocate live_in and live_out sets and add all of the blocks to the
    * worklist.
    */
   nir_foreach_block(block, impl) {
      init_liveness_block(block, &state);
   }

   /* We're now ready to work through the worklist and update the liveness
    * sets of each of the blocks.  By the time we get to this point, every
    * block in the function implementation has been pushed onto the
    * worklist in reverse order.  As long as we keep the worklist
    * up-to-date as we go, everything will get covered.
    */
   while (!nir_block_worklist_is_empty(&state.worklist)) {
      /* We pop them off in the reverse order we pushed them on.  This way
       * the first walk of the instructions is backwards so we only walk
       * once in the case of no control flow.
       */
      nir_block *block = nir_block_worklist_pop_head(&state.worklist);

      memcpy(block->live_in, block->live_out,
             state.bitset_words * sizeof(BITSET_WORD));

      nir_if *following_if = nir_block_get_following_if(block);
      if (following_if)
         set_src_live(&following_if->condition, block->live_in);

      nir_foreach_instr_reverse(instr, block) {
         /* Phi nodes are handled separately so we want to skip them.  Since
          * we are going backwards and they are at the beginning, we can just
          * break as soon as we see one.
          */
         if (instr->type == nir_instr_type_phi)
            break;

         nir_foreach_ssa_def(instr, set_ssa_def_dead, block->live_in);
         nir_foreach_src(instr, set_src_live, block->live_in);
      }

      /* Walk over all of the predecessors of the current block, updating
       * their live-out with the live-in of this one.  If anything has
       * changed, add the predecessor to the worklist so that we ensure
       * that the new information is used.
       */
      set_foreach(block->predecessors, entry) {
         nir_block *pred = (nir_block *)entry->key;
         if (propagate_across_edge(pred, block, &state))
            nir_block_worklist_push_tail(&state.worklist, pred);
      }
   }

   ralloc_free(state.tmp_live);
   nir_block_worklist_fini(&state.worklist);
}

static bool
src_does_not_use_def(nir_src *src, void *def)
{
   return !src->is_ssa || src->ssa != (nir_ssa_def *)def;
}

static bool
search_for_use_after_instr(nir_instr *start, nir_ssa_def *def)
{
   /* Only look for a use strictly after the given instruction */
   struct exec_node *node = start->node.next;
   while (!exec_node_is_tail_sentinel(node)) {
      nir_instr *instr = exec_node_data(nir_instr, node, node);
      if (!nir_foreach_src(instr, src_does_not_use_def, def))
         return true;
      node = node->next;
   }

   /* Uses in an if condition are considered to be in the block immediately
    * preceding the if, so we also need to check the following if condition,
    * if any.
    */
   nir_if *following_if = nir_block_get_following_if(start->block);
   if (following_if && following_if->condition.is_ssa &&
       following_if->condition.ssa == def)
      return true;

   return false;
}

/* Returns true if def is live at instr, assuming that def comes before
 * instr in a pre-order DFS of the dominance tree.
 */
static bool
nir_ssa_def_is_live_at(nir_ssa_def *def, nir_instr *instr)
{
   if (BITSET_TEST(instr->block->live_out, def->index)) {
      /* Since def dominates instr, if def is in the live-out of the block,
       * it's live at instr.
       */
      return true;
   } else {
      if (BITSET_TEST(instr->block->live_in, def->index) ||
          def->parent_instr->block == instr->block) {
         /* The def is either live coming into instr's block or is defined
          * in the same block, so we simply need to see whether it is used
          * after instr.
          */
         return search_for_use_after_instr(instr, def);
      } else {
         return false;
      }
   }
}

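/* Returns true if the two SSA defs interfere, i.e. if one of them is live
 * at the point where the other is defined.
 *
 * Assumes block-level liveness (nir_metadata_live_ssa_defs) and instruction
 * indices (nir_metadata_instr_index) are valid for the containing impl.
 */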
bool
nir_ssa_defs_interfere(nir_ssa_def *a, nir_ssa_def *b)
{
   if (a->parent_instr == b->parent_instr) {
      /* Two variables defined at the same time interfere assuming at
       * least one isn't dead.
       */
      return true;
   } else if (a->parent_instr->type == nir_instr_type_ssa_undef ||
              b->parent_instr->type == nir_instr_type_ssa_undef) {
      /* If either variable is an ssa_undef, then there's no interference */
      return false;
   } else if (a->parent_instr->index < b->parent_instr->index) {
      return nir_ssa_def_is_live_at(a, b->parent_instr);
   } else {
      return nir_ssa_def_is_live_at(b, a->parent_instr);
   }
}

/* Expands an SSA def's live interval to cover its definition and all of its
 * uses.  Control flow effects are handled separately.
 */
static bool
def_cb(nir_ssa_def *def, void *state)
{
   nir_instr_liveness *liveness = state;
   nir_instr *instr = def->parent_instr;
   int index = def->index;

   liveness->defs[index].start = MIN2(liveness->defs[index].start, instr->index);

   nir_foreach_use(src, def) {
      liveness->defs[index].end = MAX2(liveness->defs[index].end,
                                       src->parent_instr->index);
   }

   return true;
}

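/* Returns, for each SSA def in impl, the [start, end] range of instruction
 * indices (instr->index) over which that def is live, indexed by the def's
 * index.  The result is ralloc'd with a NULL parent, so the caller is
 * expected to ralloc_free() it when done.
 */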
nir_instr_liveness *
nir_live_ssa_defs_per_instr(nir_function_impl *impl)
{
   /* We'll use block-level live_ssa_defs to expand our per-instr ranges for
    * control flow.
    */
   nir_metadata_require(impl,
                        nir_metadata_block_index |
                        nir_metadata_instr_index |
                        nir_metadata_live_ssa_defs);

   /* Make our struct. */
   nir_instr_liveness *liveness = ralloc(NULL, nir_instr_liveness);
   liveness->defs = rzalloc_array(liveness, nir_liveness_bounds,
                                  impl->ssa_alloc);

   /* Set our starts so we can use MIN2() as we accumulate bounds. */
   for (unsigned i = 0; i < impl->ssa_alloc; i++)
      liveness->defs[i].start = ~0;

   nir_foreach_block(block, impl) {
      unsigned index;
      BITSET_FOREACH_SET(index, block->live_in, impl->ssa_alloc) {
         liveness->defs[index].start = MIN2(liveness->defs[index].start,
                                            block->start_ip);
      }

      nir_foreach_instr(instr, block) {
         nir_foreach_ssa_def(instr, def_cb, liveness);
      }

      /* Track an if condition's use.  We need to make sure that our value is
       * live across the if reference, where we don't have an instr->index
       * representing the use.  Mark it as live through the end of the block.
       */
      nir_if *nif = nir_block_get_following_if(block);
      if (nif) {
         if (nif->condition.is_ssa) {
            liveness->defs[nif->condition.ssa->index].end = MAX2(
               liveness->defs[nif->condition.ssa->index].end, block->end_ip);
         }
      }

      BITSET_FOREACH_SET(index, block->live_out, impl->ssa_alloc) {
         liveness->defs[index].end = MAX2(liveness->defs[index].end,
                                          block->end_ip);
      }
   }

   return liveness;
}