4 * @author Sebastian Hack
6 * Liveness checks as developed by Benoit Boissinot, Fabrice Rastello and myself.
 * The specialty here is that nothing has to be recomputed if new nodes are created
11 * This algo has one core routine check_live_end_internal() which performs the liveness check.
12 * It only relies on the precomputation done in the constructor, which in turn needs:
14 * - the dominance tree
15 * - data obtained from a depth-first-search
17 * The precomputation remains valid as long as the CFG is not altered.
19 * Copyright (C) 2007 Universitaet Karlsruhe
20 * Released under the GPL
25 #include "irgraph_t.h"
26 #include "irphase_t.h"
27 #include "iredges_t.h"
35 #include "irlivechk.h"
37 typedef struct _bl_info_t {
38 ir_node *block; /**< The block. */
40 int id; /**< a tight number for the block.
41 we're just reusing the pre num from
44 bitset_t *red_reachable; /**< Holds all id's if blocks reachable
45 in the CFG modulo back edges. */
47 bitset_t *be_tgt_reach; /**< target blocks of back edges whose
48 sources are reachable from this block
49 in the reduced graph. */
51 bitset_t *be_tgt_dom; /**< target blocks of back edges which
52 are dominated by this block. */
/** Retrieve the bl_info_t the liveness phase attached to block @p bl. */
#define get_block_info(lv, bl) ((bl_info_t *) phase_get_irn_data(&(lv)->ph, bl))
60 firm_dbg_module_t *dbg;
62 bitset_t *back_edge_src;
63 bitset_t *back_edge_tgt;
/**
 * Phase callback: allocate and initialize the per-block liveness info
 * (bl_info_t) for block @p irn.  The block id re-uses the DFS pre-order
 * number so it can serve as a dense index; the three bitsets start out
 * empty and are filled later by red_trans_closure() and
 * compute_back_edge_sets().
 */
static void *init_block_data(ir_phase *ph, ir_node *irn, void *old)
	lv_chk_t *lv = container_of(ph, lv_chk_t, ph);
	bl_info_t *bi = phase_alloc(ph, sizeof(bi[0]));

	/* the DFS pre num is a tight index into lv->map and all bitsets */
	bi->id = dfs_get_pre_num(lv->dfs, irn);
	bi->red_reachable = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks);
	bi->be_tgt_reach = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks);
	bi->be_tgt_dom = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks);
 * Filter function to select all nodes for which liveness is computed.
 *
 * @return 1 if the node shall be considered in liveness, 0 if not.
static INLINE int is_liveness_node(const ir_node *irn)
	/* decide per opcode whether the node carries a value subject to liveness */
	switch(get_irn_opcode(irn)) {
 * Compute the transitive closure on the reduced graph.
 * The reduced graph is the original graph without back edges.
 * Since that is a DAG, a reverse post order of the graph gives a toposort
 * which is ideally suited to compute the transitive closure.
 * Note also that the DFS tree of the reduced graph is the same as the one
 * of the original graph. This saves us computing a new reverse post order.
 * We also can re-use the DFS tree of the original graph.
static void red_trans_closure(lv_chk_t *lv)
	/* Visit the blocks in ascending post-order number; every non-back-edge
	 * successor has a smaller post number (see the assert below), so its
	 * red_reachable set is already complete when it is merged here. */
	for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) {
		ir_node *bl = dfs_get_post_num_node(lv->dfs, i);
		bl_info_t *bi = get_block_info(lv, bl);

		const ir_edge_t *edge;

		foreach_block_succ (bl, edge) {
			ir_node *succ = get_edge_src_irn(edge);
			bl_info_t *si = get_block_info(lv, succ);
			dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, bl, succ);

			 * if the edge to the successor is no back edge, include all
			 * blocks reachable from there into the reachable set of the
			 * current node
			if (kind != DFS_EDGE_BACK) {
				/* in the reduced (acyclic) graph the edge must point
				 * "downwards" in post-order */
				assert(dfs_get_post_num(lv->dfs, bl)
						> dfs_get_post_num(lv->dfs, succ));
				bitset_or(bi->red_reachable, si->red_reachable);
				bitset_set(bi->red_reachable, si->id);
			/* mark the block as a back edge src and succ as back edge tgt. */
				bitset_set(lv->back_edge_src, bi->id);
				bitset_set(lv->back_edge_tgt, si->id);
 * Compute the two back edge sets for each block.
 * <code>be_tgt_reach</code> contains all target blocks of back edges reachable from a node.
 * <code>be_tgt_dom</code> contains all target blocks of back edges strictly dominated
static void compute_back_edge_sets(lv_chk_t *lv, ir_node *bl)
	bl_info_t *bi = get_block_info(lv, bl);
	bitset_t *tmp = bitset_alloca(lv->n_blocks);

	/* recurse bottom-up over the dominance tree rooted at bl */
	dominates_for_each (bl, n) {
		bl_info_t *ni = get_block_info(lv, n);

		/* compute information for dominance sub tree */
		compute_back_edge_sets(lv, n);

		 * of course all blocks dominated by blocks in the
		 * subtree are also dominated by bl.
		bitset_or(bi->be_tgt_dom, ni->be_tgt_dom);

		 * add the immediate dominee to the back edge tgt dominance
		 * bitset if it is the target node of a back edge.
		if (bitset_is_set(lv->back_edge_tgt, ni->id))
			bitset_set(bi->be_tgt_dom, ni->id);

	 * iterate over all back edge src nodes which are reachable from
	 * this node and put the targets of the back edges in the be_tgt_reach
	 * bitset of the node.
	bitset_copy(tmp, bi->red_reachable);
	bitset_set(tmp, bi->id);
	/* restrict the candidate set to blocks that are back edge sources */
	bitset_and(tmp, lv->back_edge_src);
	bitset_foreach (tmp, elm) {
		ir_node *src = lv->map[elm]->block;
		const ir_edge_t *edge;

		foreach_block_succ (src, edge) {
			ir_node *succ = get_edge_src_irn(edge);
			dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, src, succ);

			if (kind == DFS_EDGE_BACK) {
				bl_info_t *si = get_block_info(lv, succ);
				bitset_set(bi->be_tgt_reach, si->id);
/**
 * Create a new liveness check environment for @p irg.
 * Runs all the precomputation (DFS on the CFG, transitive closure of the
 * reduced graph, back edge sets).  The result stays valid as long as the
 * CFG is not altered.
 */
lv_chk_t *lv_chk_new(ir_graph *irg)
	lv_chk_t *res = xmalloc(sizeof(res[0]));
	struct obstack *obst;

	phase_init(&res->ph, "liveness check", irg, PHASE_DEFAULT_GROWTH, init_block_data, NULL);
	obst = phase_obst(&res->ph);

	FIRM_DBG_REGISTER(res->dbg, "ir.ana.lvchk");

	/* the DFS pre/post numbering of the CFG successors drives everything */
	res->dfs = dfs_new(&absgraph_irg_cfg_succ, irg);
	res->n_blocks = dfs_get_n_nodes(res->dfs);
	res->back_edge_src = bitset_obstack_alloc(obst, res->n_blocks);
	res->back_edge_tgt = bitset_obstack_alloc(obst, res->n_blocks);
	res->map = obstack_alloc(obst, res->n_blocks * sizeof(res->map[0]));

	/* debug aid: dump the DFS tree and the block graph to files */
	ir_snprintf(name, sizeof(name), "dfs_%F.dot", irg);
	if ((f = fopen(name, "wt")) != NULL) {
		dfs_dump(res->dfs, f);
	dump_ir_block_graph(irg, "-lvchk");

	/* fill the map which maps pre_num to block infos */
	for (i = res->n_blocks - 1; i >= 0; --i) {
		ir_node *irn = dfs_get_pre_num_node(res->dfs, i);
		res->map[i] = phase_get_or_set_irn_data(&res->ph, irn);

	/* first of all, compute the transitive closure of the CFG *without* back edges */
	red_trans_closure(res);

	/* now fill the two remaining bitsets concerning back edges */
	compute_back_edge_sets(res, get_irg_start_block(irg));

	DBG((res->dbg, LEVEL_1, "liveness chk in %+F\n", irg));
	for (i = res->n_blocks - 1; i >= 0; --i) {
		ir_node *irn = dfs_get_pre_num_node(res->dfs, i);
		bl_info_t *bi = get_block_info(res, irn);
		DBG((res->dbg, LEVEL_1, "lv_chk for %d -> %+F\n", i, irn));
		DBG((res->dbg, LEVEL_1, "\tred reach: %B\n", bi->red_reachable));
		DBG((res->dbg, LEVEL_1, "\ttgt reach: %B\n", bi->be_tgt_reach));
		DBG((res->dbg, LEVEL_1, "\ttgt dom: %B\n", bi->be_tgt_dom));

	DBG((res->dbg, LEVEL_1, "back edge src: %B\n", res->back_edge_src));
	DBG((res->dbg, LEVEL_1, "back edge tgt: %B\n", res->back_edge_tgt));
/**
 * Free a liveness check environment created by lv_chk_new().
 * All per-block data lives on the phase obstack, so freeing the
 * obstack releases it in one go.
 */
void lv_chk_free(lv_chk_t *lv)
	obstack_free(phase_obst(&lv->ph), NULL);
 * Check if a node is live at the end of a block.
 * This function is for internal use as its code is shared between
 * the in/end routines below. It is almost the "live_end" routine
 * but passing in the bitset for recording the blocks where the variable
 * is used saves some effort in the "live_in" routine. See below for
 *
 * @param lv   The liveness check environment.
 * @param what The node to check for.
 * @param bl   The block under investigation.
 * @param end  If 1, it is tested if the node is live at the end.
 *             If 0, it is only tested if the node is live out.
 * @param uses A bitset where this routine records all ids of blocks
 *             where this variable is used. Note that the bitset
 *             is only guaranteed to be filled if the node was not
 *             live at the end of the block.
 * @return 1, if @p what is live at the end at @p bl.
static int check_live_internal(const lv_chk_t *lv, const ir_node *what, const ir_node *bl, int end, bitset_t *uses)
	assert(is_Block(bl) && "can only check for liveness in a block");

	/* nodes filtered out by is_liveness_node() are never live */
	if (!is_liveness_node(what))

	/* under SSA the definition dominates every use, so a value cannot be
	 * live in any block not dominated by its definition block */
	what_bl = get_nodes_block(what);
	if (!block_dominates(what_bl, bl))

	 * If the block in question is the same as the definition block,
	 * the algorithm is simple. Just check for uses not inside this block.
		const ir_edge_t *edge;

		DBG((lv->dbg, LEVEL_2, "lv check same block %+F in %+F\n", what, bl));
		foreach_out_edge (what, edge) {
			ir_node *use = get_edge_src_irn(edge);

			if (!is_liveness_node(use))

			use_bl = get_nodes_block(use);
				/* for a Phi use, the value is effectively used at the end
				 * of the corresponding control flow predecessor block */
				int pos = get_edge_src_pos(edge);
				use_bl = get_Block_cfgpred_block(use_bl, pos);

				if (end && use_bl == bl) {
					DBG((lv->dbg, LEVEL_2, "\tphi %+F in succ %+F,%d -> live end\n", use, use_bl, pos));

			if (use_bl != what_bl)

	/* this is the complicated case */
		bitset_t *visited = bitset_alloca(lv->n_blocks);
		bitset_t *to_visit = bitset_alloca(lv->n_blocks);
		bitset_t *next = bitset_alloca(lv->n_blocks);
		bl_info_t *def = get_block_info(lv, what_bl);
		bl_info_t *bli = get_block_info(lv, bl);

		const ir_edge_t *edge;

		/* first, collect the ids of all blocks in which the value is used */
		foreach_out_edge (what, edge) {
			ir_node *user = get_edge_src_irn(edge);

			if (!is_liveness_node(user))

			use_bl = get_nodes_block(user);
				/* a Phi use counts as a use in the predecessor block */
				int pos = get_edge_src_pos(edge);
				ir_node *pred_bl = get_Block_cfgpred_block(use_bl, pos);
				bl_info_t *bi = get_block_info(lv, pred_bl);

				if (end && pred_bl == bl)

				bitset_set(uses, bi->id);

			bl_info_t *bi = get_block_info(lv, use_bl);
			bitset_set(uses, bi->id);
		DBG((lv->dbg, LEVEL_2, "\tuses: %B\n", uses));

		/* uses inside the definition block itself do not make the value
		 * live out of that block */
		bitset_clear(uses, def->id);
		bitset_set(to_visit, bli->id);
			/* pick some not yet processed block from the work set */
			int id = bitset_next_set(to_visit, 0);
			bl_info_t *bi = lv->map[id];

			DBG((lv->dbg, LEVEL_2, "\tto visit: %B\n", to_visit));
			DBG((lv->dbg, LEVEL_2, "\tvisited: %B\n", visited));

			 * if one of the use blocks is reachable, the node must be live
			 * there. Note that this is not sufficient, since the nodes
			 * reachable via back edges are not contained in the
			 * red_reachable set.
			if (bitset_intersect(bi->red_reachable, uses))

			 * if not, we have to check the back edges in question, if
			 * they lead to places which are reachable.
			bitset_set(visited, id);
			bitset_or(visited, bi->red_reachable);

			/* back edge targets are only interesting if they are dominated
			 * by the definition; others cannot keep the value alive */
			bitset_copy(next, bi->be_tgt_reach);
			bitset_and(next, def->be_tgt_dom);
			DBG((lv->dbg, LEVEL_2, "\tnext: %B\n----\n", next));

			if (bitset_intersect(uses, next))

			/* continue the search from the not yet visited back edge targets */
			bitset_or(to_visit, next);
			bitset_andnot(to_visit, visited);

		} while (!bitset_is_empty(to_visit));
/**
 * Check if @p what is live at the end of block @p bl.
 */
int lv_chk_bl_end(const lv_chk_t *lv, const ir_node *bl, const ir_node *what)
	/* scratch bitset; the recorded use blocks are not needed here */
	bitset_t *uses = bitset_alloca(lv->n_blocks);
	return check_live_internal(lv, what, bl, 1, uses);
/**
 * Check if @p what is live out of block @p bl (end = 0 excludes the
 * "live end" special handling of Phi uses in @p bl itself).
 */
int lv_chk_bl_out(const lv_chk_t *lv, const ir_node *bl, const ir_node *what)
	/* scratch bitset; the recorded use blocks are not needed here */
	bitset_t *uses = bitset_alloca(lv->n_blocks);
	return check_live_internal(lv, what, bl, 0, uses);
/**
 * Check if @p what is live in at block @p bl.
 */
int lv_chk_bl_in(const lv_chk_t *lv, const ir_node *bl, const ir_node *what)
	 * only check if the node is not defined in this block.
	 * Under SSA, a node can never be live in at its definition block.
	if (get_nodes_block(what) != bl) {
		bl_info_t *bi = get_block_info(lv, bl);

		bitset_t *uses = bitset_alloca(lv->n_blocks);
		int live_at_end = check_live_internal(lv, what, bl, 1, uses);

		/* to be live in, the value must be live at the end or have a use in this block */
		return live_at_end || bitset_is_set(uses, id);