2 * Copyright (C) 1995-2007 Inria Rhone-Alpes. All right reserved.
4 * This file is part of libFirm.
6 * This file may be distributed and/or modified under the terms of the
7 * GNU General Public License version 2 as published by the Free Software
8 * Foundation and appearing in the file LICENSE.GPL included in the
9 * packaging of this file.
11 * Licensees holding valid libFirm Professional Edition licenses may use
12 * this file in accordance with the libFirm Commercial License.
13 * Agreement provided with the Software.
15 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
23 * @author Sebastian Hack
26 * Liveness checks as developed by Benoit Boissinot, Fabrice Rastello and myself.
28 * The special property here is that nothing has to be recomputed if new nodes are created
29 * or old ones deleted.
31 * This algorithm has one core routine, check_live_end_internal(), which performs the liveness check.
32 * It only relies on the precomputation done in the constructor, which in turn needs:
34 * - the dominance tree
35 * - data obtained from a depth-first-search
37 * The precomputation remains valid as long as the CFG is not altered.
43 /* statev is expensive here, only enable when needed */
44 #define DISABLE_STATEV
46 #include "irgraph_t.h"
48 #include "irnodemap.h"
49 #include "iredges_t.h"
59 #include "irlivechk.h"
/* Per-block data for the liveness checker, allocated lazily on the
 * checker's obstack by get_block_info(). */
63 typedef struct bl_info_t {
64 const ir_node *block; /**< The block. */
67 int id : 31; /**< a tight number for the block.
68 we're just reusing the pre num from
70 bitset_t *red_reachable; /**< Holds all id's of blocks reachable
71 in the CFG modulo back edges. */
73 bitset_t *be_tgt_reach; /**< target blocks of back edges whose
74 sources are reachable from this block
75 in the reduced graph. */
/* NOTE(review): the closing of bl_info_t (and a be_tgt_calc flag used
 * below) appears elided in this chunk; the fields that follow belong to
 * the checker struct (lv_chk_t), whose opening is also not visible. */
79 ir_nodemap block_infos;
83 bitset_t *back_edge_src; /* block ids that are sources of back edges */
84 bitset_t *back_edge_tgt; /* block ids that are targets of back edges */
86 DEBUG_ONLY(firm_dbg_module_t *dbg;)
/**
 * Return the per-block info for @p block, creating and initializing it
 * on the checker's obstack if the node map does not yet contain one.
 *
 * NOTE(review): braces, the presumably present "info == NULL" guard
 * around the allocation, and the final "return info;" appear elided in
 * this chunk — confirm against the complete file.
 */
89 static bl_info_t *get_block_info(lv_chk_t *lv, const ir_node *block)
91 bl_info_t *info = ir_nodemap_get(bl_info_t, &lv->block_infos, block);
93 info = OALLOC(&lv->obst, bl_info_t);
/* dense block index: reuse the dominance-tree pre-order number */
94 info->id = get_Block_dom_tree_pre_num(block);
96 info->red_reachable = bitset_obstack_alloc(&lv->obst, lv->n_blocks);
97 info->be_tgt_reach = bitset_obstack_alloc(&lv->obst, lv->n_blocks);
/* back-edge-target reachability not yet computed for this block */
98 info->be_tgt_calc = 0;
99 ir_nodemap_insert(&lv->block_infos, block, info);
105 * Compute the transitive closure on the reduced graph.
106 * The reduced graph is the original graph without back edges.
107 * Since that is a DAG, a reverse post order of the graph gives a toposort
108 * which is ideally suited to compute the transitive closure.
109 * Note also that the DFS tree of the reduced graph is the same as the one
110 * of the original graph. This saves us computing a new reverse post order.
111 * We also can re-use the DFS tree of the original graph.
/**
 * Walk all blocks in reverse post order (a topological order of the CFG
 * with back edges removed) and propagate the reduced-graph reachability
 * sets; additionally record the sources and targets of all back edges.
 */
113 static void red_trans_closure(lv_chk_t *lv)
117 for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) {
118 const ir_node *bl = (const ir_node*) dfs_get_post_num_node(lv->dfs, i);
119 bl_info_t *bi = get_block_info(lv, bl);
/* every block is reduced-reachable from itself */
121 bitset_set(bi->red_reachable, bi->id);
122 foreach_block_succ (bl, edge) {
123 ir_node *succ = get_edge_src_irn(edge);
124 bl_info_t *si = get_block_info(lv, succ);
125 dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, bl, succ);
128 * if the successor is no back edge, include all reachable
129 * blocks from there into the reachable set of the current node
131 if (kind != DFS_EDGE_BACK) {
/* post order guarantees non-back-edge successors were handled first */
132 assert(dfs_get_post_num(lv->dfs, bl) > dfs_get_post_num(lv->dfs, succ));
133 bitset_or(bi->red_reachable, si->red_reachable);
/* NOTE(review): an "else" branch appears elided here — the two sets
 * below are presumably only filled for back edges; confirm upstream. */
136 /* mark the block as a back edge src and succ as back edge tgt. */
138 bitset_set(lv->back_edge_src, bi->id);
139 bitset_set(lv->back_edge_tgt, si->id);
/**
 * Recursively compute, for block @p bl, the set T_bl of back-edge
 * targets reachable from it: find all back-edge sources that are
 * (reduced-)reachable from bl, and merge in the chains of their
 * back-edge targets that bl cannot reach in the reduced graph.
 *
 * NOTE(review): the line that marks bi->be_tgt_calc as done appears
 * elided in this chunk — without it the memoization guard below would
 * never fire; confirm against the complete file.
 */
147 static void compute_back_edge_chain(lv_chk_t *lv, const ir_node *bl)
149 bitset_t *tmp = bitset_alloca(lv->n_blocks);
150 bl_info_t *bi = get_block_info(lv, bl);
152 DBG((lv->dbg, LEVEL_2, "computing T_%d\n", bi->id));
154 /* put all back edge sources reachable (reduced) from here in tmp */
155 bitset_copy(tmp, bi->red_reachable);
156 bitset_set(tmp, bi->id);
157 bitset_and(tmp, lv->back_edge_src);
160 DBG((lv->dbg, LEVEL_2, "\treachable be src: %B\n", tmp));
162 /* iterate over them ... */
163 bitset_foreach(tmp, elm) {
164 bl_info_t *si = lv->map[elm];
166 /* and find back edge targets which are not reduced reachable from bl */
167 foreach_block_succ (si->block, edge) {
168 ir_node *tgt = get_edge_src_irn(edge);
169 bl_info_t *ti = get_block_info(lv, tgt);
170 dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, si->block, tgt);
172 if (kind == DFS_EDGE_BACK && !bitset_is_set(bi->red_reachable, ti->id)) {
/* recurse first so ti's chain is complete before we merge it */
173 if (!ti->be_tgt_calc)
174 compute_back_edge_chain(lv, tgt);
175 bitset_set(bi->be_tgt_reach, ti->id);
176 bitset_or(bi->be_tgt_reach, ti->be_tgt_reach);
/* a block does not count as its own back-edge target here */
179 bitset_clear(bi->be_tgt_reach, bi->id);
/**
 * Compute the back-edge chains for the whole graph: first for every
 * back-edge source, then propagate be_tgt_reach along non-back edges in
 * reverse post order, and finally make each block a member of its own
 * target-reach set.
 */
184 static inline void compute_back_edge_chains(lv_chk_t *lv)
188 DBG((lv->dbg, LEVEL_2, "back edge sources: %B\n", lv->back_edge_src));
/* seed the chains at all back-edge sources */
189 bitset_foreach(lv->back_edge_src, elm) {
190 compute_back_edge_chain(lv, lv->map[elm]->block);
/* propagate along non-back edges in reverse post order */
193 for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) {
194 const ir_node *bl = (const ir_node*) dfs_get_post_num_node(lv->dfs, i);
195 bl_info_t *bi = get_block_info(lv, bl);
/* back-edge targets already got their final set above */
197 if (!bitset_is_set(lv->back_edge_tgt, bi->id)) {
198 foreach_block_succ (bl, edge) {
199 ir_node *succ = get_edge_src_irn(edge);
200 bl_info_t *si = get_block_info(lv, succ);
201 dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, bl, succ);
203 if (kind != DFS_EDGE_BACK) {
204 assert(dfs_get_post_num(lv->dfs, bl) > dfs_get_post_num(lv->dfs, succ));
205 bitset_or(bi->be_tgt_reach, si->be_tgt_reach);
/* every block can "see" itself as a viewing point */
211 for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) {
212 const ir_node *bl = (const ir_node*) dfs_get_post_num_node(lv->dfs, i);
213 bl_info_t *bi = get_block_info(lv, bl);
214 bitset_set(bi->be_tgt_reach, bi->id);
/**
 * Construct the liveness-check precomputation for @p irg: run a DFS on
 * the CFG, build the pre-num -> block-info map, compute the reduced
 * transitive closure and the back-edge chains. The result stays valid
 * as long as the CFG of @p irg is not altered.
 *
 * NOTE(review): the final "return res;" (and possibly further setup
 * lines) appear elided in this chunk — confirm against the full file.
 */
218 lv_chk_t *lv_chk_new(ir_graph *irg)
220 lv_chk_t *res = XMALLOC(lv_chk_t);
226 ir_nodemap_init(&res->block_infos, irg);
227 obstack_init(&res->obst);
229 FIRM_DBG_REGISTER(res->dbg, "ir.ana.lvchk");
231 res->dfs = dfs_new(&absgraph_irg_cfg_succ, irg);
232 res->n_blocks = dfs_get_n_nodes(res->dfs);
233 res->back_edge_src = bitset_obstack_alloc(&res->obst, res->n_blocks);
234 res->back_edge_tgt = bitset_obstack_alloc(&res->obst, res->n_blocks);
235 res->map = OALLOCNZ(&res->obst, bl_info_t*, res->n_blocks);
237 /* fill the map which maps pre_num to block infos */
238 for (i = res->n_blocks - 1; i >= 0; --i) {
239 ir_node *irn = (ir_node *) dfs_get_pre_num_node(res->dfs, i);
240 bl_info_t *bi = get_block_info(res, irn);
241 assert(bi->id < res->n_blocks);
242 assert(res->map[bi->id] == NULL);
243 res->map[bi->id] = bi;
246 /* first of all, compute the transitive closure of the CFG *without* back edges */
247 red_trans_closure(res);
249 /* compute back edge chains */
250 compute_back_edge_chains(res);
/* debug dump of the precomputed per-block sets */
253 DBG((res->dbg, LEVEL_1, "liveness chk in %+F\n", irg));
254 for (i = res->n_blocks - 1; i >= 0; --i) {
255 const ir_node *irn = (const ir_node*) dfs_get_pre_num_node(res->dfs, i);
256 bl_info_t *bi = get_block_info(res, irn);
257 DBG((res->dbg, LEVEL_1, "lv_chk for %d -> %+F\n", i, irn));
258 DBG((res->dbg, LEVEL_1, "\tred reach: %B\n", bi->red_reachable));
259 DBG((res->dbg, LEVEL_1, "\ttgt reach: %B\n", bi->be_tgt_reach));
263 DBG((res->dbg, LEVEL_1, "back edge src: %B\n", res->back_edge_src));
264 DBG((res->dbg, LEVEL_1, "back edge tgt: %B\n", res->back_edge_tgt));
266 stat_ev_tim_pop("lv_chk_cons_time");
/**
 * Release all resources owned by the liveness checker @p lv.
 * The obstack free releases all block infos and bitsets in one go.
 *
 * NOTE(review): freeing the DFS data and the lv struct itself is not
 * visible in this chunk — presumably elided; verify upstream.
 */
270 void lv_chk_free(lv_chk_t *lv)
273 obstack_free(&lv->obst, NULL);
274 ir_nodemap_destroy(&lv->block_infos);
/**
 * Answer liveness queries for value @p var at block @p bl using only
 * the precomputed DFS/dominance data: the returned bitmask combines
 * lv_chk_state_in / lv_chk_state_end / lv_chk_state_out flags.
 *
 * NOTE(review): this chunk is missing many lines (early returns,
 * braces, several conditions such as the "use_bl == bl" / "is_Phi"
 * tests); the comments below describe only what is visible here.
 */
278 unsigned lv_chk_bl_xxx(lv_chk_t *lv, const ir_node *bl, const ir_node *var)
282 stat_ev_cnt_decl(uses);
283 stat_ev_cnt_decl(iter);
285 assert(is_Block(bl) && "can only check for liveness in a block");
287 /* If the variable is not a liveness-relevant var, bail out. */
288 if (!is_liveness_node(var))
291 stat_ev_ctx_push_fmt("lv_chk", "%u", get_irn_idx(var));
294 /* If there is no dominance relation, go out, too */
295 def_bl = get_nodes_block(var);
296 if (!block_dominates(def_bl, bl)) {
297 stat_ev("lv_chk_no_dom");
302 * If the block in question is the same as the definition block,
303 * the algorithm is simple. Just check for uses not inside this block.
306 stat_ev("lv_chk_def_block");
307 DBG((lv->dbg, LEVEL_2, "lv check same block %+F in %+F\n", var, bl));
308 foreach_out_edge (var, edge) {
309 ir_node *use = get_edge_src_irn(edge);
312 if (!is_liveness_node(use))
315 stat_ev_cnt_inc(uses);
316 use_bl = get_nodes_block(use);
/* Phi uses count in the predecessor block of the matching input */
318 int pos = get_edge_src_pos(edge);
319 use_bl = get_Block_cfgpred_block(use_bl, pos);
322 DBG((lv->dbg, LEVEL_2, "\tphi %+F in succ %+F,%d -> live end\n", use, use_bl, pos));
323 res |= lv_chk_state_end;
/* any use outside the definition block makes var live end+out there */
327 if (use_bl != def_bl) {
328 res = lv_chk_state_end | lv_chk_state_out;
337 * this is the more complicated case.
338 * We try to gather as much information as possible during looking
341 * Note that we know for sure that bl != def_bl. That is sometimes
342 * silently exploited below.
345 bl_info_t *def = get_block_info(lv, def_bl);
346 bl_info_t *bli = get_block_info(lv, bl);
/* one bit per block that contains a (Phi-adjusted) use of var */
347 bitset_t *uses = bitset_alloca(lv->n_blocks);
351 unsigned min_dom, max_dom;
353 /* if the block has no DFS info, it cannot be reached.
354 * This can happen in functions with endless loops.
355 * we then go out, since nothing is live there.
357 * TODO: Is that right?
363 DBG((lv->dbg, LEVEL_2, "lv check %+F (def in %+F #%d) in different block %+F #%d\n",
364 var, def_bl, def->id, bl, bli->id));
/* collect the blocks of all relevant uses into the "uses" bitset */
366 foreach_out_edge (var, edge) {
367 ir_node *user = get_edge_src_irn(edge);
368 int mask = lv_chk_state_in;
373 /* if the user is no liveness node, the use does not count */
374 if (!is_liveness_node(user))
377 stat_ev_cnt_inc(uses);
379 /* if the user is a phi, the use is in the predecessor
380 * furthermore, prepare a mask so that in the case where
381 * bl (the block in question) coincides with a use, it
382 * can be marked live_end there. */
383 use_bl = get_nodes_block(user);
385 int pos = get_edge_src_pos(edge);
386 use_bl = get_Block_cfgpred_block(use_bl, pos);
387 mask |= lv_chk_state_end;
391 /* if the use block coincides with the query block, we
392 * already gather a little liveness information.
393 * The variable is surely live there, since bl != def_bl
394 * (that case is treated above). */
398 bi = get_block_info(lv, use_bl);
401 bitset_set(uses, bi->id);
404 /* get the dominance range which really matters. all uses outside
405 * the definition's dominance range are not to consider. note,
406 * that the definition itself is also not considered. The case
407 * where bl == def_bl is considered above. */
408 min_dom = get_Block_dom_tree_pre_num(def_bl) + 1;
409 max_dom = get_Block_dom_max_subtree_pre_num(def_bl);
411 DBG((lv->dbg, LEVEL_2, "\tuses: %B\n", uses));
413 /* prepare a set with all reachable back edge targets.
414 * this will determine our "looking points" from where
415 * we will search/find the calculated uses. */
416 Tq = bli->be_tgt_reach;
418 /* now, visit all viewing points in the temporary bitset lying
419 * in the dominance range of the variable. Note that for reducible
420 * flow-graphs the first iteration is sufficient and the loop
422 DBG((lv->dbg, LEVEL_2, "\tbe tgt reach: %B, dom span: [%d, %d]\n", Tq, min_dom, max_dom));
423 i = bitset_next_set(Tq, min_dom);
424 while (i <= max_dom) {
425 bl_info_t *ti = lv->map[i];
426 int use_in_current_block = bitset_is_set(uses, ti->id);
428 stat_ev_cnt_inc(iter);
431 * This is somewhat tricky. Since this routine handles both, live in
432 * and end/out we have to handle all the border cases correctly.
433 * Each node is in its own red_reachable set (see calculation
434 * function above). That means, that in the case where bl == t, the
435 * intersection check of uses and reachability below will always
436 * find an intersection, namely t.
438 * However, if a block contains a use and the variable is dead
439 * afterwards, it is not live end/out at that block. Besides
440 * back-edge target. If a var is live-in at a back-edge target it
441 * is also live out/end there since the variable is live in the
442 * underlying loop. So in the case where t == bl and that is not
443 * a back-edge target, we have to remove that use from consideration
444 * to determine if the var is live out/end there.
446 * Note that the live in information has been calculated by the
447 * uses iteration above.
449 if (ti == bli && !bitset_is_set(lv->back_edge_tgt, ti->id)) {
450 DBG((lv->dbg, LEVEL_2, "\tlooking not from a back edge target and q == t. removing use: %d\n", ti->id));
451 bitset_clear(uses, ti->id);
454 /* If we can reach a use, the variable is live there and we say goodbye */
455 DBG((lv->dbg, LEVEL_2, "\tlooking from %d: seeing %B\n", ti->id, ti->red_reachable));
456 if (bitset_intersect(ti->red_reachable, uses)) {
457 res |= lv_chk_state_in | lv_chk_state_out | lv_chk_state_end;
462 * if we deleted a use due to the commentary above, we have to
463 * re-add it since it might be visible from further view points
464 * (we only need that in the non-reducible case).
466 if (use_in_current_block)
467 bitset_set(uses, ti->id);
/* skip the whole dominance subtree of ti: it cannot contain new info */
469 i = bitset_next_set(Tq, get_Block_dom_max_subtree_pre_num(ti->block) + 1);
475 stat_ev_tim_pop("lv_chk_query_time");
476 stat_ev_cnt_done(uses, "lv_chk_uses");
477 stat_ev_cnt_done(iter, "lv_chk_iter");
478 stat_ev_ctx_pop("lv_chk");