2 * Copyright (C) 1995-2007 Inria Rhone-Alpes. All right reserved.
4 * This file is part of libFirm.
6 * This file may be distributed and/or modified under the terms of the
7 * GNU General Public License version 2 as published by the Free Software
8 * Foundation and appearing in the file LICENSE.GPL included in the
9 * packaging of this file.
11 * Licensees holding valid libFirm Professional Edition licenses may use
12 * this file in accordance with the libFirm Commercial License.
13 * Agreement provided with the Software.
15 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
23 * @author Sebastian Hack
27 * Liveness checks as developed by Benoit Boissinot, Fabrice Rastello and myself.
29 * The specialty here is that nothing has to be recomputed if new nodes are created
30 * or old ones deleted.
32 * This algo has one core routine check_live_end_internal() which performs the liveness check.
33 * It only relies on the precomputation done in the constructor, which in turn needs:
35 * - the dominance tree
36 * - data obtained from a depth-first-search
38 * The precomputation remains valid as long as the CFG is not altered.
46 #include "irgraph_t.h"
48 #include "irphase_t.h"
49 #include "iredges_t.h"
59 #include "irlivechk.h"
/*
 * Per-block precomputed liveness data, attached to each block via the phase.
 * NOTE(review): lines are missing from this chunk — the closing
 * "} bl_info_t;" and at least one field (be_tgt_calc, referenced in
 * compute_back_edge_chain below) are not visible. Confirm against the
 * full source. Also: "_bl_info_t" is a reserved identifier pattern in C.
 */
63 typedef struct _bl_info_t {
64 const ir_node *block; /**< The block. */
67 int id : 31; /**< a tight number for the block.
68 we're just reusing the pre num from
70 bitset_t *red_reachable; /**< Holds all ids of blocks reachable
71 in the CFG modulo back edges. */
73 bitset_t *be_tgt_reach; /**< target blocks of back edges whose
74 sources are reachable from this block
75 in the reduced graph. */
/* Fetch the bl_info_t attached to block @p bl by the phase of @p lv. */
78 #define get_block_info(lv, bl) ((bl_info_t *) phase_get_irn_data(&(lv)->ph, bl))
/*
 * Fields of the liveness-check environment (struct lv_chk_t).
 * NOTE(review): the struct header and the other fields (ph, dfs, n_blocks,
 * map, ...) are missing from this chunk — confirm against the full source.
 */
83 DEBUG_ONLY(firm_dbg_module_t *dbg;)
85 bitset_t *back_edge_src; /* ids of blocks that are sources of back edges */
86 bitset_t *back_edge_tgt; /* ids of blocks that are targets of back edges */
/*
 * Phase callback: allocate and initialize the per-block liveness data.
 * The block id reuses the dominance-tree pre-order number; the two
 * reachability bitsets are obstack-allocated with one bit per block.
 * NOTE(review): the body is truncated here (no visible return of bi and
 * no initialization of further fields) — confirm against the full source.
 */
90 static void *init_block_data(ir_phase *ph, const ir_node *irn, void *old)
92 lv_chk_t *lv = container_of(ph, lv_chk_t, ph);
93 bl_info_t *bi = phase_alloc(ph, sizeof(bi[0]));
95 bi->id = get_Block_dom_tree_pre_num(irn);
97 bi->red_reachable = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks);
98 bi->be_tgt_reach = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks);
/*
105 * Filter function to select all nodes for which liveness is computed.
107 * @return 1 if the node shall be considered in liveness, 0 if not.
 * NOTE(review): the switch body (the opcode cases and returns) is missing
 * from this chunk — confirm the accepted opcodes against the full source.
 */
109 static INLINE int is_liveness_node(const ir_node *irn)
111 switch(get_irn_opcode(irn)) {
/*
124 * Compute the transitive closure on the reduced graph.
125 * The reduced graph is the original graph without back edges.
126 * Since that is a DAG, a reverse post order of the graph gives a toposort
127 * which is ideally suited to compute the transitive closure.
128 * Note also, that the DFS tree of the reduced graph is the same as the one
129 * of the original graph. This saves us computing a new reverse post order.
130 * We also can re-use the DFS tree of the original graph.
 * NOTE(review): lines are missing inside this function (the else branch
 * opening before the back-edge marking, and closing braces) — the code
 * below is the visible fragment only.
 */
132 static void red_trans_closure(lv_chk_t *lv)
136 for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) {
137 const ir_node *bl = dfs_get_post_num_node(lv->dfs, i);
138 bl_info_t *bi = get_block_info(lv, bl);
140 const ir_edge_t *edge;
/* every block reaches itself in the reduced graph */
142 bitset_set(bi->red_reachable, bi->id);
143 foreach_block_succ (bl, edge) {
144 ir_node *succ = get_edge_src_irn(edge);
145 bl_info_t *si = get_block_info(lv, succ);
146 dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, bl, succ);
/*
149 * if the successor is no back edge, include all reachable
150 * blocks from there into the reachable set of the current node
 */
152 if (kind != DFS_EDGE_BACK) {
/* post-order iteration guarantees successors were processed first */
153 assert(dfs_get_post_num(lv->dfs, bl) > dfs_get_post_num(lv->dfs, succ));
154 bitset_or(bi->red_reachable, si->red_reachable);
157 /* mark the block as a back edge src and succ as back edge tgt. */
159 bitset_set(lv->back_edge_src, bi->id);
160 bitset_set(lv->back_edge_tgt, si->id);
/*
 * Recursively compute T_bl: the set of back-edge targets reachable from
 * @p bl via chains of back edges whose sources lie in bl's reduced
 * reachability set. Results are cached in bi->be_tgt_reach.
 * NOTE(review): lines are missing (the declaration of elm, setting of
 * ti->be_tgt_calc, and closing braces) — confirm against the full source.
 */
168 static void compute_back_edge_chain(lv_chk_t *lv, const ir_node *bl)
170 bitset_t *tmp = bitset_alloca(lv->n_blocks);
171 bl_info_t *bi = get_block_info(lv, bl);
175 DBG((lv->dbg, LEVEL_2, "computing T_%d\n", bi->id));
177 /* put all back edge sources reachable (reduced) from here in tmp */
178 bitset_copy(tmp, bi->red_reachable);
179 bitset_set(tmp, bi->id);
180 bitset_and(tmp, lv->back_edge_src);
183 DBG((lv->dbg, LEVEL_2, "\treachable be src: %B\n", tmp));
185 /* iterate over them ... */
186 bitset_foreach(tmp, elm) {
187 bl_info_t *si = lv->map[elm];
188 const ir_edge_t *edge;
190 /* and find back edge targets which are not reduced reachable from bl */
191 foreach_block_succ (si->block, edge) {
192 ir_node *tgt = get_edge_src_irn(edge);
193 bl_info_t *ti = get_block_info(lv, tgt);
194 dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, si->block, tgt);
196 if (kind == DFS_EDGE_BACK && !bitset_is_set(bi->red_reachable, ti->id)) {
/* ensure the target's own chain is computed before merging it in */
197 if (!ti->be_tgt_calc)
198 compute_back_edge_chain(lv, tgt);
199 bitset_set(bi->be_tgt_reach, ti->id);
200 bitset_or(bi->be_tgt_reach, ti->be_tgt_reach);
/* a block never counts as its own back-edge-chain target */
203 bitset_clear(bi->be_tgt_reach, bi->id);
/*
 * Compute back-edge chains for all back-edge sources, then propagate
 * be_tgt_reach through the reduced graph in post order, and finally make
 * every block a member of its own target-reach set.
 * NOTE(review): declarations of elm/i/n and several closing braces are
 * missing from this chunk — confirm against the full source.
 */
208 static INLINE void compute_back_edge_chains(lv_chk_t *lv)
213 DBG((lv->dbg, LEVEL_2, "back edge sources: %B\n", lv->back_edge_src));
214 bitset_foreach(lv->back_edge_src, elm) {
215 compute_back_edge_chain(lv, lv->map[elm]->block);
/* propagate: non-targets inherit the reach sets of their reduced successors */
218 for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) {
219 const ir_node *bl = dfs_get_post_num_node(lv->dfs, i);
220 bl_info_t *bi = get_block_info(lv, bl);
222 const ir_edge_t *edge;
224 if (!bitset_is_set(lv->back_edge_tgt, bi->id)) {
225 foreach_block_succ (bl, edge) {
226 ir_node *succ = get_edge_src_irn(edge);
227 bl_info_t *si = get_block_info(lv, succ);
228 dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, bl, succ);
230 if (kind != DFS_EDGE_BACK) {
231 assert(dfs_get_post_num(lv->dfs, bl) > dfs_get_post_num(lv->dfs, succ));
232 bitset_or(bi->be_tgt_reach, si->be_tgt_reach);
/* every block is a "viewing point" for itself in the queries below */
238 for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) {
239 const ir_node *bl = dfs_get_post_num_node(lv->dfs, i);
240 bl_info_t *bi = get_block_info(lv, bl);
241 bitset_set(bi->be_tgt_reach, bi->id);
/*
 * Constructor: build the liveness-check environment for @p irg using the
 * precomputed DFS @p dfs. Requires dominance information; results stay
 * valid as long as the CFG is unchanged.
 * NOTE(review): lines are missing throughout (assignment of res->dfs,
 * the dominance computation, declarations of i/name/f, stat/timer pushes,
 * the fclose, #ifdef guards around the dump code, and the final
 * "return res;") — confirm against the full source.
 */
245 lv_chk_t *lv_chk_new(ir_graph *irg, const dfs_t *dfs)
247 lv_chk_t *res = xmalloc(sizeof(res[0]));
248 struct obstack *obst;
/* edge information must not interfere; presumably re-activated by callers — TODO confirm */
251 edges_deactivate(irg);
256 phase_init(&res->ph, "liveness check", irg, PHASE_DEFAULT_GROWTH, init_block_data, NULL);
257 obst = phase_obst(&res->ph);
259 FIRM_DBG_REGISTER(res->dbg, "ir.ana.lvchk");
262 res->n_blocks = dfs_get_n_nodes(res->dfs);
263 res->back_edge_src = bitset_obstack_alloc(obst, res->n_blocks);
264 res->back_edge_tgt = bitset_obstack_alloc(obst, res->n_blocks);
265 res->map = obstack_alloc(obst, res->n_blocks * sizeof(res->map[0]));
266 memset(res->map, 0, res->n_blocks * sizeof(res->map[0]));
/* debug dumps of the DFS tree and the block graph */
272 ir_snprintf(name, sizeof(name), "dfs_%F.dot", irg);
273 if ((f = fopen(name, "wt")) != NULL) {
274 dfs_dump(res->dfs, f);
277 dump_ir_block_graph(irg, "-lvchk");
281 /* fill the map which maps pre_num to block infos */
282 for (i = res->n_blocks - 1; i >= 0; --i) {
283 ir_node *irn = (ir_node *) dfs_get_pre_num_node(res->dfs, i);
284 bl_info_t *bi = phase_get_or_set_irn_data(&res->ph, irn);
285 assert(bi->id < res->n_blocks);
286 assert(res->map[bi->id] == NULL);
287 res->map[bi->id] = bi;
290 /* first of all, compute the transitive closure of the CFG *without* back edges */
291 red_trans_closure(res);
293 /* compute back edge chains */
294 compute_back_edge_chains(res);
/* debug-only dump of the computed per-block sets */
297 DBG((res->dbg, LEVEL_1, "liveness chk in %+F\n", irg));
298 for (i = res->n_blocks - 1; i >= 0; --i) {
299 const ir_node *irn = dfs_get_pre_num_node(res->dfs, i);
300 bl_info_t *bi = get_block_info(res, irn);
301 DBG((res->dbg, LEVEL_1, "lv_chk for %d -> %+F\n", i, irn));
302 DBG((res->dbg, LEVEL_1, "\tred reach: %B\n", bi->red_reachable));
303 DBG((res->dbg, LEVEL_1, "\ttgt reach: %B\n", bi->be_tgt_reach));
307 DBG((res->dbg, LEVEL_1, "back edge src: %B\n", res->back_edge_src));
308 DBG((res->dbg, LEVEL_1, "back edge tgt: %B\n", res->back_edge_tgt));
310 stat_ev_tim_pop("lv_chk_cons_time");
/*
 * Destructor: release all obstack-allocated per-block data.
 * NOTE(review): truncated — the full source presumably also frees the
 * lv struct itself (xfree) and deinitializes the phase; confirm.
 */
314 void lv_chk_free(lv_chk_t *lv)
316 obstack_free(phase_obst(&lv->ph), NULL);
/*
321 * Check if a node is live at the end of a block.
322 * This function is for internal use as its code is shared between
323 * the in/end routines below. It is almost the "live_end" routine
324 * but passing in the bitset for recording the blocks where the variable
325 * is used saves some effort in the "live_in" routine. See below for
 *
328 * @param lv The liveness check environment.
329 * @param what The node to check for.
330 * @param bl The block under investigation.
331 * @param uses A bitset where this routine records all ids of blocks
332 * where this variable is used. Note that the bitset
333 * is only guaranteed to be filled if the node was not
334 * live at the end of the block.
335 * @return 1, if @p what is live at the end at @p bl.
 *
 * NOTE(review): the @param list above (what/uses) does not match the
 * signature below (bl/var, uses allocated locally) — the comment appears
 * to belong to an older internal helper; confirm and update in the full
 * source. Many lines of the body are also missing (declarations of
 * res/def_bl/use_bl/i/bi, early returns, Phi check, closing braces).
 */
337 unsigned lv_chk_bl_in_mask(const lv_chk_t *lv, const ir_node *bl, const ir_node *var)
339 stat_ev_cnt_decl(uses);
342 const ir_edge_t *edge;
346 assert(is_Block(bl) && "can only check for liveness in a block");
348 if (!is_liveness_node(var))
/* a value is not live-in at its own definition block, and never live
 * outside the dominance region of its definition */
351 def_bl = get_nodes_block(var);
352 if (def_bl == bl || !block_dominates(def_bl, bl)) {
357 bitset_t *uses = bitset_alloca(lv->n_blocks);
358 bitset_t *tmp = bitset_alloca(lv->n_blocks);
359 int min_dom = get_Block_dom_tree_pre_num(def_bl) + 1;
360 int max_dom = get_Block_dom_max_subtree_pre_num(def_bl);
361 bl_info_t *bli = get_block_info(lv, bl);
364 DBG((lv->dbg, LEVEL_2, "lv check of %+F, def=%+F,%d != q=%+F,%d\n",
365 var, def_bl, min_dom - 1, bl, bli->id));
/* collect the blocks containing uses; Phi uses count in the predecessor */
367 foreach_out_edge (var, edge) {
368 ir_node *user = get_edge_src_irn(edge);
372 if (!is_liveness_node(user))
375 stat_ev_cnt_inc(uses);
376 use_bl = get_nodes_block(user);
378 int pos = get_edge_src_pos(edge);
379 use_bl = get_Block_cfgpred_block(use_bl, pos);
/* a use directly in the query block proves live-in immediately */
383 res = lv_chk_state_in;
384 DBG((lv->dbg, LEVEL_2, "\tuse directly in block %+F by %+F\n", use_bl, user));
388 bi = get_block_info(lv, use_bl);
389 bitset_set(uses, bi->id);
392 DBG((lv->dbg, LEVEL_2, "\tuses: %B\n", uses));
/* viewing points: bl itself plus all back-edge targets it reaches */
396 bitset_copy(tmp, bli->be_tgt_reach);
397 bitset_set(tmp, bli->id);
399 DBG((lv->dbg, LEVEL_2, "\tbe tgt reach: %B, dom span: [%d, %d]\n", tmp, min_dom, max_dom));
400 for (i = bitset_next_set(tmp, min_dom); i >= 0 && i <= max_dom; i = bitset_next_set(tmp, i + 1)) {
401 bl_info_t *ti = lv->map[i];
402 DBG((lv->dbg, LEVEL_2, "\tlooking from %d: seeing %B\n", ti->id, ti->red_reachable));
/* if a use is (reduced-)reachable from a viewing point, var is live-in */
403 if (bitset_intersect(ti->red_reachable, uses)) {
404 res = lv_chk_state_in;
/* skip points already covered by this one's reachability */
408 bitset_andnot(tmp, ti->red_reachable);
/*
 * Check if @p var is live at the end of block @p bl (and thereby live-out).
 * Mirrors lv_chk_bl_in_mask but: the query block may equal the definition
 * block, Phi uses in successors directly imply live-end, and a use inside
 * bl itself only counts if bl is a back-edge target (otherwise the value
 * may die inside bl).
 * NOTE(review): many lines are missing (declarations of res/def_bl/use_bl,
 * early returns, Phi check, break statements, final return, closing
 * braces) — the code below is the visible fragment only.
 */
417 unsigned lv_chk_bl_end_mask(const lv_chk_t *lv, const ir_node *bl, const ir_node *var)
419 stat_ev_cnt_decl(uses);
422 const ir_edge_t *edge;
426 assert(is_Block(bl) && "can only check for liveness in a block");
428 if (!is_liveness_node(var))
431 def_bl = get_nodes_block(var);
432 if (!block_dominates(def_bl, bl)) {
437 bitset_t *uses = bitset_alloca(lv->n_blocks);
438 bitset_t *tmp = bitset_alloca(lv->n_blocks);
439 int min_dom = get_Block_dom_tree_pre_num(def_bl) + 1;
440 int max_dom = get_Block_dom_max_subtree_pre_num(def_bl);
441 bl_info_t *bli = get_block_info(lv, bl);
444 DBG((lv->dbg, LEVEL_2, "lv end check of %+F, def=%+F,%d != q=%+F,%d\n",
445 var, def_bl, min_dom - 1, bl, bli->id));
447 foreach_out_edge (var, edge) {
448 ir_node *user = get_edge_src_irn(edge);
452 if (!is_liveness_node(user))
455 stat_ev_cnt_inc(uses);
456 use_bl = get_nodes_block(user);
/* Phi uses happen on the incoming edge: attribute them to the predecessor */
458 int pos = get_edge_src_pos(edge);
459 use_bl = get_Block_cfgpred_block(use_bl, pos);
462 res |= lv_chk_state_end;
465 bi = get_block_info(lv, use_bl);
/* a use inside bl only forces live-end if bl is a back-edge target
 * (then the value is live around the loop) */
466 if (use_bl != bl || bitset_is_set(lv->back_edge_tgt, bi->id))
467 bitset_set(uses, bi->id);
470 DBG((lv->dbg, LEVEL_2, "\tuses: %B\n", uses));
/* viewing points: bl itself plus all back-edge targets it reaches */
472 bitset_copy(tmp, bli->be_tgt_reach);
473 bitset_set(tmp, bli->id);
475 DBG((lv->dbg, LEVEL_2, "\tbe tgt reach + current: %B, dom span: [%d, %d]\n", tmp, min_dom, max_dom));
476 for (i = bitset_next_set(tmp, min_dom); i >= 0 && i <= max_dom; i = bitset_next_set(tmp, i + 1)) {
477 bl_info_t *ti = lv->map[i];
478 DBG((lv->dbg, LEVEL_2, "\tlooking from %d: seeing %B\n", ti->id, ti->red_reachable));
479 if (bitset_intersect(ti->red_reachable, uses)) {
480 res = lv_chk_state_out | lv_chk_state_end;
484 bitset_andnot(tmp, ti->red_reachable);
/*
493 * Check a node's liveness situation of a block.
494 * This routine considers both cases, the live in and end/out case.
 *
496 * @param lv The liveness check environment.
497 * @param bl The block under investigation.
498 * @param var The node to check for.
499 * @return A bitmask of lv_chk_state_XXX fields.
 *
 * NOTE(review): many lines are missing throughout this function
 * (declarations of res/def_bl/use_bl/Tq/bi, the is_Phi checks, `continue`
 * and `break` statements, closing braces, and the final return) — the
 * code below is the visible fragment only; confirm against the full source.
 */
501 unsigned lv_chk_bl_xxx(const lv_chk_t *lv, const ir_node *bl, const ir_node *var)
503 stat_ev_cnt_decl(uses);
504 stat_ev_cnt_decl(iter);
509 assert(is_Block(bl) && "can only check for liveness in a block");
511 /* If the variable is no liveness related var, bail out. */
512 if (!is_liveness_node(var))
515 stat_ev_ctx_push_fmt("lv_chk", "%u", get_irn_idx(var));
518 /* If there is no dominance relation, go out, too */
519 def_bl = get_nodes_block(var);
520 if (!block_dominates(def_bl, bl)) {
521 stat_ev("lv_chk_no_dom");
/*
526 * If the block in question is the same as the definition block,
527 * the algorithm is simple. Just check for uses not inside this block.
 */
530 const ir_edge_t *edge;
532 stat_ev("lv_chk_def_block");
533 DBG((lv->dbg, LEVEL_2, "lv check same block %+F in %+F\n", var, bl));
534 foreach_out_edge (var, edge) {
535 ir_node *use = get_edge_src_irn(edge);
538 if (!is_liveness_node(use))
541 stat_ev_cnt_inc(uses);
542 use_bl = get_nodes_block(use);
/* Phi use: the value must survive the edge into the Phi's predecessor */
544 int pos = get_edge_src_pos(edge);
545 use_bl = get_Block_cfgpred_block(use_bl, pos);
548 DBG((lv->dbg, LEVEL_2, "\tphi %+F in succ %+F,%d -> live end\n", use, use_bl, pos));
549 res |= lv_chk_state_end;
/* any use outside the definition block makes the value live out/end */
553 if (use_bl != def_bl) {
554 res = lv_chk_state_end | lv_chk_state_out;
/*
563 * this is the more complicated case.
564 * We try to gather as much information as possible during looking
 *
567 * Note that we know for sure that bl != def_bl. That is sometimes
568 * silently exploited below.
 */
571 bl_info_t *def = get_block_info(lv, def_bl);
572 bl_info_t *bli = get_block_info(lv, bl);
573 bitset_t *uses = bitset_alloca(lv->n_blocks);
576 unsigned i, min_dom, max_dom;
577 const ir_edge_t *edge;
579 /* if the block has no DFS info, it cannot be reached.
580 * This can happen in functions with endless loops.
581 * we then go out, since nothing is live there.
 *
583 * TODO: Is that right?
 */
589 DBG((lv->dbg, LEVEL_2, "lv check %+F (def in %+F #%d) in different block %+F #%d\n",
590 var, def_bl, def->id, bl, bli->id));
592 foreach_out_edge (var, edge) {
593 ir_node *user = get_edge_src_irn(edge);
594 int mask = lv_chk_state_in;
599 /* if the user is no liveness node, the use does not count */
600 if (!is_liveness_node(user))
603 stat_ev_cnt_inc(uses);
605 /* if the user is a phi, the use is in the predecessor
606 * furthermore, prepare a mask so that in the case where
607 * bl (the block in question) coincides with a use, it
608 * can be marked live_end there. */
609 use_bl = get_nodes_block(user);
611 int pos = get_edge_src_pos(edge);
612 use_bl = get_Block_cfgpred_block(use_bl, pos);
613 mask |= lv_chk_state_end;
617 /* if the use block coincides with the query block, we
618 * already gather a little liveness information.
619 * The variable is surely live there, since bl != def_bl
620 * (that case is treated above). */
624 bi = get_block_info(lv, use_bl);
627 bitset_set(uses, bi->id);
630 /* get the dominance range which really matters. all uses outside
631 * the definition's dominance range are not to consider. note,
632 * that the definition itself is also not considered. The case
633 * where bl == def_bl is considered above. */
634 min_dom = get_Block_dom_tree_pre_num(def_bl) + 1;
635 max_dom = get_Block_dom_max_subtree_pre_num(def_bl);
637 DBG((lv->dbg, LEVEL_2, "\tuses: %B\n", uses));
639 /* prepare a set with all reachable back edge targets.
640 * this will determine our "looking points" from where
641 * we will search/find the calculated uses. */
642 Tq = bli->be_tgt_reach;
644 /* now, visit all viewing points in the temporary bitset lying
645 * in the dominance range of the variable. Note that for reducible
646 * flow-graphs the first iteration is sufficient and the loop
 */
648 DBG((lv->dbg, LEVEL_2, "\tbe tgt reach: %B, dom span: [%d, %d]\n", Tq, min_dom, max_dom));
649 i = bitset_next_set(Tq, min_dom);
650 while(i <= max_dom) {
651 bl_info_t *ti = lv->map[i];
652 int use_in_current_block = bitset_is_set(uses, ti->id);
654 stat_ev_cnt_inc(iter);
/*
657 * This is somewhat tricky. Since this routine handles both, live in
658 * and end/out we have to handle all the border cases correctly.
659 * Each node is in its own red_reachable set (see calculation
660 * function above). That means, that in the case where bl == t, the
661 * intersection check of uses and reachability below will always
662 * find an intersection, namely t.
 *
664 * However, if a block contains a use and the variable is dead
665 * afterwards, it is not live end/out at that block. Besides
666 * back-edge target. If a var is live-in at a back-edge target it
667 * is also live out/end there since the variable is live in the
668 * underlying loop. So in the case where t == bl and that is not
669 * a back-edge target, we have to remove that use from consideration
670 * to determine if the var is live out/end there.
 *
672 * Note that the live in information has been calculated by the
673 * uses iteration above.
 */
675 if (ti == bli && !bitset_is_set(lv->back_edge_tgt, ti->id)) {
676 DBG((lv->dbg, LEVEL_2, "\tlooking not from a back edge target and q == t. removing use: %d\n", ti->id));
677 bitset_clear(uses, ti->id);
680 /* If we can reach a use, the variable is live there and we say goodbye */
681 DBG((lv->dbg, LEVEL_2, "\tlooking from %d: seeing %B\n", ti->id, ti->red_reachable));
682 if (bitset_intersect(ti->red_reachable, uses)) {
683 res |= lv_chk_state_in | lv_chk_state_out | lv_chk_state_end;
/*
688 * if we deleted a use due to the commentary above, we have to
689 * re-add it since it might be visible from further view points
690 * (we only need that in the non-reducible case).
 */
692 if (use_in_current_block)
693 bitset_set(uses, ti->id);
/* jump past the whole dominance subtree of the current viewing point */
695 i = bitset_next_set(Tq, get_Block_dom_max_subtree_pre_num(ti->block) + 1);
701 stat_ev_tim_pop("lv_chk_query_time");
702 stat_ev_cnt_done(uses, "lv_chk_uses");
703 stat_ev_cnt_done(iter, "lv_chk_iter");
704 stat_ev_ctx_pop("lv_chk");