 * Copyright (C) 1995-2007 Inria Rhone-Alpes. All rights reserved.
4 * This file is part of libFirm.
6 * This file may be distributed and/or modified under the terms of the
7 * GNU General Public License version 2 as published by the Free Software
8 * Foundation and appearing in the file LICENSE.GPL included in the
9 * packaging of this file.
11 * Licensees holding valid libFirm Professional Edition licenses may use
12 * this file in accordance with the libFirm Commercial License.
13 * Agreement provided with the Software.
15 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * @author  Sebastian Hack
 * @version $Id: irlivechk.c 14865 2007-06-30 18:23:05Z matze $
 *
 * Liveness checks as developed by Benoit Boissinot, Fabrice Rastello and myself.
 *
 * The speciality here is that nothing has to be recomputed if new nodes are created
 * or old ones deleted.
 *
 * This algorithm has one core routine check_live_end_internal() which performs the
 * liveness check. It relies only on the precomputation done in the constructor,
 * which in turn needs:
 * - the dominance tree
 * - data obtained from a depth-first-search
 *
 * The precomputation remains valid as long as the CFG is not altered.
46 #include "irgraph_t.h"
47 #include "irphase_t.h"
48 #include "iredges_t.h"
56 #include "irlivechk_old.h"
60 typedef struct _bl_info_t {
61 ir_node *block; /**< The block. */
63 int id; /**< a tight number for the block.
64 we're just reusing the pre num from
67 bitset_t *red_reachable; /**< Holds all id's if blocks reachable
68 in the CFG modulo back edges. */
70 bitset_t *be_tgt_reach; /**< target blocks of back edges whose
71 sources are reachable from this block
72 in the reduced graph. */
74 bitset_t *be_tgt_dom; /**< target blocks of back edges which
75 are dominated by this block. */
/** Shortcut: fetch the bl_info_t attached to block @p bl by the phase of
 *  environment @p lv (presumably NULL if none was set yet — see irphase_t.h). */
#define get_block_info(lv, bl) ((bl_info_t *) phase_get_irn_data(&(lv)->ph, bl))
/**
 * The liveness check environment.
 *
 * NOTE(review): this excerpt is truncated — further members are referenced
 * below (lv->ph, lv->dfs, lv->n_blocks, lv->map, res->stat_data, res->stat)
 * but their declarations are not visible here, and the struct's closing
 * brace is missing from this view as well.  Confirm against the full file.
 */
struct _lv_chk_old_t {
	DEBUG_ONLY(firm_dbg_module_t *dbg;)

	bitset_t *back_edge_src;   /* ids of blocks that are sources of back edges */
	bitset_t *back_edge_tgt;   /* ids of blocks that are targets of back edges */
90 static void *init_block_data(ir_phase *ph, ir_node *irn, void *old)
92 lv_chk_old_t *lv = container_of(ph, lv_chk_old_t, ph);
93 bl_info_t *bi = phase_alloc(ph, sizeof(bi[0]));
95 bi->id = dfs_get_pre_num(lv->dfs, irn);
97 bi->red_reachable = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks);
98 bi->be_tgt_reach = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks);
99 bi->be_tgt_dom = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks);
/**
 * Filter function to select all nodes for which liveness is computed.
 *
 * @param irn  The node to examine.
 * @return 1 if the node shall be considered in liveness, 0 if not.
 *
 * NOTE(review): this excerpt is truncated — the opening brace, the switch
 * cases and the return statements are not visible here; which opcodes are
 * excluded cannot be told from this view.  Confirm against the full file.
 */
static INLINE int is_liveness_node(const ir_node *irn)
	switch(get_irn_opcode(irn)) {
123 * Compute the transitive closure on the reduced graph.
124 * The reduced graph is the original graph without back edges.
125 * Since that is a DAG, a reverse post order of the graph gives a toposort
126 * which is ideally suited to compute the transitive closure.
127 * Note also, that the DFS tree of the reduced graph is the same than the one
128 * of the original graph. This saves us computing a new reverse post order.
129 * We also can re-use the DFS tree of the original graph.
131 static void red_trans_closure(lv_chk_old_t *lv)
135 for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) {
136 ir_node *bl = dfs_get_post_num_node(lv->dfs, i);
137 bl_info_t *bi = get_block_info(lv, bl);
139 const ir_edge_t *edge;
141 foreach_block_succ (bl, edge) {
142 ir_node *succ = get_edge_src_irn(edge);
143 bl_info_t *si = get_block_info(lv, succ);
144 dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, bl, succ);
147 * if the successor is no back edge, include all reachable
148 * blocks from there into the reachable set of the current node
150 if (kind != DFS_EDGE_BACK) {
151 assert(dfs_get_post_num(lv->dfs, bl)
152 > dfs_get_post_num(lv->dfs, succ));
153 bitset_or(bi->red_reachable, si->red_reachable);
154 bitset_set(bi->red_reachable, si->id);
157 /* mark the block as a back edge src and succ as back edge tgt. */
159 bitset_set(lv->back_edge_src, bi->id);
160 bitset_set(lv->back_edge_tgt, si->id);
169 * Compute the two back edge sets for each block.
170 * <code>be_tgt_reach</code> contains all target blocks of a back edges reachable from a node.
171 * <code>be_tgt_dom</code> contains all target blocks of back edges strictly dominated
174 static void compute_back_edge_sets(lv_chk_old_t *lv, ir_node *bl)
176 bl_info_t *bi = phase_get_or_set_irn_data(&(lv)->ph, bl);
177 bitset_t *tmp = bitset_alloca(lv->n_blocks);
182 dominates_for_each (bl, n) {
183 bl_info_t *ni = phase_get_or_set_irn_data(&(lv)->ph, n);
185 /* compute information for dominance sub tree */
186 compute_back_edge_sets(lv, n);
189 * of course all blocks dominated by blocks in the
190 * subtree are also dominated by bl.
192 bitset_or(bi->be_tgt_dom, ni->be_tgt_dom);
195 * add the immeditate dominee to the back edge tgt dominance
196 * bitset if it is the target node of a back edge.
198 if (bitset_is_set(lv->back_edge_tgt, ni->id))
199 bitset_set(bi->be_tgt_dom, ni->id);
203 * iterate over all back edge src nodes which are reachable from
204 * this nodes and put the targets of the back edges in the be_tgt_reach
205 * bitset of the node.
207 bitset_copy(tmp, bi->red_reachable);
208 bitset_set(tmp, bi->id);
209 bitset_and(tmp, lv->back_edge_src);
210 bitset_foreach (tmp, elm) {
211 ir_node *src = lv->map[elm]->block;
212 const ir_edge_t *edge;
214 foreach_block_succ (src, edge) {
215 ir_node *succ = get_edge_src_irn(edge);
216 dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, src, succ);
218 if (kind == DFS_EDGE_BACK) {
219 bl_info_t *si = get_block_info(lv, succ);
220 bitset_set(bi->be_tgt_reach, si->id);
/**
 * Create a new liveness check environment for a graph.
 *
 * Builds a DFS over the CFG, sets up the per-block info phase and
 * precomputes the reduced-graph reachability and back edge sets used
 * by the liveness queries.
 *
 * @param irg  The graph to analyse.
 * @return     The newly allocated environment (caller owns it; free with
 *             lv_chk_old_free).
 *
 * NOTE(review): this excerpt is truncated — the opening brace, local
 * declarations (i, name, f, ...), probable debug/statistics conditional
 * compilation around the dump code, an fclose() for the dot file and the
 * final `return res;` are not visible here.  Confirm against the full file.
 */
lv_chk_old_t *lv_chk_old_new(ir_graph *irg)
	lv_chk_old_t *res = xmalloc(sizeof(res[0]));
	struct obstack *obst;

	/* set up the phase that attaches a bl_info_t to every block on demand */
	phase_init(&res->ph, "liveness check", irg, PHASE_DEFAULT_GROWTH, init_block_data, NULL);
	obst = phase_obst(&res->ph);

	FIRM_DBG_REGISTER(res->dbg, "ir.ana.lvchk");

	/* depth first search over the CFG successors */
	res->dfs = dfs_new(&absgraph_irg_cfg_succ, irg);
	res->n_blocks = dfs_get_n_nodes(res->dfs);
	res->back_edge_src = bitset_obstack_alloc(obst, res->n_blocks);
	res->back_edge_tgt = bitset_obstack_alloc(obst, res->n_blocks);
	res->map = obstack_alloc(obst, res->n_blocks * sizeof(res->map[0]));

	/* statistics bookkeeping; the stat_data/stat members are not visible
	 * in this excerpt — presumably declared in the (truncated) struct */
	memset(&res->stat_data, 0, sizeof(res->stat_data));
	res->stat = &res->stat_data;

	/* dump the DFS and the block graph for debugging (likely guarded by a
	 * debug #ifdef in the full file — confirm) */
	ir_snprintf(name, sizeof(name), "dfs_%F.dot", irg);
	if ((f = fopen(name, "wt")) != NULL) {
		dfs_dump(res->dfs, f);
	/* NOTE(review): fclose(f) and the if's closing brace truncated here */
	dump_ir_block_graph(irg, "-lvchk");

	/* fill the map which maps pre_num to block infos */
	for (i = res->n_blocks - 1; i >= 0; --i) {
		ir_node *irn = dfs_get_pre_num_node(res->dfs, i);
		res->map[i] = phase_get_or_set_irn_data(&res->ph, irn);
	/* NOTE(review): loop closing brace truncated here */

	/* first of all, compute the transitive closure of the CFG *without* back edges */
	red_trans_closure(res);

	/* now fill the two remaining bitsets concerning back edges */
	compute_back_edge_sets(res, get_irg_start_block(irg));

	/* debug output of the computed per-block sets */
	DBG((res->dbg, LEVEL_1, "liveness chk in %+F\n", irg));
	for (i = res->n_blocks - 1; i >= 0; --i) {
		ir_node *irn = dfs_get_pre_num_node(res->dfs, i);
		bl_info_t *bi = get_block_info(res, irn);
		DBG((res->dbg, LEVEL_1, "lv_chk for %d -> %+F\n", i, irn));
		DBG((res->dbg, LEVEL_1, "\tred reach: %B\n", bi->red_reachable));
		DBG((res->dbg, LEVEL_1, "\ttgt reach: %B\n", bi->be_tgt_reach));
		DBG((res->dbg, LEVEL_1, "\ttgt dom: %B\n", bi->be_tgt_dom));
	/* NOTE(review): loop closing brace truncated here */

	DBG((res->dbg, LEVEL_1, "back edge src: %B\n", res->back_edge_src));
	DBG((res->dbg, LEVEL_1, "back edge tgt: %B\n", res->back_edge_tgt));
	/* NOTE(review): `return res;` and the function's closing brace truncated */
/**
 * Free a liveness check environment.
 *
 * Releases the phase obstack, which owns all per-block infos and bitsets
 * allocated in init_block_data and lv_chk_old_new.
 *
 * NOTE(review): this excerpt is truncated — the braces and presumably the
 * release of the environment struct itself (and the DFS data) are not
 * visible; as shown, @p lv would leak.  Confirm against the full file.
 */
void lv_chk_old_free(lv_chk_old_t *lv)
	obstack_free(phase_obst(&lv->ph), NULL);
/**
 * Check if a node is live at the end of a block.
 * This function is for internal use as its code is shared between
 * the in/end routines below. It is almost the "live_end" routine
 * but passing in the bitset for recording the blocks where the variable
 * is used saves some effort in the "live_in" routine. See below for
 * details.
 *
 * @param lv    The liveness check environment.
 * @param what  The node to check for.
 * @param bl    The block under investigation.
 * @param uses  A bitset where this routine records all ids of blocks
 *              where this variable is used. Note that the bitset
 *              is only guaranteed to be filled if the node was not
 *              live at the end of the block.
 * @return      1, if @p what is live at the end at @p bl.
 *
 * NOTE(review): the declared return type is `unsigned` and the code builds
 * a bitmask of lv_chk_state_* flags, so the "@return 1" above looks
 * outdated — confirm against the full file.  This excerpt is also heavily
 * truncated: the opening brace, several local declarations (res, what_bl,
 * use_bl, bi, ...), a number of guarding conditions (e.g. the is_Phi()
 * tests before the cfgpred lookups), loop-exit statements and the final
 * return are not visible here.
 */
unsigned lv_chk_old_bl_xxx(const lv_chk_old_t *lv, const ir_node *bl, const ir_node *what)
	stat_ev_cnt_decl(uses);
	stat_ev_cnt_decl(iter);

	assert(is_Block(bl) && "can only check for liveness in a block");

	/* nodes filtered out here are never considered live */
	if (!is_liveness_node(what))
	/* NOTE(review): the early-return statement of this if is truncated */

	stat_ev_ctx_push_fobj("node", what);

	/* if the definition block does not dominate bl, what cannot be live there */
	what_bl = get_nodes_block(what);
	if (!block_dominates(what_bl, bl)) {
		stat_ev("lv_chk_no_dom");
	/* NOTE(review): branch body (early return) and closing brace truncated */

	/*
	 * If the block in question is the same as the definition block,
	 * the algorithm is simple. Just check for uses not inside this block.
	 * NOTE(review): the enclosing `if` opener for this simple case is
	 * truncated in this excerpt.
	 */
		const ir_edge_t *edge;

		stat_ev("lv_chk_def_block");
		DBG((lv->dbg, LEVEL_2, "lv check same block %+F in %+F\n", what, bl));
		foreach_out_edge (what, edge) {
			ir_node *use = get_edge_src_irn(edge);

			if (!is_liveness_node(use))
			/* NOTE(review): skip/continue statement truncated */

			stat_ev_cnt_inc(uses);
			use_bl = get_nodes_block(use);
			/* NOTE(review): a guard (presumably is_Phi(use)) appears to be
			 * truncated here; a Phi's use happens at the end of the
			 * corresponding predecessor block */
				int pos = get_edge_src_pos(edge);

				use_bl = get_Block_cfgpred_block(use_bl, pos);
				DBG((lv->dbg, LEVEL_2, "\tphi %+F in succ %+F,%d -> live end\n", use, use_bl, pos));
				res |= lv_chk_state_end;

			/* any use outside the definition block keeps what alive past bl */
			if (use_bl != what_bl) {
				res = lv_chk_state_end | lv_chk_state_out;

	/* this is the complicated case */
		bitset_t *visited = bitset_alloca(lv->n_blocks);
		bitset_t *to_visit = bitset_alloca(lv->n_blocks);
		bitset_t *next = bitset_alloca(lv->n_blocks);
		bitset_t *uses = bitset_alloca(lv->n_blocks);
		bl_info_t *def = get_block_info(lv, what_bl);
		bl_info_t *bli = get_block_info(lv, bl);

		const ir_edge_t *edge;

		/* first, collect the ids of all blocks containing uses of what */
		DBG((lv->dbg, LEVEL_2, "lv check different block %+F in %+F\n", what, bl));
		foreach_out_edge (what, edge) {
			ir_node *user = get_edge_src_irn(edge);

			if (!is_liveness_node(user))
			/* NOTE(review): skip/continue statement truncated */

			stat_ev_cnt_inc(uses);
			use_bl = get_nodes_block(user);
			/* NOTE(review): a guard (presumably is_Phi(user)) truncated here */
				int pos = get_edge_src_pos(edge);

				use_bl = get_Block_cfgpred_block(use_bl, pos);
				bi = get_block_info(lv, use_bl);
				/* NOTE(review): a condition (presumably use_bl == bl) truncated */
					res |= lv_chk_state_end | lv_chk_state_in;

				bitset_set(uses, bi->id);
			/* NOTE(review): bracketing of the non-Phi alternative truncated */
				bi = get_block_info(lv, use_bl);
				bitset_set(uses, bi->id);
				/* NOTE(review): a condition (presumably use_bl == bl) truncated */
					res |= lv_chk_state_in;

		DBG((lv->dbg, LEVEL_2, "\tuses: %B, #: %d\n", uses, bitset_popcnt(uses)));

		/* the definition's own block never counts as a use block for the search */
		bitset_clear(uses, def->id);
		/* start the worklist search at the block under investigation */
		bitset_set(to_visit, bli->id);
		/* NOTE(review): the `do {` opener of the worklist loop is truncated */
			int id = bitset_next_set(to_visit, 0);
			bl_info_t *bi = lv->map[id];

			stat_ev_cnt_inc(iter);
			DBG((lv->dbg, LEVEL_2, "\tto visit: %B\n", to_visit));
			DBG((lv->dbg, LEVEL_2, "\tvisited: %B\n", visited));

			/*
			 * if one of the blocks is reachable, the node must be live there.
			 * Note that this is not sufficient, since the nodes reachable
			 * via back edges are not contained in the red_reachable set.
			 */
			if (bitset_intersect(bi->red_reachable, uses)) {
				res = lv_chk_state_end | lv_chk_state_out | lv_chk_state_in;
			/* NOTE(review): loop-exit statement (break/goto) truncated */

			/*
			 * if not, we have to check the back edges in question, if
			 * they lead to places which are reachable.
			 */
			bitset_set(visited, id);
			bitset_or(visited, bi->red_reachable);

			/* back edge targets reachable from here AND dominated by the def */
			bitset_copy(next, bi->be_tgt_reach);
			bitset_and(next, def->be_tgt_dom);
			DBG((lv->dbg, LEVEL_2, "\tnext: %B\n----\n", next));

			if (bitset_intersect(uses, next)) {
				res = lv_chk_state_end | lv_chk_state_out | lv_chk_state_in;
			/* NOTE(review): loop-exit statement (break/goto) truncated */

			/* continue the search at the not-yet-visited back edge targets */
			bitset_or(to_visit, next);
			bitset_andnot(to_visit, visited);

		} while (!bitset_is_empty(to_visit));

	stat_ev_cnt_done(uses, "lv_chk_uses");
	stat_ev_cnt_done(iter, "lv_chk_iter");
	/* NOTE(review): the final `return res;` and closing brace are truncated */