X-Git-Url: http://nsz.repo.hu/git/?a=blobdiff_plain;f=ir%2Fana%2Firlivechk.c;h=89b12f101ba712bfac227ee89e531dc41f825724;hb=6f2df337ba40d5c7461f9459fd57efbcc0686c9b;hp=eae3a6b9f4e7710d0d49c65935f22d75d50fe5b8;hpb=1ce3e22f3329f4186b264e1ca441fe5f8e45aa7f;p=libfirm diff --git a/ir/ana/irlivechk.c b/ir/ana/irlivechk.c index eae3a6b9f..89b12f101 100644 --- a/ir/ana/irlivechk.c +++ b/ir/ana/irlivechk.c @@ -21,8 +21,7 @@ * @file livechk.c * @date 21.04.2007 * @author Sebastian Hack - * @version $Id$ - * @summary + * @brief * * Liveness checks as developed by Benoit Boissinot, Fabrice Rastello and myself. * @@ -37,15 +36,16 @@ * * The precomputation remains valid as long as the CFG is not altered. */ -#ifdef HAVE_CONFIG_H #include -#endif #include +/* statev is expensive here, only enable when needed */ +#define DISABLE_STATEV + #include "irgraph_t.h" #include "irnode_t.h" -#include "irphase_t.h" +#include "irnodemap.h" #include "iredges_t.h" #include "irprintf.h" @@ -58,10 +58,10 @@ #include "irlivechk.h" -#include "statev.h" +#include "statev_t.h" -typedef struct _bl_info_t { - ir_node *block; /**< The block. */ +typedef struct bl_info_t { + const ir_node *block; /**< The block. */ int be_tgt_calc : 1; int id : 31; /**< a tight number for the block. @@ -70,35 +70,35 @@ typedef struct _bl_info_t { bitset_t *red_reachable; /**< Holds all id's if blocks reachable in the CFG modulo back edges. */ - bitset_t *be_tgt_reach; /**< target blocks of back edges whose + bitset_t *be_tgt_reach; /**< target blocks of back edges whose sources are reachable from this block in the reduced graph. */ } bl_info_t; -#define get_block_info(lv, bl) ((bl_info_t *) phase_get_irn_data(&(lv)->ph, bl)) - -struct _lv_chk_t { - ir_phase ph; - dfs_t *dfs; +struct lv_chk_t { + ir_nodemap block_infos; + struct obstack obst; + dfs_t *dfs; + int n_blocks; + bitset_t *back_edge_src; + bitset_t *back_edge_tgt; + bl_info_t **map; DEBUG_ONLY(firm_dbg_module_t *dbg;) - int n_blocks; - bitset_t *back_edge_src; - bitset_t *back_edge_tgt; - bl_info_t **map; }; -static void *init_block_data(ir_phase *ph, ir_node *irn, void *old) +static bl_info_t *get_block_info(lv_chk_t *lv, const ir_node *block) { - lv_chk_t *lv = container_of(ph, lv_chk_t, ph); - bl_info_t *bi = phase_alloc(ph, sizeof(bi[0])); - - bi->id = get_Block_dom_tree_pre_num(irn); - bi->block = irn; - bi->red_reachable = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks); - bi->be_tgt_reach = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks); - bi->be_tgt_calc = 0; - (void) old; - return bi; + bl_info_t *info = ir_nodemap_get(bl_info_t, &lv->block_infos, block); + if (info == NULL) { + info = OALLOC(&lv->obst, bl_info_t); + info->id = get_Block_dom_tree_pre_num(block); + info->block = block; + info->red_reachable = bitset_obstack_alloc(&lv->obst, lv->n_blocks); + info->be_tgt_reach = bitset_obstack_alloc(&lv->obst, lv->n_blocks); + info->be_tgt_calc = 0; + ir_nodemap_insert(&lv->block_infos, block, info); + } + return info; } /** @@ -106,14 +106,16 @@ static void *init_block_data(ir_phase *ph, ir_node *irn, void *old) * @param irn A node. * @return 1 if the node shall be considered in liveness, 0 if not. 
*/ -static INLINE int is_liveness_node(const ir_node *irn) +static inline int is_liveness_node(const ir_node *irn) { - switch(get_irn_opcode(irn)) { + switch (get_irn_opcode(irn)) { case iro_Block: case iro_Bad: case iro_End: + case iro_Anchor: return 0; - default:; + default: + break; } return 1; @@ -133,11 +135,9 @@ static void red_trans_closure(lv_chk_t *lv) int i, n; for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) { - ir_node *bl = dfs_get_post_num_node(lv->dfs, i); + const ir_node *bl = (const ir_node*) dfs_get_post_num_node(lv->dfs, i); bl_info_t *bi = get_block_info(lv, bl); - const ir_edge_t *edge; - bitset_set(bi->red_reachable, bi->id); foreach_block_succ (bl, edge) { ir_node *succ = get_edge_src_irn(edge); @@ -164,13 +164,11 @@ static void red_trans_closure(lv_chk_t *lv) } -static void compute_back_edge_chain(lv_chk_t *lv, ir_node *bl) +static void compute_back_edge_chain(lv_chk_t *lv, const ir_node *bl) { bitset_t *tmp = bitset_alloca(lv->n_blocks); bl_info_t *bi = get_block_info(lv, bl); - bitset_pos_t elm; - DBG((lv->dbg, LEVEL_2, "computing T_%d\n", bi->id)); /* put all back edge sources reachable (reduced) from here in tmp */ @@ -184,7 +182,6 @@ static void compute_back_edge_chain(lv_chk_t *lv, ir_node *bl) /* iterate over them ... */ bitset_foreach(tmp, elm) { bl_info_t *si = lv->map[elm]; - const ir_edge_t *edge; /* and find back edge targets which are not reduced reachable from bl */ foreach_block_succ (si->block, edge) { @@ -204,9 +201,8 @@ static void compute_back_edge_chain(lv_chk_t *lv, ir_node *bl) } -static INLINE void compute_back_edge_chains(lv_chk_t *lv) +static inline void compute_back_edge_chains(lv_chk_t *lv) { - bitset_pos_t elm; int i, n; DBG((lv->dbg, LEVEL_2, "back edge sources: %B\n", lv->back_edge_src)); @@ -215,10 +211,8 @@ static INLINE void compute_back_edge_chains(lv_chk_t *lv) } for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) { - ir_node *bl = dfs_get_post_num_node(lv->dfs, i); - bl_info_t *bi = get_block_info(lv, bl); - - const ir_edge_t *edge; + const ir_node *bl = (const ir_node*) dfs_get_post_num_node(lv->dfs, i); + bl_info_t *bi = get_block_info(lv, bl); if (!bitset_is_set(lv->back_edge_tgt, bi->id)) { foreach_block_succ (bl, edge) { @@ -233,42 +227,39 @@ static INLINE void compute_back_edge_chains(lv_chk_t *lv) } } } + + for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) { + const ir_node *bl = (const ir_node*) dfs_get_post_num_node(lv->dfs, i); + bl_info_t *bi = get_block_info(lv, bl); + bitset_set(bi->be_tgt_reach, bi->id); + } } lv_chk_t *lv_chk_new(ir_graph *irg) { - lv_chk_t *res = xmalloc(sizeof(res[0])); - struct obstack *obst; + lv_chk_t *res = XMALLOC(lv_chk_t); int i; - phase_init(&res->ph, "liveness check", irg, PHASE_DEFAULT_GROWTH, init_block_data, NULL); - obst = phase_obst(&res->ph); + assure_doms(irg); + + stat_ev_tim_push(); + ir_nodemap_init(&res->block_infos, irg); + obstack_init(&res->obst); FIRM_DBG_REGISTER(res->dbg, "ir.ana.lvchk"); res->dfs = dfs_new(&absgraph_irg_cfg_succ, irg); res->n_blocks = dfs_get_n_nodes(res->dfs); - res->back_edge_src = bitset_obstack_alloc(obst, res->n_blocks); - res->back_edge_tgt = bitset_obstack_alloc(obst, res->n_blocks); - res->map = obstack_alloc(obst, res->n_blocks * sizeof(res->map[0])); - -#if 0 - { - char name[256]; - FILE *f; - ir_snprintf(name, sizeof(name), "dfs_%F.dot", irg); - if ((f = fopen(name, "wt")) != NULL) { - dfs_dump(res->dfs, f); - fclose(f); - } - dump_ir_block_graph(irg, "-lvchk"); - } -#endif + res->back_edge_src = bitset_obstack_alloc(&res->obst, 
res->n_blocks); + res->back_edge_tgt = bitset_obstack_alloc(&res->obst, res->n_blocks); + res->map = OALLOCNZ(&res->obst, bl_info_t*, res->n_blocks); /* fill the map which maps pre_num to block infos */ for (i = res->n_blocks - 1; i >= 0; --i) { - ir_node *irn = dfs_get_pre_num_node(res->dfs, i); - bl_info_t *bi = phase_get_or_set_irn_data(&res->ph, irn); + ir_node *irn = (ir_node *) dfs_get_pre_num_node(res->dfs, i); + bl_info_t *bi = get_block_info(res, irn); + assert(bi->id < res->n_blocks); + assert(res->map[bi->id] == NULL); res->map[bi->id] = bi; } @@ -278,228 +269,47 @@ lv_chk_t *lv_chk_new(ir_graph *irg) /* compute back edge chains */ compute_back_edge_chains(res); - -DEBUG_ONLY({ - DBG((res->dbg, LEVEL_1, "liveness chk in %+F\n", irg)); - for (i = res->n_blocks - 1; i >= 0; --i) { - ir_node *irn = dfs_get_pre_num_node(res->dfs, i); - bl_info_t *bi = get_block_info(res, irn); - DBG((res->dbg, LEVEL_1, "lv_chk for %d -> %+F\n", i, irn)); - DBG((res->dbg, LEVEL_1, "\tred reach: %B\n", bi->red_reachable)); - DBG((res->dbg, LEVEL_1, "\ttgt reach: %B\n", bi->be_tgt_reach)); - } - }) +#ifndef NDEBUG + DBG((res->dbg, LEVEL_1, "liveness chk in %+F\n", irg)); + for (i = res->n_blocks - 1; i >= 0; --i) { + const ir_node *irn = (const ir_node*) dfs_get_pre_num_node(res->dfs, i); + bl_info_t *bi = get_block_info(res, irn); + DBG((res->dbg, LEVEL_1, "lv_chk for %d -> %+F\n", i, irn)); + DBG((res->dbg, LEVEL_1, "\tred reach: %B\n", bi->red_reachable)); + DBG((res->dbg, LEVEL_1, "\ttgt reach: %B\n", bi->be_tgt_reach)); + } +#endif DBG((res->dbg, LEVEL_1, "back edge src: %B\n", res->back_edge_src)); DBG((res->dbg, LEVEL_1, "back edge tgt: %B\n", res->back_edge_tgt)); + stat_ev_tim_pop("lv_chk_cons_time"); return res; } void lv_chk_free(lv_chk_t *lv) { - obstack_free(phase_obst(&lv->ph), NULL); dfs_free(lv->dfs); + obstack_free(&lv->obst, NULL); + ir_nodemap_destroy(&lv->block_infos); xfree(lv); } -/** - * Check if a node is live at the end of a block. - * This function is for internal use as its code is shared between - * the in/end routines below. It is almost the "live_end" routine - * but passing in the bitset for recording the blocks where the variable - * is used saves some effort in the "live_in" routine. See below for - * details. - * - * @param lv The liveness check environment. - * @param what The node to check for. - * @param bl The block under investigation. - * @param uses A bitset where this routine records all ids of blocks - * where this variable is used. Note that the bitset - * is only guaranteed to be filled if the node was not - * live at the end of the block. - * @return 1, if @p what is live at the end at @p bl. 
- */ -unsigned lv_chk_bl_in_mask(const lv_chk_t *lv, const ir_node *bl, const ir_node *var) -{ - stat_ev_cnt_decl(uses); - - ir_node *def_bl; - const ir_edge_t *edge; - - int res = 0; - - assert(is_Block(bl) && "can only check for liveness in a block"); - - if (!is_liveness_node(var)) - return 0; - - def_bl = get_nodes_block(var); - if (def_bl == bl || !block_dominates(def_bl, bl)) { - goto end; - } - - else { - bitset_t *uses = bitset_alloca(lv->n_blocks); - bitset_t *tmp = bitset_alloca(lv->n_blocks); - int min_dom = get_Block_dom_tree_pre_num(def_bl) + 1; - int max_dom = get_Block_dom_max_subtree_pre_num(def_bl); - bl_info_t *bli = get_block_info(lv, bl); - int i; - - DBG((lv->dbg, LEVEL_2, "lv check of %+F, def=%+F,%d != q=%+F,%d\n", - var, def_bl, min_dom - 1, bl, bli->id)); - - foreach_out_edge (var, edge) { - ir_node *user = get_edge_src_irn(edge); - ir_node *use_bl; - bl_info_t *bi; - - if (!is_liveness_node(user)) - continue; - - stat_ev_cnt_inc(uses); - use_bl = get_nodes_block(user); - if (is_Phi(user)) { - int pos = get_edge_src_pos(edge); - use_bl = get_Block_cfgpred_block(use_bl, pos); - } - - if (use_bl == bl) { - res = lv_chk_state_in; - DBG((lv->dbg, LEVEL_2, "\tuse directly in block %+F by %+F\n", use_bl, user)); - goto end; - } - - bi = get_block_info(lv, use_bl); - bitset_set(uses, bi->id); - } - - DBG((lv->dbg, LEVEL_2, "\tuses: %B\n", uses)); - - { - - bitset_copy(tmp, bli->be_tgt_reach); - bitset_set(tmp, bli->id); - - DBG((lv->dbg, LEVEL_2, "\tbe tgt reach: %B, dom span: [%d, %d]\n", tmp, min_dom, max_dom)); - for (i = bitset_next_set(tmp, min_dom); i >= 0 && i <= max_dom; i = bitset_next_set(tmp, i + 1)) { - bl_info_t *ti = lv->map[i]; - DBG((lv->dbg, LEVEL_2, "\tlooking from %d: seeing %B\n", ti->id, ti->red_reachable)); - if (bitset_intersect(ti->red_reachable, uses)) { - res = lv_chk_state_in; - goto end; - } - - bitset_andnot(tmp, ti->red_reachable); - } - } - } - -end: - return res; -} - -unsigned lv_chk_bl_end_mask(const lv_chk_t *lv, const ir_node *bl, const ir_node *var) +unsigned lv_chk_bl_xxx(lv_chk_t *lv, const ir_node *bl, const ir_node *var) { - stat_ev_cnt_decl(uses); - + int res = 0; ir_node *def_bl; - const ir_edge_t *edge; - - int res = 0; - - assert(is_Block(bl) && "can only check for liveness in a block"); - - if (!is_liveness_node(var)) - return 0; - - def_bl = get_nodes_block(var); - if (!block_dominates(def_bl, bl)) { - goto end; - } - - else { - bitset_t *uses = bitset_alloca(lv->n_blocks); - bitset_t *tmp = bitset_alloca(lv->n_blocks); - int min_dom = get_Block_dom_tree_pre_num(def_bl) + 1; - int max_dom = get_Block_dom_max_subtree_pre_num(def_bl); - bl_info_t *bli = get_block_info(lv, bl); - int i; - - DBG((lv->dbg, LEVEL_2, "lv end check of %+F, def=%+F,%d != q=%+F,%d\n", - var, def_bl, min_dom - 1, bl, bli->id)); - - foreach_out_edge (var, edge) { - ir_node *user = get_edge_src_irn(edge); - ir_node *use_bl; - bl_info_t *bi; - - if (!is_liveness_node(user)) - continue; - - stat_ev_cnt_inc(uses); - use_bl = get_nodes_block(user); - if (is_Phi(user)) { - int pos = get_edge_src_pos(edge); - use_bl = get_Block_cfgpred_block(use_bl, pos); - - if (bl == use_bl) - res |= lv_chk_state_end; - } - - bi = get_block_info(lv, use_bl); - if (use_bl != bl || bitset_is_set(lv->back_edge_tgt, bi->id)) - bitset_set(uses, bi->id); - } - - DBG((lv->dbg, LEVEL_2, "\tuses: %B\n", uses)); - - bitset_copy(tmp, bli->be_tgt_reach); - bitset_set(tmp, bli->id); - - DBG((lv->dbg, LEVEL_2, "\tbe tgt reach + current: %B, dom span: [%d, %d]\n", tmp, min_dom, max_dom)); - 
for (i = bitset_next_set(tmp, min_dom); i >= 0 && i <= max_dom; i = bitset_next_set(tmp, i + 1)) { - bl_info_t *ti = lv->map[i]; - DBG((lv->dbg, LEVEL_2, "\tlooking from %d: seeing %B\n", ti->id, ti->red_reachable)); - if (bitset_intersect(ti->red_reachable, uses)) { - res = lv_chk_state_out | lv_chk_state_end; - goto end; - } - - bitset_andnot(tmp, ti->red_reachable); - } - } - -end: - return res; -} - -/** - * Check a nodes liveness situation of a block. - * This routine considers both cases, the live in and end/out case. - * - * @param lv The liveness check environment. - * @param bl The block under investigation. - * @param var The node to check for. - * @return A bitmask of lv_chk_state_XXX fields. - */ -unsigned lv_chk_bl_xxx(const lv_chk_t *lv, const ir_node *bl, const ir_node *var) -{ stat_ev_cnt_decl(uses); stat_ev_cnt_decl(iter); - int res = 0; - ir_node *def_bl; - assert(is_Block(bl) && "can only check for liveness in a block"); /* If the variable ist no liveness related var, bail out. */ if (!is_liveness_node(var)) return 0; - stat_ev_ctx_push_fobj("node", var); - stat_ev("lv_chk"); + stat_ev_ctx_push_fmt("lv_chk", "%u", get_irn_idx(var)); + stat_ev_tim_push(); /* If there is no dominance relation, go out, too */ def_bl = get_nodes_block(var); @@ -513,8 +323,6 @@ unsigned lv_chk_bl_xxx(const lv_chk_t *lv, const ir_node *bl, const ir_node *var * the algorithm is simple. Just check for uses not inside this block. */ if (def_bl == bl) { - const ir_edge_t *edge; - stat_ev("lv_chk_def_block"); DBG((lv->dbg, LEVEL_2, "lv check same block %+F in %+F\n", var, bl)); foreach_out_edge (var, edge) { @@ -550,17 +358,17 @@ unsigned lv_chk_bl_xxx(const lv_chk_t *lv, const ir_node *bl, const ir_node *var * We try to gather as much information as possible during looking * at the uses. * - * Note that we know for shure that bl != def_bl. That is sometimes + * Note that we know for sure that bl != def_bl. That is sometimes * silently exploited below. */ else { - bitset_t *tmp = bitset_alloca(lv->n_blocks); - bitset_t *uses = bitset_alloca(lv->n_blocks); bl_info_t *def = get_block_info(lv, def_bl); bl_info_t *bli = get_block_info(lv, bl); + bitset_t *uses = bitset_alloca(lv->n_blocks); + bitset_t *Tq; - int i, min_dom, max_dom; - const ir_edge_t *edge; + size_t i; + unsigned min_dom, max_dom; /* if the block has no DFS info, it cannot be reached. * This can happen in functions with endless loops. @@ -571,6 +379,7 @@ unsigned lv_chk_bl_xxx(const lv_chk_t *lv, const ir_node *bl, const ir_node *var if (!bli) goto end; + (void) def; DBG((lv->dbg, LEVEL_2, "lv check %+F (def in %+F #%d) in different block %+F #%d\n", var, def_bl, def->id, bl, bli->id)); @@ -607,8 +416,9 @@ unsigned lv_chk_bl_xxx(const lv_chk_t *lv, const ir_node *bl, const ir_node *var res |= mask; bi = get_block_info(lv, use_bl); - bitset_set(uses, bi->id); + if (bi) + bitset_set(uses, bi->id); } /* get the dominance range which really matters. all uses outside @@ -622,29 +432,27 @@ unsigned lv_chk_bl_xxx(const lv_chk_t *lv, const ir_node *bl, const ir_node *var /* prepare a set with all reachable back edge targets. * this will determine our "looking points" from where - * we will search/find the calculated uses. - * - * Since there might be no reachable back edge targets - * we add the current block also since reachability of - * uses are then checked from there. */ - bitset_copy(tmp, bli->be_tgt_reach); - bitset_set (tmp, bli->id); + * we will search/find the calculated uses. 
*/ + Tq = bli->be_tgt_reach; /* now, visit all viewing points in the temporary bitset lying * in the dominance range of the variable. Note that for reducible * flow-graphs the first iteration is sufficient and the loop * will be left. */ - DBG((lv->dbg, LEVEL_2, "\tbe tgt reach: %B, dom span: [%d, %d]\n", tmp, min_dom, max_dom)); - for (i = bitset_next_set(tmp, min_dom); i >= 0 && i <= max_dom; i = bitset_next_set(tmp, i + 1)) { + DBG((lv->dbg, LEVEL_2, "\tbe tgt reach: %B, dom span: [%d, %d]\n", Tq, min_dom, max_dom)); + i = bitset_next_set(Tq, min_dom); + while (i <= max_dom) { bl_info_t *ti = lv->map[i]; int use_in_current_block = bitset_is_set(uses, ti->id); + stat_ev_cnt_inc(iter); + /* - * This is somehat tricky. Since this routine handles both, live in + * This is somewhat tricky. Since this routine handles both, live in * and end/out we have to handle all the border cases correctly. * Each node is in its own red_reachable set (see calculation * function above). That means, that in the case where bl == t, the - * intersection check of uses and rechability below will always + * intersection check of uses and reachability below will always * find an intersection, namely t. * * However, if a block contains a use and the variable is dead @@ -670,8 +478,6 @@ unsigned lv_chk_bl_xxx(const lv_chk_t *lv, const ir_node *bl, const ir_node *var goto end; } - bitset_andnot(tmp, ti->red_reachable); - /* * if we deleted a use do to the commentary above, we have to * re-add it since it might be visible from further view points @@ -679,14 +485,17 @@ unsigned lv_chk_bl_xxx(const lv_chk_t *lv, const ir_node *bl, const ir_node *var */ if (use_in_current_block) bitset_set(uses, ti->id); + + i = bitset_next_set(Tq, get_Block_dom_max_subtree_pre_num(ti->block) + 1); } } end: + stat_ev_tim_pop("lv_chk_query_time"); stat_ev_cnt_done(uses, "lv_chk_uses"); stat_ev_cnt_done(iter, "lv_chk_iter"); - stat_ev_ctx_pop(); + stat_ev_ctx_pop("lv_chk"); return res; }
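
For orientation, here is a minimal usage sketch of the interface this patch reworks, written against only the names visible above (lv_chk_new, lv_chk_bl_xxx, lv_chk_free and the lv_chk_state_* bits). The wrapper function, its name and the preconditions noted in its comments are illustrative assumptions, not something the patch itself adds.

#include "irgraph.h"
#include "irnode.h"
#include "irlivechk.h"

/* Illustrative only: ask whether `val` is live into and/or out of `block`.
 * Assumes `irg` already has dominance information and out-edges available,
 * which the code above relies on (assure_doms, foreach_out_edge). */
static unsigned query_liveness(ir_graph *irg, const ir_node *block, const ir_node *val)
{
	lv_chk_t *lv = lv_chk_new(irg);            /* precompute the per-block reachability sets */
	unsigned  st = lv_chk_bl_xxx(lv, block, val);

	if (st & lv_chk_state_in) {
		/* val is live at the entry of block */
	}
	if (st & (lv_chk_state_end | lv_chk_state_out)) {
		/* val is live at the end of block / leaves it on some path */
	}

	lv_chk_free(lv);    /* the precomputation is only valid while the CFG is unchanged */
	return st;
}

In practice a client would build the checker once per graph and reuse it for many (block, value) queries: as the file comment in the first hunk says, the per-block sets are computed up front and remain valid as long as the CFG is not altered, so each lv_chk_bl_xxx call is answered on demand without running a full liveness analysis.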