X-Git-Url: http://nsz.repo.hu/git/?a=blobdiff_plain;f=ir%2Fbe%2Fbelive.c;h=e9e578339587d060129e52a07b0d0903eb26ee09;hb=70481aa342e22f5f285dc863b366a56393d888af;hp=644316e65705cbc4a431fbedebd51282dc494b7b;hpb=1ce363f80e6a204d4011f85813362d9bd1d0e7e4;p=libfirm

diff --git a/ir/be/belive.c b/ir/be/belive.c
index 644316e65..e9e578339 100644
--- a/ir/be/belive.c
+++ b/ir/be/belive.c
@@ -24,9 +24,10 @@
  * @date 06.12.2004
  * @version $Id$
  */
-#ifdef HAVE_CONFIG_H
 #include "config.h"
-#endif
+
+/* statev is expensive here, only enable when needed */
+#define DISABLE_STATEV
 
 #include "impl.h"
 #include "iredges_t.h"
@@ -525,7 +526,7 @@ static void compute_liveness(be_lv_t *lv)
 	/*
 	 * inserting the variables sorted by their ID is probably
 	 * more efficient since the binary sorted set insertion
-	 * will not need to move arounf the data.
+	 * will not need to move around the data.
 	 * However, if sorting the variables a priori pays off
 	 * needs to be checked, hence the define.
 	 */
@@ -547,17 +548,23 @@ static void compute_liveness(be_lv_t *lv)
 void be_liveness_assure_sets(be_lv_t *lv)
 {
 	if (!lv->nodes) {
+		BE_TIMER_PUSH(t_live);
+
 		lv->nodes = bitset_malloc(2 * get_irg_last_idx(lv->irg));
 		phase_init(&lv->ph, "liveness", lv->irg, PHASE_DEFAULT_GROWTH, lv_phase_data_init, NULL);
 		compute_liveness(lv);
 		/* be_live_chk_compare(lv, lv->lvc); */
+
+		BE_TIMER_POP(t_live);
 	}
 }
 
 void be_liveness_assure_chk(be_lv_t *lv)
 {
 #ifndef USE_LIVE_CHK
+	BE_TIMER_PUSH(t_verify);
 	be_liveness_assure_sets(lv);
+	BE_TIMER_POP(t_verify);
 #else
 	(void) lv;
 #endif
@@ -576,9 +583,8 @@ void be_liveness_invalidate(be_lv_t *lv)
 /* Compute the inter block liveness for a graph. */
 be_lv_t *be_liveness(const be_irg_t *birg)
 {
-	be_lv_t *lv = xmalloc(sizeof(lv[0]));
+	be_lv_t *lv = XMALLOCZ(be_lv_t);
 
-	memset(lv, 0, sizeof(lv[0]));
 	lv->irg  = be_get_birg_irg(birg);
 	lv->birg = birg;
 #ifdef USE_LIVE_CHK
@@ -593,25 +599,32 @@ be_lv_t *be_liveness(const be_irg_t *birg)
 
 void be_liveness_recompute(be_lv_t *lv)
 {
-	unsigned last_idx = get_irg_last_idx(lv->irg);
+	unsigned last_idx;
+
+	BE_TIMER_PUSH(t_live);
+	last_idx = get_irg_last_idx(lv->irg);
 	if(last_idx >= bitset_size(lv->nodes)) {
 		bitset_free(lv->nodes);
 		lv->nodes = bitset_malloc(last_idx * 2);
-	}
-
-	else
+	} else
 		bitset_clear_all(lv->nodes);
 
 	phase_free(&lv->ph);
 	phase_init(&lv->ph, "liveness", lv->irg, PHASE_DEFAULT_GROWTH, lv_phase_data_init, NULL);
 	compute_liveness(lv);
+
+	BE_TIMER_POP(t_live);
 }
 
 
 void be_liveness_free(be_lv_t *lv)
 {
 	be_liveness_invalidate(lv);
-	free(lv);
+#ifdef USE_LIVE_CHK
+	lv_chk_free(lv->lvc);
+	dfs_free(lv->dfs);
+#endif
+	xfree(lv);
 }
 
 void be_liveness_remove(be_lv_t *lv, const ir_node *irn)
@@ -775,8 +788,7 @@ int be_check_dominance(ir_graph *irg)
 	return !problem_found;
 }
 
-void be_liveness_transfer(const arch_env_t *arch_env,
-                          const arch_register_class_t *cls,
+void be_liveness_transfer(const arch_register_class_t *cls,
                           ir_node *node, ir_nodeset_t *nodeset)
 {
 	int i, arity;
@@ -791,13 +803,11 @@ void be_liveness_transfer(const arch_env_t *arch_env,
 
 		foreach_out_edge(node, edge) {
 			ir_node *proj = get_edge_src_irn(edge);
-			if (arch_irn_consider_in_reg_alloc(arch_env, cls, proj)) {
+			if (arch_irn_consider_in_reg_alloc(cls, proj)) {
 				ir_nodeset_remove(nodeset, proj);
 			}
 		}
-	}
-
-	if (arch_irn_consider_in_reg_alloc(arch_env, cls, node)) {
+	} else if (arch_irn_consider_in_reg_alloc(cls, node)) {
 		ir_nodeset_remove(nodeset, node);
 	}
 
@@ -805,14 +815,14 @@ void be_liveness_transfer(const arch_env_t *arch_env,
 	for (i = 0; i < arity; ++i) {
 		ir_node *op = get_irn_n(node, i);
 
-		if (arch_irn_consider_in_reg_alloc(arch_env, cls, op))
+		if (arch_irn_consider_in_reg_alloc(cls, op))
 			ir_nodeset_insert(nodeset, op);
 	}
 }
 
 
 
-void be_liveness_end_of_block(const be_lv_t *lv, const arch_env_t *arch_env,
+void be_liveness_end_of_block(const be_lv_t *lv,
                               const arch_register_class_t *cls,
                               const ir_node *block, ir_nodeset_t *live)
 {
@@ -821,7 +831,7 @@ void be_liveness_end_of_block(const be_lv_t *lv, const arch_env_t *arch_env,
 	assert(lv->nodes && "live sets must be computed");
 	be_lv_foreach(lv, block, be_lv_state_end, i) {
 		ir_node *node = be_lv_get_irn(lv, block, i);
-		if(!arch_irn_consider_in_reg_alloc(arch_env, cls, node))
+		if (!arch_irn_consider_in_reg_alloc(cls, node))
 			continue;
 
 		ir_nodeset_insert(live, node);
@@ -830,14 +840,14 @@ void be_liveness_end_of_block(const be_lv_t *lv, const arch_env_t *arch_env,
 
 
 
-void be_liveness_nodes_live_at(const be_lv_t *lv, const arch_env_t *arch_env,
+void be_liveness_nodes_live_at(const be_lv_t *lv,
                                const arch_register_class_t *cls,
                                const ir_node *pos, ir_nodeset_t *live)
 {
 	const ir_node *bl = is_Block(pos) ? pos : get_nodes_block(pos);
 	ir_node *irn;
 
-	be_liveness_end_of_block(lv, arch_env, cls, bl, live);
+	be_liveness_end_of_block(lv, cls, bl, live);
 	sched_foreach_reverse(bl, irn) {
 		/*
 		 * If we encounter the node we want to insert the Perm after,
@@ -846,24 +856,7 @@ void be_liveness_nodes_live_at(const be_lv_t *lv, const arch_env_t *arch_env,
 		if(irn == pos)
 			return;
 
-		be_liveness_transfer(arch_env, cls, irn, live);
-	}
-}
-
-void be_liveness_nodes_live_at_input(const be_lv_t *lv,
-                                     const arch_env_t *arch_env,
-                                     const arch_register_class_t *cls,
-                                     const ir_node *pos, ir_nodeset_t *live)
-{
-	const ir_node *bl = is_Block(pos) ? pos : get_nodes_block(pos);
-	ir_node *irn;
-
-	assert(lv->nodes && "live sets must be computed");
-	be_liveness_end_of_block(lv, arch_env, cls, bl, live);
-	sched_foreach_reverse(bl, irn) {
-		be_liveness_transfer(arch_env, cls, irn, live);
-		if(irn == pos)
-			return;
+		be_liveness_transfer(cls, irn, live);
 	}
 }
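
Note on the interface change in the hunks above: after this patch, be_liveness_transfer(), be_liveness_end_of_block(), be_liveness_nodes_live_at() and arch_irn_consider_in_reg_alloc() no longer take an arch_env_t*; only the register class is passed. Below is a minimal sketch of how a caller that walks one block's schedule would look with the new prototypes. The helper name walk_block_liveness and its parameters are hypothetical (not part of this commit); only the be_liveness_*() and sched_foreach_reverse() calls follow the post-patch code in this diff, and ir_nodeset_init()/ir_nodeset_destroy() are assumed to be the usual irnodeset.h helpers. It essentially mirrors be_liveness_nodes_live_at() above, without the early exit at a given position.

/* Hypothetical caller, not part of this commit: collect, for one block,
 * the values of register class 'cls' that are live before each scheduled
 * node, using the post-patch prototypes (no arch_env_t* parameter).
 * Assumes be_liveness_assure_sets(lv) has already been called, since
 * be_liveness_end_of_block() asserts that the live sets are computed. */
static void walk_block_liveness(const be_lv_t *lv,
                                const arch_register_class_t *cls,
                                ir_node *block)
{
	ir_nodeset_t live;
	ir_node     *irn;

	ir_nodeset_init(&live);

	/* seed the set with everything live at the end of the block */
	be_liveness_end_of_block(lv, cls, block, &live);

	/* walk the schedule backwards; after each transfer the set holds
	 * the values live directly before 'irn' */
	sched_foreach_reverse(block, irn) {
		be_liveness_transfer(cls, irn, &live);
	}

	ir_nodeset_destroy(&live);
}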