diff --git a/ir/ana/irlivechk.c b/ir/ana/irlivechk.c
index 914374f..d4c4efb 100644
--- a/ir/ana/irlivechk.c
+++ b/ir/ana/irlivechk.c
@@ -21,7 +21,6 @@
  * @file    livechk.c
  * @date    21.04.2007
  * @author  Sebastian Hack
- * @version $Id$
  * @brief
  *
  * Liveness checks as developed by Benoit Boissinot, Fabrice Rastello and myself.
 
 #include <stdio.h>
 
+/* statev is expensive here, only enable when needed */
+#define DISABLE_STATEV
+
 #include "irgraph_t.h"
 #include "irnode_t.h"
-#include "irphase_t.h"
+#include "irnodemap.h"
 #include "iredges_t.h"
 
 #include "irprintf.h"
@@ -56,7 +58,7 @@
 
 #include "irlivechk.h"
 
-#include "statev.h"
+#include "statev_t.h"
 
 typedef struct bl_info_t {
        const ir_node *block;      /**< The block. */
@@ -73,49 +75,30 @@ typedef struct bl_info_t {
                                                                 in the reduced graph. */
 } bl_info_t;
 
-#define get_block_info(lv, bl) ((bl_info_t *) phase_get_irn_data(&(lv)->ph, bl))
-
 struct lv_chk_t {
-       ir_phase     ph;
-       const dfs_t *dfs;
-       int          n_blocks;
-       bitset_t    *back_edge_src;
-       bitset_t    *back_edge_tgt;
-       bl_info_t  **map;
+       ir_nodemap     block_infos;
+       struct obstack obst;
+       dfs_t         *dfs;
+       int            n_blocks;
+       bitset_t      *back_edge_src;
+       bitset_t      *back_edge_tgt;
+       bl_info_t    **map;
        DEBUG_ONLY(firm_dbg_module_t *dbg;)
 };
 
-static void *init_block_data(ir_phase *ph, const ir_node *irn)
-{
-       lv_chk_t *lv      = firm_container_of(ph, lv_chk_t, ph);
-       bl_info_t *bi     = (bl_info_t*) phase_alloc(ph, sizeof(bi[0]));
-
-       bi->id            = get_Block_dom_tree_pre_num(irn);
-       bi->block         = irn;
-       bi->red_reachable = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks);
-       bi->be_tgt_reach  = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks);
-       bi->be_tgt_calc   = 0;
-       return bi;
-}
-
-/**
- * Filter function to select all nodes for which liveness is computed.
- * @param irn A node.
- * @return    1 if the node shall be considered in liveness, 0 if not.
- */
-static inline int is_liveness_node(const ir_node *irn)
+static bl_info_t *get_block_info(lv_chk_t *lv, const ir_node *block)
 {
-       switch (get_irn_opcode(irn)) {
-       case iro_Block:
-       case iro_Bad:
-       case iro_End:
-       case iro_Anchor:
-               return 0;
-       default:
-               break;
+       bl_info_t *info = ir_nodemap_get(bl_info_t, &lv->block_infos, block);
+       if (info == NULL) {
+               info                = OALLOC(&lv->obst, bl_info_t);
+               info->id            = get_Block_dom_tree_pre_num(block);
+               info->block         = block;
+               info->red_reachable = bitset_obstack_alloc(&lv->obst, lv->n_blocks);
+               info->be_tgt_reach  = bitset_obstack_alloc(&lv->obst, lv->n_blocks);
+               info->be_tgt_calc   = 0;
+               ir_nodemap_insert(&lv->block_infos, block, info);
        }
-
-       return 1;
+       return info;
 }
 
 /**
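Note on the hunk above: it is the heart of the change. The ir_phase callback init_block_data is gone, and per-block data now lives in an explicit ir_nodemap backed by a private obstack, allocated lazily on the first lookup in get_block_info(). For readers unfamiliar with the idiom, here is a minimal, self-contained sketch of the same lazy-allocation pattern outside of libfirm, using the GNU obstack from <obstack.h> (the patch uses libfirm's bundled obstack). ctx_t, info_t and get_info() are illustrative stand-ins for lv_chk_t, bl_info_t and get_block_info(), and the dense integer key plays the role of the dominator-tree pre-order number; none of these names appear in the patch.

    #include <obstack.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define obstack_chunk_alloc malloc
    #define obstack_chunk_free  free

    typedef struct info_t {
        int id;        /* dense key, analogous to the dom-tree pre-order number */
        int visited;   /* stand-in for the per-block bitsets */
    } info_t;

    typedef struct ctx_t {
        struct obstack obst;   /* arena owning all info_t records */
        info_t       **map;    /* key -> info, NULL until first request */
        int            n_keys;
    } ctx_t;

    /* Create the record for `id` on first use, return the cached one later. */
    static info_t *get_info(ctx_t *ctx, int id)
    {
        info_t *info = ctx->map[id];
        if (info == NULL) {
            info          = obstack_alloc(&ctx->obst, sizeof(*info));
            info->id      = id;
            info->visited = 0;
            ctx->map[id]  = info;
        }
        return info;
    }

    int main(void)
    {
        ctx_t ctx;
        obstack_init(&ctx.obst);
        ctx.n_keys = 8;
        ctx.map    = calloc(ctx.n_keys, sizeof(ctx.map[0]));

        info_t *a = get_info(&ctx, 3);
        info_t *b = get_info(&ctx, 3);
        printf("same record: %s\n", a == b ? "yes" : "no");

        free(ctx.map);
        obstack_free(&ctx.obst, NULL);  /* releases every info_t at once */
        return 0;
    }

The appeal of the pattern is the same as in the patch: records are created only for blocks that are actually queried, and a single obstack_free() in the destructor releases all of them at once.
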
@@ -135,8 +118,6 @@ static void red_trans_closure(lv_chk_t *lv)
                const ir_node *bl = (const ir_node*) dfs_get_post_num_node(lv->dfs, i);
                bl_info_t *bi = get_block_info(lv, bl);
 
-               const ir_edge_t *edge;
-
                bitset_set(bi->red_reachable, bi->id);
                foreach_block_succ (bl, edge) {
                        ir_node *succ = get_edge_src_irn(edge);
@@ -168,8 +149,6 @@ static void compute_back_edge_chain(lv_chk_t *lv, const ir_node *bl)
        bitset_t *tmp = bitset_alloca(lv->n_blocks);
        bl_info_t *bi = get_block_info(lv, bl);
 
-       size_t elm;
-
        DBG((lv->dbg, LEVEL_2, "computing T_%d\n", bi->id));
 
        /* put all back edge sources reachable (reduced) from here in tmp */
@@ -183,7 +162,6 @@ static void compute_back_edge_chain(lv_chk_t *lv, const ir_node *bl)
        /* iterate over them ... */
        bitset_foreach(tmp, elm) {
                bl_info_t *si = lv->map[elm];
-               const ir_edge_t *edge;
 
                /* and find back edge targets which are not reduced reachable from bl */
                foreach_block_succ (si->block, edge) {
@@ -205,7 +183,6 @@ static void compute_back_edge_chain(lv_chk_t *lv, const ir_node *bl)
 
 static inline void compute_back_edge_chains(lv_chk_t *lv)
 {
-       size_t elm;
        int i, n;
 
        DBG((lv->dbg, LEVEL_2, "back edge sources: %B\n", lv->back_edge_src));
@@ -217,8 +194,6 @@ static inline void compute_back_edge_chains(lv_chk_t *lv)
                const ir_node *bl = (const ir_node*) dfs_get_post_num_node(lv->dfs, i);
                bl_info_t *bi     = get_block_info(lv, bl);
 
-               const ir_edge_t *edge;
-
                if (!bitset_is_set(lv->back_edge_tgt, bi->id)) {
                        foreach_block_succ (bl, edge) {
                                ir_node *succ = get_edge_src_irn(edge);
@@ -240,30 +215,29 @@ static inline void compute_back_edge_chains(lv_chk_t *lv)
        }
 }
 
-lv_chk_t *lv_chk_new(ir_graph *irg, const dfs_t *dfs)
+lv_chk_t *lv_chk_new(ir_graph *irg)
 {
        lv_chk_t *res = XMALLOC(lv_chk_t);
-       struct obstack *obst;
        int i;
 
        assure_doms(irg);
 
        stat_ev_tim_push();
-       phase_init(&res->ph, irg, init_block_data);
-       obst = phase_obst(&res->ph);
+       ir_nodemap_init(&res->block_infos, irg);
+       obstack_init(&res->obst);
 
        FIRM_DBG_REGISTER(res->dbg, "ir.ana.lvchk");
 
-       res->dfs           = dfs;
+       res->dfs           = dfs_new(&absgraph_irg_cfg_succ, irg);
        res->n_blocks      = dfs_get_n_nodes(res->dfs);
-       res->back_edge_src = bitset_obstack_alloc(obst, res->n_blocks);
-       res->back_edge_tgt = bitset_obstack_alloc(obst, res->n_blocks);
-       res->map           = OALLOCNZ(obst, bl_info_t*, res->n_blocks);
+       res->back_edge_src = bitset_obstack_alloc(&res->obst, res->n_blocks);
+       res->back_edge_tgt = bitset_obstack_alloc(&res->obst, res->n_blocks);
+       res->map           = OALLOCNZ(&res->obst, bl_info_t*, res->n_blocks);
 
        /* fill the map which maps pre_num to block infos */
        for (i = res->n_blocks - 1; i >= 0; --i) {
                ir_node *irn  = (ir_node *) dfs_get_pre_num_node(res->dfs, i);
-               bl_info_t *bi = (bl_info_t*) phase_get_or_set_irn_data(&res->ph, irn);
+               bl_info_t *bi = get_block_info(res, irn);
                assert(bi->id < res->n_blocks);
                assert(res->map[bi->id] == NULL);
                res->map[bi->id] = bi;
@@ -295,20 +269,13 @@ lv_chk_t *lv_chk_new(ir_graph *irg, const dfs_t *dfs)
 
 void lv_chk_free(lv_chk_t *lv)
 {
-       phase_deinit(&lv->ph);
+       dfs_free(lv->dfs);
+       obstack_free(&lv->obst, NULL);
+       ir_nodemap_destroy(&lv->block_infos);
        xfree(lv);
 }
 
-/**
- * Check a nodes liveness situation of a block.
- * This routine considers both cases, the live in and end/out case.
- *
- * @param lv   The liveness check environment.
- * @param bl   The block under investigation.
- * @param var  The node to check for.
- * @return     A bitmask of lv_chk_state_XXX fields.
- */
-unsigned lv_chk_bl_xxx(const lv_chk_t *lv, const ir_node *bl, const ir_node *var)
+unsigned lv_chk_bl_xxx(lv_chk_t *lv, const ir_node *bl, const ir_node *var)
 {
        int res  = 0;
        ir_node *def_bl;
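Taken together, the hunks above change the public shape of the checker: lv_chk_new() now takes only the ir_graph and builds its own DFS via dfs_new(&absgraph_irg_cfg_succ, irg), lv_chk_free() tears that DFS down together with the obstack and the node map, and lv_chk_bl_xxx() loses its const qualifier because block infos are now allocated on demand. A hedged usage sketch of the new calling convention follows; irg, block and value are assumed to come from the surrounding code, and lv_chk_state_end is used on the assumption that it is one of the lv_chk_state_XXX fields mentioned in the removed doc comment.

    /* Illustrative only; declarations come from irlivechk.h. */
    lv_chk_t *lv    = lv_chk_new(irg);           /* DFS is built internally now */
    unsigned  state = lv_chk_bl_xxx(lv, block, value);
    if (state & lv_chk_state_end) {              /* assumed flag name */
        /* value is live at the end of block */
    }
    lv_chk_free(lv);                             /* also frees the internal DFS */
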
@@ -336,8 +303,6 @@ unsigned lv_chk_bl_xxx(const lv_chk_t *lv, const ir_node *bl, const ir_node *var
         * the algorithm is simple. Just check for uses not inside this block.
         */
        if (def_bl == bl) {
-               const ir_edge_t *edge;
-
                stat_ev("lv_chk_def_block");
                DBG((lv->dbg, LEVEL_2, "lv check same block %+F in %+F\n", var, bl));
                foreach_out_edge (var, edge) {
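The comment retained in this hunk ("the algorithm is simple. Just check for uses not inside this block.") deserves one sentence of expansion: when the definition sits in the queried block itself, the value is live at the block's end exactly if some use escapes the block, where a Phi use counts as a use at the end of the predecessor block that feeds it. The following is a simplified sketch of such a loop, not the literal body of lv_chk_bl_xxx, and the lv_chk_state_* names are again assumed rather than quoted from the header.

    foreach_out_edge(var, edge) {
        ir_node *use    = get_edge_src_irn(edge);
        ir_node *use_bl = get_nodes_block(use);

        /* a Phi uses the value at the end of the matching predecessor */
        if (is_Phi(use))
            use_bl = get_Block_cfgpred_block(use_bl, get_edge_src_pos(edge));

        if (use_bl != def_bl) {
            res |= lv_chk_state_end | lv_chk_state_out;  /* assumed names */
            break;
        }
    }
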
@@ -384,7 +349,6 @@ unsigned lv_chk_bl_xxx(const lv_chk_t *lv, const ir_node *bl, const ir_node *var
 
                size_t i;
                unsigned min_dom, max_dom;
-               const ir_edge_t *edge;
 
                /* if the block has no DFS info, it cannot be reached.
                 * This can happen in functions with endless loops.