+ mark_live_in(lv, use_block, irn);
+
+ for(i = 0, n = get_Block_n_cfgpreds(use_block); i < n; ++i) {
+ ir_node *pred_block = get_Block_cfgpred_block(use_block, i);
+ live_end_at_block(lv, irn, pred_block, visited, 1);
+ }
+ }
+ }
+}
+
+static void lv_remove_irn_walker(ir_node *bl, void *data)
+{
+ lv_remove_walker_t *w = data;
+ be_lv_remove(w->lv, bl, w->irn);
+}
+
/**
 * Render the low three liveness flag bits as a fixed three-character string.
 * Bit 0 -> 'i' (live in), bit 1 -> 'e' (live end), bit 2 -> 'o' (live out);
 * a '-' marks an unset bit. Bits above the low three are masked off.
 *
 * @param flags  the liveness flags of a node
 * @return a pointer to a static, read-only string (do not free)
 */
static const char *lv_flags_to_str(unsigned flags)
{
	/* Fully const so the table lives in read-only data; indexed by the
	 * low three flag bits. */
	static const char *const states[] = {
		"---",
		"i--",
		"-e-",
		"ie-",
		"--o",
		"i-o",
		"-eo",
		"ieo"
	};

	return states[flags & 7];
}
+
+static void lv_dump_block(void *context, FILE *f, const ir_node *bl)
+{
+ if(is_Block(bl)) {
+ be_lv_t *lv = context;
+ struct _be_lv_info_t *info = phase_get_irn_data(&lv->ph, bl);
+
+ fprintf(f, "liveness:\n");
+ if(info) {
+ unsigned n = info[0].u.head.n_members;
+ unsigned i;
+
+ for(i = 0; i < n; ++i) {
+ struct _be_lv_info_node_t *n = &info[i+1].u.node;
+ ir_fprintf(f, "%s %+F\n", lv_flags_to_str(n->flags), get_idx_irn(lv->irg, n->idx));
+ }
+ }
+ }
+}
+
+static void *lv_phase_data_init(ir_phase *phase, const ir_node *irn, void *old)
+{
+ struct _be_lv_info_t *info = phase_alloc(phase, LV_STD_SIZE * sizeof(info[0]));
+ (void) irn;
+ (void) old;
+
+ memset(info, 0, LV_STD_SIZE * sizeof(info[0]));
+ info[0].u.head.n_size = LV_STD_SIZE - 1;
+ return info;
+}
+
+static void collect_nodes(ir_node *irn, void *data)
+{
+ struct obstack *obst = data;
+ if (is_liveness_node(irn))
+ obstack_ptr_grow(obst, irn);
+}
+
+static int node_idx_cmp(const void *a, const void *b)
+{
+ const ir_node *p = *(ir_node **) a;
+ const ir_node *q = *(ir_node **) b;
+ int ia = get_irn_idx(p);
+ int ib = get_irn_idx(q);
+ return ia - ib;
+}
+
/**
 * Compute the liveness sets for all blocks of the graph.
 *
 * Collects every liveness-relevant node into a temporary array, optionally
 * sorts it by node index, and runs liveness_for_node() on each entry.
 * Registers the node-info dump hook once the sets exist.
 */
static void compute_liveness(be_lv_t *lv)
{
	struct obstack obst;
	struct _lv_walker_t w;
	ir_node **nodes;
	int i, n;

	stat_ev_tim_push();
	obstack_init(&obst);
	irg_walk_graph(lv->irg, collect_nodes, NULL, &obst);
	/* the growing object's size must be read before obstack_finish()
	 * closes it and returns its base address */
	n = obstack_object_size(&obst) / sizeof(nodes[0]);
	nodes = obstack_finish(&obst);

	/*
	 * inserting the variables sorted by their ID is probably
	 * more efficient since the binary sorted set insertion
	 * will not need to move around the data.
	 * However, if sorting the variables a priori pays off
	 * needs to be checked, hence the define.
	 */
#ifdef LV_COMPUTE_SORTED
	qsort(nodes, n, sizeof(nodes[0]), node_idx_cmp);
#endif

	w.lv = lv;
	/* scratch bitset sized to the node index range — presumably a
	 * visited set; confirm against liveness_for_node() */
	w.data = bitset_obstack_alloc(&obst, get_irg_last_idx(lv->irg));

	for (i = 0; i < n; ++i)
		liveness_for_node(nodes[i], &w);

	/* frees nodes and the scratch bitset in one go */
	obstack_free(&obst, NULL);
	register_hook(hook_node_info, &lv->hook_info);
	stat_ev_tim_pop("be_lv_sets_cons");
}
+
+void be_liveness_assure_sets(be_lv_t *lv)
+{
+ if (!lv->nodes) {
+ lv->nodes = bitset_malloc(2 * get_irg_last_idx(lv->irg));
+ phase_init(&lv->ph, "liveness", lv->irg, PHASE_DEFAULT_GROWTH, lv_phase_data_init, NULL);
+ compute_liveness(lv);
+ /* be_live_chk_compare(lv, lv->lvc); */
+ }
+}
+
+void be_liveness_assure_chk(be_lv_t *lv)
+{
+#ifndef USE_LIVE_CHK
+ be_liveness_assure_sets(lv);
+#else
+ (void) lv;
+#endif
+}
+
+void be_liveness_invalidate(be_lv_t *lv)
+{
+ if (lv && lv->nodes) {
+ unregister_hook(hook_node_info, &lv->hook_info);
+ phase_free(&lv->ph);
+ bitset_free(lv->nodes);
+ lv->nodes = NULL;
+ }
+}
+
+/* Compute the inter block liveness for a graph. */
+be_lv_t *be_liveness(const be_irg_t *birg)
+{
+ be_lv_t *lv = xmalloc(sizeof(lv[0]));
+
+ memset(lv, 0, sizeof(lv[0]));
+ lv->irg = be_get_birg_irg(birg);
+ lv->birg = birg;
+#ifdef USE_LIVE_CHK
+ lv->dfs = dfs_new(&absgraph_irg_cfg_succ, lv->irg);
+ lv->lvc = lv_chk_new(lv->irg, lv->dfs);
+#endif
+ lv->hook_info.context = lv;
+ lv->hook_info.hook._hook_node_info = lv_dump_block;
+
+ return lv;
+}
+
+void be_liveness_recompute(be_lv_t *lv)
+{
+ unsigned last_idx = get_irg_last_idx(lv->irg);
+ if(last_idx >= bitset_size(lv->nodes)) {
+ bitset_free(lv->nodes);
+ lv->nodes = bitset_malloc(last_idx * 2);
+ }
+
+ else
+ bitset_clear_all(lv->nodes);
+
+ phase_free(&lv->ph);
+ phase_init(&lv->ph, "liveness", lv->irg, PHASE_DEFAULT_GROWTH, lv_phase_data_init, NULL);
+ compute_liveness(lv);
+}
+
+
/**
 * Invalidate the liveness information and release the be_lv_t itself.
 */
void be_liveness_free(be_lv_t *lv)
{
	be_liveness_invalidate(lv);
	free(lv);
}
+
+void be_liveness_remove(be_lv_t *lv, const ir_node *irn)
+{
+ if (lv->nodes) {
+ unsigned idx = get_irn_idx(irn);
+ lv_remove_walker_t w;
+
+ /*
+ * Removes a single irn from the liveness information.
+ * Since an irn can only be live at blocks dominated by the block of its
+ * definition, we only have to process that dominance subtree.
+ */
+ w.lv = lv;
+ w.irn = irn;
+ dom_tree_walk(get_nodes_block(irn), lv_remove_irn_walker, NULL, &w);
+ if(idx < bitset_size(lv->nodes))
+ bitset_clear(lv->nodes, idx);
+ }
+}
+
+void be_liveness_introduce(be_lv_t *lv, ir_node *irn)
+{
+ if (lv->nodes) {
+ struct _lv_walker_t w;
+ w.lv = lv;
+ w.data = bitset_malloc(get_irg_last_idx(lv->irg));
+ liveness_for_node(irn, &w);
+ bitset_free(w.data);
+ }
+}
+
/**
 * Update the liveness information of a single irn by removing its stale
 * entries and recomputing them.
 */
void be_liveness_update(be_lv_t *lv, ir_node *irn)
{
	be_liveness_remove(lv, irn);
	be_liveness_introduce(lv, irn);
}
+
+static void lv_check_walker(ir_node *bl, void *data)
+{
+ struct _lv_walker_t *w = data;
+ be_lv_t *lv = w->lv;
+ be_lv_t *fresh = w->data;
+
+ struct _be_lv_info_t *curr = phase_get_irn_data(&lv->ph, bl);
+ struct _be_lv_info_t *fr = phase_get_irn_data(&fresh->ph, bl);
+
+ if(!fr && curr && curr[0].u.head.n_members > 0) {
+ unsigned i;
+
+ ir_fprintf(stderr, "%+F liveness should be empty but current liveness contains:\n", bl);
+ for(i = 0; i < curr[0].u.head.n_members; ++i) {
+ ir_fprintf(stderr, "\t%+F\n", get_idx_irn(lv->irg, curr[1 + i].u.node.idx));
+ }
+ }
+
+ else if(curr) {
+ unsigned n_curr = curr[0].u.head.n_members;
+ unsigned n_fresh = fr[0].u.head.n_members;
+
+ unsigned i;