added a few benchmarks/testapps from http://shootout.alioth.debian.org
diff --git a/ir/be/beirgmod.c b/ir/be/beirgmod.c
index 0715716..45db095 100644
--- a/ir/be/beirgmod.c
+++ b/ir/be/beirgmod.c
@@ -263,10 +263,10 @@ static ir_node *search_def(ir_node *usage, int pos, pset *copies, pset *copy_blo
                if(pset_find_ptr(phi_blocks, curr_bl)) {
                        ir_node *phi = get_irn_link(curr_bl);
 
-                       if(!phi) {
+                       if(phi == NULL) {
                                int i, n_preds = get_irn_arity(curr_bl);
                                ir_graph *irg = get_irn_irg(curr_bl);
-                               ir_node **ins = xmalloc(n_preds * sizeof(ins[0]));
+                               ir_node **ins = alloca(n_preds * sizeof(ins[0]));
 
                                for(i = 0; i < n_preds; ++i)
                                        ins[i] = new_r_Bad(irg);
@@ -275,8 +275,8 @@ static ir_node *search_def(ir_node *usage, int pos, pset *copies, pset *copy_blo
                                DBG((dbg, LEVEL_2, "\tcreating phi %+F in %+F\n", phi, curr_bl));
 
                                set_irn_link(curr_bl, phi);
-                               sched_add_after(curr_bl, phi);
-                               free(ins);
+                               if(mode != mode_M)
+                                       sched_add_after(curr_bl, phi);
 
                                for(i = 0; i < n_preds; ++i) {
                                        ir_node *arg = search_def(phi, i, copies, copy_blocks, phis, phi_blocks, mode);
@@ -284,7 +284,7 @@ static ir_node *search_def(ir_node *usage, int pos, pset *copies, pset *copy_blo
                                        set_irn_n(phi, i, arg);
                                }
 
-                               if(phis)
+                               if(phis != NULL)
                                        pset_insert_ptr(phis, phi);
                        }
 
@@ -354,6 +354,7 @@ static void fix_usages(pset *copies, pset *copy_blocks, pset *phi_blocks, pset *
        obstack_free(&obst, NULL);
 }
 
+#if 0
 /**
  * Remove phis which are not necessary.
  * During place_phi_functions() phi functions are put on the dominance
@@ -366,26 +367,32 @@ static void fix_usages(pset *copies, pset *copy_blocks, pset *phi_blocks, pset *
  */
 static void remove_odd_phis(pset *copies, pset *unused_copies)
 {
-  ir_node *irn;
+       ir_node *irn;
 
-  for(irn = pset_first(copies); irn; irn = pset_next(copies)) {
-    if(is_Phi(irn)) {
-      int i, n;
-      int illegal = 0;
+       for(irn = pset_first(copies); irn; irn = pset_next(copies)) {
+               if(is_Phi(irn)) {
+                       int i, n;
+                       int illegal = 0;
 
-      assert(sched_is_scheduled(irn) && "phi must be scheduled");
-      for(i = 0, n = get_irn_arity(irn); i < n && !illegal; ++i)
-        illegal = get_irn_n(irn, i) == NULL;
+                       assert(sched_is_scheduled(irn) && "phi must be scheduled");
+                       for(i = 0, n = get_irn_arity(irn); i < n && !illegal; ++i)
+                               illegal = get_irn_n(irn, i) == NULL;
 
-      if(illegal)
-        sched_remove(irn);
-    }
-  }
+                       if(illegal) {
+                               for(i = 0, n = get_irn_arity(irn); i < n; ++i)
+                                       set_irn_n(irn, i, new_Bad());
+                               sched_remove(irn);
+                       }
+               }
+       }
 
-  for(irn = pset_first(unused_copies); irn; irn = pset_next(unused_copies)) {
+       for(irn = pset_first(unused_copies); irn; irn = pset_next(unused_copies)) {
+               for(i = 0, n = get_irn_arity(irn); i < n; ++i)
+                       set_irn_n(irn, i, new_Bad());
                sched_remove(irn);
        }
 }
+#endif
 
 void be_ssa_constr_phis_ignore(dom_front_info_t *info, be_lv_t *lv, int n, ir_node *nodes[], pset *phis, pset *ignore_uses)
 {
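
remove_odd_phis() is compiled out with #if 0 above, but its new body shows the
removal idiom used throughout this diff: before a node is dropped from the
schedule, its operands are rewired to Bad first, so no use edge of the dead node
keeps its former arguments alive. A compact sketch of that idiom, using only calls
that appear in the diff; kill_from_schedule() is a hypothetical helper name:

/* Sketch: detach a node from its operands before unscheduling it.
 * kill_from_schedule() is a hypothetical helper, not part of beirgmod.c. */
static void kill_from_schedule(ir_node *irn)
{
	int i, n;

	/* Point every operand at Bad so the removed node no longer keeps its
	 * former arguments alive through its in-edges. */
	for (i = 0, n = get_irn_arity(irn); i < n; ++i)
		set_irn_n(irn, i, new_Bad());

	sched_remove(irn);
}
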
@@ -578,7 +585,6 @@ static void elr_split_walker(ir_node *bl, void *data)
 
        for(insn = be_scan_insn(&ie, sched_first(bl)); !is_Block(insn->irn); insn = be_scan_insn(&ie, insn->next_insn)) {
                ir_node *pred = sched_prev(insn->irn);
-               ir_printf("curr: %+F next: %+F, prev: %+F\n", insn->irn, insn->next_insn, pred);
                if(!is_Block(pred) && !is_Phi(insn->irn))
                        insert_Perm_after(aenv, cenv->lv, cenv->cls, cenv->dom_front, insn->irn);
        }
@@ -595,3 +601,50 @@ void extreme_liverange_splitting(struct _be_chordal_env_t *cenv)
        be_liveness_recompute(cenv->lv);
        obstack_free(&c.obst, NULL);
 }
+
+static void remove_empty_block(ir_node *block, void *data) {
+       ir_graph *irg;
+       const ir_edge_t *edge, *next;
+       ir_node *node;
+       ir_node *jump = NULL;
+
+       assert(is_Block(block));
+
+       if(get_Block_n_cfgpreds(block) != 1)
+               return;
+
+       sched_foreach(block, node) {
+               if(!is_Jmp(node))
+                       return;
+               if(jump != NULL) {
+                       // we should never have 2 jumps in a block
+                       assert(0);
+                       return;
+               }
+               jump = node;
+       }
+       if(jump == NULL)
+               return;
+
+       node = get_Block_cfgpred(block, 0);
+       foreach_out_edge_safe(jump, edge, next) {
+               ir_node *block = get_edge_src_irn(edge);
+               int pos = get_edge_src_pos(edge);
+
+               set_irn_n(block, pos, node);
+       }
+
+       set_Block_cfgpred(block, 0, new_Bad());
+       sched_remove(jump);
+
+       irg = get_irn_irg(block);
+       set_irg_doms_inconsistent(irg);
+       set_irg_extblk_inconsistent(irg);
+}
+
+/**
+ * removes basic blocks that just contain a jump instruction
+ */
+void be_remove_empty_blocks(ir_graph *irg) {
+       irg_block_walk_graph(irg, remove_empty_block, NULL, NULL);
+}
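
The new pass only removes blocks that have exactly one control-flow predecessor
and whose schedule consists of a single Jmp; rerouting the out edges of that Jmp
to the block's own predecessor therefore leaves the predecessor count (and any Phi
arities) of the successor blocks unchanged. Because remove_empty_block() merely
marks dominance and extended-block information as inconsistent, a caller has to
rebuild whatever analyses the following phases rely on. A hedged sketch of such a
call site; recompute_analyses() is a stand-in, not an API taken from this diff:

/* Sketch of a possible call site for the new cleanup pass.
 * recompute_analyses() is a hypothetical stand-in for whatever dominance or
 * liveness recomputation the surrounding backend phase actually needs. */
static void cleanup_empty_blocks(ir_graph *irg)
{
	/* Drop blocks that contain nothing but a single Jmp. */
	be_remove_empty_blocks(irg);

	/* The block walker marked dominance and extended-block info as
	 * inconsistent, so rebuild them before any phase that relies on them. */
	recompute_analyses(irg);   /* hypothetical */
}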