current_ir_graph = rem;
}
+/**
+ * Enqueue all users of a node onto a wait queue.
+ * If a user is a mode_T node, the users of its Projs are
+ * enqueued recursively as well.
+ */
+static void enqueue_users(ir_node *n, pdeq *waitq)
+{
+ const ir_edge_t *edge;
+
+ foreach_out_edge(n, edge) {
+ ir_node *succ = get_edge_src_irn(edge);
+
+ if (get_irn_link(succ) != waitq) {
+ pdeq_putr(waitq, succ);
+ set_irn_link(succ, waitq);
+ }
+ if (get_irn_mode(succ) == mode_T) {
+ /* A mode_T node has Projs. Because most optimizations
+ run on the Projs, we have to enqueue them as well. */
+ enqueue_users(succ, waitq);
+ }
+ }
+}
+
/**
* Block-Walker: uses dominance depth to mark dead blocks.
*/
static void kill_dead_blocks(ir_node *block, void *env)
{
- (void) env;
+ pdeq *waitq = (pdeq*) env;
if (get_Block_dom_depth(block) < 0) {
/*
* the End block, i.e. it is always reachable from Start
*/
ir_graph *irg = get_irn_irg(block);
- exchange(block, get_irg_bad(irg));
+ enqueue_users(block, waitq);
+ exchange(block, new_r_Bad(irg, mode_BB));
}
}
current_ir_graph = rem;
}
-/**
- * Enqueue all users of a node to a wait queue.
- * Handles mode_T nodes.
- */
-static void enqueue_users(ir_node *n, pdeq *waitq)
-{
- const ir_edge_t *edge;
-
- foreach_out_edge(n, edge) {
- ir_node *succ = get_edge_src_irn(edge);
-
- if (get_irn_link(succ) != waitq) {
- pdeq_putr(waitq, succ);
- set_irn_link(succ, waitq);
- }
- if (get_irn_mode(succ) == mode_T) {
- /* A mode_T node has Proj's. Because most optimizations
- run on the Proj's we have to enqueue them also. */
- enqueue_users(succ, waitq);
- }
- }
-}
-
/**
* Data flow optimization walker.
* Optimizes all nodes and enqueue its users
if (get_opt_global_cse()) {
set_irg_pinned(irg, op_pin_state_floats);
- } else {
- /* The following enables unreachable code elimination (=Blocks may be
- * Bad). We cannot enable it in global_cse nodes since we can't
- * determine a nodes block there and therefore can't remove all code
- * in unreachable blocks */
- set_irg_state(irg, IR_GRAPH_STATE_BAD_BLOCK);
- if (get_irg_dom_state(irg) == dom_consistent)
- irg_block_walk_graph(irg, NULL, kill_dead_blocks, NULL);
}
+ /* The following enables unreachable code elimination (i.e. blocks
+ * may become Bad). */
+ set_irg_state(irg, IR_GRAPH_STATE_BAD_BLOCK);
+
/* invalidate info */
set_irg_outs_inconsistent(irg);
set_irg_doms_inconsistent(irg);
* so if it's not empty, the graph has been changed */
changed = !pdeq_empty(waitq);
- /* finish the wait queue */
- while (! pdeq_empty(waitq)) {
- ir_node *n = (ir_node*)pdeq_getl(waitq);
- if (! is_Bad(n))
+ do {
+ /* finish the wait queue */
+ while (! pdeq_empty(waitq)) {
+ ir_node *n = (ir_node*)pdeq_getl(waitq);
opt_walker(n, waitq);
- }
+ }
+ /* kill newly generated unreachable code */
+ set_irg_outs_inconsistent(irg);
+ compute_doms(irg);
+ irg_block_walk_graph(irg, NULL, kill_dead_blocks, waitq);
+ } while (! pdeq_empty(waitq));
del_pdeq(waitq);
/* Finally kill BAD and doublets from the keep alives.
Doing this AFTER edges where deactivated saves cycles */
- end = get_irg_end(irg);
+ end = get_irg_end(irg);
remove_End_Bads_and_doublets(end);
clear_irg_state(irg, IR_GRAPH_STATE_BAD_BLOCK);