 *
 * @param block the block
 * @param ctx the walker environment
 */
static void insert_nodes_walker(ir_node *block, void *ctx)
{
	pre_env *env = (pre_env*)ctx;
	int arity = get_irn_arity(block);
	ir_node *value;
	ir_node *expr;
	block_info *info;
	ir_node *idom;
	int pos;
	ir_valueset_iterator_t iter;

	/* only blocks */
	if (! is_Block(block))
		return;

	/* ensure that even the start block has a new_set */
	info = get_block_info(block);
	if (info->new_set)
		ir_valueset_del(info->new_set);
	info->new_set = ir_valueset_new(16);

	/* nothing can be inserted above the start block */
	if (block == env->start_block)
		return;

	DB((dbg, LEVEL_2, "Insert operation of %+F\n", block));

	/* propagate values made available in the immediate dominator */
	idom = get_Block_idom(block);
	update_new_set(block, idom);

	/* process only path joining blocks */
	if (arity < 2) {
		return;
	}

	/* This is the main reason antic_in is preferred over antic_out;
	   we may iterate over every anticipated value first and not
	   over the predecessor blocks. */
	foreach_valueset(info->antic_in, value, expr, iter) {
		ir_mode *mode;
		ir_node *phi;
		ir_node **phi_in;

		/* already done? */
		if (ir_valueset_lookup(info->antic_done, value))
			continue;

		/* filter phi nodes from antic_in */
		if (is_Phi(expr))
			continue;

		DB((dbg, LEVEL_2, "Insert for %+F (value %+F) in %+F\n", expr, value, block));

		/* A value computed in the dominator is totally redundant.
		   Hence we have nothing to insert. */
		if (ir_valueset_lookup(get_block_info(idom)->avail_out, value)) {
			DB((dbg, LEVEL_2, "Fully redundant expr %+F value %+F\n", expr, value));
			DEBUG_ONLY(inc_stats(gvnpre_stats->fully);)

			ir_valueset_insert(info->antic_done, value, expr);
			continue;
		}

		/* heuristic: skip candidates whose hoisting looks too costly */
		if (is_hoisting_greedy(expr, block)) {
			DB((dbg, LEVEL_2, "greedy\n"));
			continue;
		}

		/* non-NULL mode means the value is available on at least one,
		   but not all, incoming paths — i.e. partially redundant */
		mode = is_partially_redundant(block, expr, value);
		if (mode == NULL)
			continue;

#if LOADS || DIVMODS
		/* save old mode_M phis to remove keepalive edges later */
		if (is_memop(expr)) {
			ir_node *mem = get_memop_mem(expr);
			if (is_Phi(mem) && get_nodes_block(mem) == get_nodes_block(expr)) {
				ir_nodeset_insert(env->keeps, mem);
			}
		}
#endif

#ifdef DEBUG_libfirm
		if (! is_Proj(expr)) {
			if (env->first_iter)
				inc_stats(gvnpre_stats->first_iter_found);
			inc_stats(gvnpre_stats->partially);
		}
		if (is_Load(expr) || is_Store(expr))
			inc_stats(gvnpre_stats->loads);
		else if (is_Div(expr) || is_Mod(expr))
			inc_stats(gvnpre_stats->divmods);
#endif

		/* one phi operand per control-flow predecessor */
		phi_in = XMALLOCN(ir_node *, arity);

		/* for each predecessor block */
		for (pos = 0; pos < arity; ++pos) {
			ir_node *pred_block = get_Block_cfgpred_block(block, pos);
			block_info *pred_info;

			pred_info = get_block_info(pred_block);

			/* value missing on this path: materialize the expression
			   in the predecessor block */
			if (! pred_info->found) {
				int i;
				int node_arity = get_irn_arity(expr);
				ir_node **in = XMALLOCNZ(ir_node *, node_arity);
				ir_node *trans;
				ir_node *new_value, *new_value2;
				ir_node *target_block = pred_block;

				for (i = 0; i < node_arity; ++i) {
					ir_node *pred = get_irn_n(expr, i);
					ir_node *value = identify(pred);
					ir_node *leader;
					/* NOTE(review): this 'trans' shadows the outer
					   'trans' declared above — confirm intended */
					ir_node *trans;
					ir_node *trans_val;
					ir_node *avail;

					/* transform knowledge over the predecessor from
					   anti-leader world into leader world. */

					DB((dbg, LEVEL_3, "pred %+F\n", pred));
					/* NOTE(review): redundant — 'value' was already
					   initialized to identify(pred) above */
					value = identify(pred);

					/* get leader for pred to lookup its translated value */
					leader = ir_valueset_lookup(info->antic_in, value);
					if (! leader)
						leader = pred;
					DB((dbg, LEVEL_3, "lead %+F\n", leader));

					trans = get_translated(pred_block, leader);
					if (!trans)
						trans = pred;
					DB((dbg, LEVEL_3, "trans %+F\n", trans));

					/* in case of phi, we are done */
					if (is_Phi(pred) && get_nodes_block(pred) == block) {
						in[i] = trans;
						continue;
					}

					trans_val = identify(trans);
					DB((dbg, LEVEL_3, "value %+F\n", trans_val));

					/* constants are always available but not in avail set */
					if (is_irn_constlike(trans_val)) {
						in[i] = trans;
						continue;
					}

					/* use the leader
					   In case of loads we need to make sure the hoisted
					   loads are found despite their unique value. */
					avail = ir_valueset_lookup(pred_info->avail_out, trans_val);
					DB((dbg, LEVEL_3, "avail %+F\n", avail));

					assert(avail && "predecessor has to be available");
					in[i] = avail;
				}

				/* a Proj must live in the block of the node it projects */
				if (is_Proj(expr))
					target_block = get_nodes_block(in[0]);

				/* Copy node to represent the new value.
				   We use translated nodes as value representatives only.
				   They have anti leaders as predecessors, not leaders!
				   So we have to create a new node using leaders. */
				trans = new_ir_node(
					get_irn_dbg_info(expr),
					environ->graph,
					target_block,
					get_irn_op(expr),
					get_irn_mode(expr),
					get_irn_arity(expr),
					in);
				free(in);
				/* We need the attribute copy here, because the Hash value of a
				   node might depend on it. */
				copy_node_attr(environ->graph, expr, trans);

				/* value is now available in target block through trans
				   insert (not replace) because it has not been available */
				new_value = identify_or_remember(trans);
				ir_valueset_insert(pred_info->avail_out, new_value, trans);
				DB((dbg, LEVEL_4, "avail%+F+= trans %+F(%+F)\n", pred_block, trans, new_value));

				/* also register the copy under the value of the
				   phi-translated original expression */
				new_value2 = identify(get_translated(pred_block, expr));
				ir_valueset_insert(pred_info->avail_out, new_value2, trans);
				DB((dbg, LEVEL_4, "avail%+F+= trans %+F(%+F)\n", pred_block, trans, new_value2));

				DB((dbg, LEVEL_3, "Use new %+F in %+F because %+F(%+F) not available\n", trans, pred_block, expr, value));

				phi_in[pos] = trans;
			} else {
				/* value available */
				phi_in[pos] = pred_info->avail;
			}
			DB((dbg, LEVEL_3, "phi_in %+F\n", phi_in[pos]));
		}

		/* We do not connect tuples as they will be connected automatically
		   by the corresponding projections. */
		if (get_irn_mode(expr) != mode_T) {

			phi = new_r_Phi(block, arity, phi_in, mode);
			DB((dbg, LEVEL_3, "New %+F for redundant %+F created\n", phi, expr));

			/* This value is now available through the new phi.
			   insert || replace in avail_out */
			ir_valueset_replace(info->avail_out, value, phi);
			ir_valueset_insert(info->new_set, value, phi);
		}
		free(phi_in);

		/* already optimized this value in this block */
		ir_valueset_insert(info->antic_done, value, expr);
		env->changes |= 1;
	}
}
+
+#if HOIST_HIGH
+static void update_new_set_walker(ir_node *block, void *ctx)
+{
+ pre_env *env = (pre_env*)ctx;
+
+ if (! is_Block(block))
+ return;
+ if (block == env->start_block)
+ return;
+
+ update_new_set(block, get_Block_idom(block));
+}
+
+/**
+ * Domtree block walker to insert nodes with dying operands
+ * into the highest possible block whilst still being anticipated.
+ */
+static void hoist_high(ir_node *block, void *ctx)
+{
+ pre_env *env = (pre_env*)ctx;
+ block_info *curr_info;
+ ir_valueset_iterator_t iter;
+ ir_node *expr;
+ ir_node *value;
+ int arity = get_irn_arity(block);
+
+ if (! is_Block(block))
+ return;
+
+ curr_info = get_block_info(block);
+
+ if (curr_info->new_set)
+ ir_valueset_del(curr_info->new_set);
+ curr_info->new_set = ir_valueset_new(16);
+
+ if (block == env->start_block)
+ return;
+
+ if (arity < 2)
+ return;
+
+ DB((dbg, LEVEL_2, "High hoisting %+F\n", block));
+
+ /* foreach entry optimized by insert node phase */
+ foreach_valueset(curr_info->antic_done, value, expr, iter) {
+ int pos;
+
+ /* TODO currently we cannot handle load and their projections */
+ if (is_memop(expr) || is_Proj(expr))
+ continue;
+
+ DB((dbg, LEVEL_4, "leader %+F value %+F\n", expr, value));
+
+ /* visit hoisted expressions */
+ for (pos = 0; pos < arity; ++pos) {
+ /* standard target is predecessor block */
+ ir_node *target = get_Block_cfgpred_block(block, pos);
+ block_info *pred_info = get_block_info(target);
+ ir_node *avail;
+ ir_node *new_target;
+ ir_node *trans_expr;
+ ir_node *trans_value;
+ ir_node *dom;
+ int avail_arity;
+ int i;
+ unsigned nest_depth;
+ block_info *dom_info;
+
+ /* get phi translated value */
+ trans_expr = get_translated(target, expr);
+ trans_value = identify(trans_expr);
+ avail = (ir_node*)ir_valueset_lookup(pred_info->avail_out, trans_value);
+
+ /* get the used expr on this path */
+
+ /* TODO when does this happen? */
+ if (avail == NULL)
+ continue;
+
+ avail_arity = get_irn_arity(avail);
+ value = identify(avail);
+
+ /* anticipation border */
+ new_target = NULL;
+ nest_depth = get_loop_depth(get_irn_loop(target));
+
+ /* Either push the hoisted nodes up their path,
+ or try to put them directly into their common dominator. */
+#if COMMON_DOM
+ /* By using block (instead of target) as initial block,
+ we only allow hoisting into a common block of
+ both predecessor blocks. */
+ dom = block;
+#else
+ dom = target;
+#endif
+
+ while (dom && dom != get_Block_idom(block)) {
+
+ dom = get_Block_idom(dom);
+ dom_info = get_block_info(dom);
+ DB((dbg, LEVEL_4, "testing dom %+F\n", dom));
+
+ /* TODO Being in antic_in means hoistable above block,
+ but we need 'hoistable into block'.
+ This could be achieved by a flag for each valueset pair,
+ being set during antic computation. */
+
+ /* check if available node ist still anticipated and clean */
+ if (! ir_valueset_lookup(dom_info->antic_in, value)) {
+ DB((dbg, LEVEL_4, "%+F not antic in %+F\n", value, dom));
+ break;
+ }
+
+ nest_depth = get_loop_depth(get_irn_loop(dom));
+
+ /* do not hoist into loops */
+ if (get_loop_depth(get_irn_loop(dom)) > nest_depth) {
+ DB((dbg, LEVEL_4, "%+F deeper nested\n", dom));
+ /* not a suitable location */
+ continue;
+ }
+
+ /* check if operands die */
+
+ /* check for uses on current path */
+ for (i = 0; i < avail_arity; i++) {
+ ir_node *pred = get_irn_n(avail, i);
+ ir_node *pred_value = identify(pred);
+
+ if (dom == NULL)
+ break;
+
+ DB((dbg, LEVEL_4, "testing pred %+F\n", pred));
+
+ if (! ir_valueset_lookup(dom_info->avail_out, pred_value)) {
+ DB((dbg, LEVEL_4, "pred %+F not available\n", pred));
+ dom = NULL;
+ break;
+ }
+
+ /* check every successor */
+ foreach_out_edge(pred, edge) {
+ ir_node *succ = get_edge_src_irn(edge);
+ DB((dbg, LEVEL_4, "testing succ %+F\n", succ));
+
+ /* check only successors on current path to end */
+ if (block_dominates(dom, get_nodes_block(succ))) {
+ ir_node *succ_value = identify(succ);
+
+ /* Do we have another user than avail?
+ Then predecessor is not dead after removal of avail. */
+ if (succ_value != value) {
+ DB((dbg, LEVEL_4, "still used in %+F\n", succ));
+ dom = NULL;
+ break;
+ }
+ }
+ }
+ }
+ if (dom)
+ new_target = dom;
+
+#if COMMON_DOM
+ /* only try common dominator */
+ break;
+#endif
+ }
+
+ /* put node into new target block */
+ if (new_target) {
+ block_info *target_info = get_block_info(new_target);
+ int nn_arity = get_irn_arity(avail);
+ ir_node **in = XMALLOCN(ir_node *, nn_arity);
+ ir_node *nn;
+ int i;
+
+ DB((dbg, LEVEL_2, "Hoisting %+F into %+F\n", avail, new_target));
+ DEBUG_ONLY(inc_stats(gvnpre_stats->hoist_high);)
+
+ for (i = 0; i < nn_arity; ++i) {
+ ir_node *pred = get_irn_n(avail, i);
+ ir_node *avail_pred = ir_valueset_lookup(target_info->avail_out, identify(pred));
+ assert(avail_pred);
+ in[i] = avail_pred;
+ }
+ nn = new_ir_node(
+ get_irn_dbg_info(avail),
+ environ->graph,
+ new_target,
+ get_irn_op(avail),
+ get_irn_mode(avail),
+ nn_arity,
+ in);
+ free(in);
+
+ identify_or_remember(nn);
+ /* TODO Nodes are inserted into a dominating block and should
+ be available from this point on. Currently we do not push
+ the availability information through during the walk. */
+ ir_valueset_insert(target_info->new_set, value, nn);
+ ir_valueset_insert(target_info->avail_out, value, nn);
+ }
+ }
+ }
+}
+#endif
+
+/* --------------------------------------------------------
+ * Elimination of fully redundant nodes
+ * --------------------------------------------------------
+ */
+
+/**
+ * Walker which finds redundant nodes using avail_out sets
+ * and exchanges them for existing ones.
+ * We cannot change the graph here as this would affect
+ * the hash values of the nodes.
+ *
+ * @param irn the node
+ * @param ctx the walker environment