assert(get_irg_pinned(called_graph) == pinned);
if (get_irg_outs_state(current_ir_graph) == outs_consistent)
set_irg_outs_inconsistent(current_ir_graph);
+ set_irg_loopinfo_inconsistent(current_ir_graph);
/* -- Check preconditions -- */
assert(get_irn_op(call) == op_Call);
Further mark these nodes so that they are not visited by the
copying. */
set_irn_link(get_irg_start(called_graph), pre_call);
- set_irn_visited(get_irg_start(called_graph),
- get_irg_visited(current_ir_graph));
- set_irn_link(get_irg_start_block(called_graph),
- get_nodes_Block(pre_call));
- set_irn_visited(get_irg_start_block(called_graph),
- get_irg_visited(current_ir_graph));
+ set_irn_visited(get_irg_start(called_graph), get_irg_visited(current_ir_graph));
+ set_irn_link(get_irg_start_block(called_graph), get_nodes_Block(pre_call));
+ set_irn_visited(get_irg_start_block(called_graph), get_irg_visited(current_ir_graph));
+ set_irn_link(get_irg_bad(called_graph), get_irg_bad(current_ir_graph));
+ set_irn_visited(get_irg_bad(called_graph), get_irg_visited(current_ir_graph));
/* Initialize for compaction of in arrays */
inc_irg_block_visited(current_ir_graph);
void local_optimize_graph (ir_graph *irg);
/** Performs dead node elimination by copying the ir graph to a new obstack.
-
- Further removes Bad predecesors from Blocks and the corresponding
- inputs to Phi nodes.
- Optimization is only performed if options `optimize' and
- `opt_dead_node_elimination' are set.
- The graph may not be in state phase_building. The outs datasturcture
- is freed, the outs state set to no_outs.
- @todo Change this? -> inconsistent.
-
- Backedge information is conserved.
- Removes old attributes of nodes. Sets link field to NULL.
- Callee information must be freed (irg_callee_info_none).
-
- Attention: the numbers assigned to nodes if the library is compiled for
- development/debugging are not conserved by copying. */
+ *
+ * The major intention of this pass is to free memory occupied by
+ * dead nodes and outdated analyses information. Further this
+ * function removes Bad predecessors from Blocks and the corresponding
+ * inputs to Phi nodes. This opens optimization potential for other
+ * optimizations. Further this phase reduces dead Block<->Jmp
+ * self-cycles to Bad nodes.
+ *
+ * Dead_node_elimination is only performed if options `optimize' and
+ * `opt_dead_node_elimination' are set. The graph may
+ * not be in state phase_building. The outs data structure is freed,
+ * the outs state set to no_outs. Backedge information is conserved.
+ * Removes old attributes of nodes. Sets link field to NULL.
+ * Callee information must be freed (irg_callee_info_none).
+ *
+ * Attention: the numbers assigned to nodes if the library is compiled for
+ * development/debugging are not conserved by copying. */
void dead_node_elimination(ir_graph *irg);
/** Removes Bad predecessors from Blocks and the corresponding
- inputs to Phi nodes as in dead_node_elimination but without
- copying the graph.
+ inputs to Phi nodes as in dead_node_elimination but without
+ copying the graph.
- @todo not implemented! */
+ @todo not implemented! / buggy? */
void remove_bad_predecessors(ir_graph *irg);
/** Inlines a method at the given call site.
-
- Removes the call node and splits the basic block the call node
- belongs to. Inserts a copy of the called graph between these nodes.
- Assumes that call is a Call node in current_ir_graph and that
- the type in the Call nodes type attribute is the same as the
- type of the called graph.
- Further it assumes that all Phi nodes in a block of current_ir_graph
- are assembled in a "link" list in the link field of the corresponding
- block nodes. Further assumes that all Proj nodes are in a "link" list
- in the nodes producing the tuple. (This is only an optical feature
- for the graph.) Conserves this feature for the old
- nodes of the graph. This precondition can be established by a call to
- collect_phisprojs(), see irgmod.h.
- Called_graph must be unequal to current_ir_graph. Will not inline
- if they are equal.
- Sets visited masterflag in current_ir_graph to the max of the flag in
- current and called graph.
- Assumes that both, the called and the calling graph are in state
- "pinned".
- It is recommended to call local_optimize_graph after inlining as this
- function leaves a set of obscure Tuple nodes, e.g. a Proj-Tuple-Jmp
- combination as control flow operation.
-
- @param call the call node that should be inlined
- @param called_graph the IR-graph that is called at call
-
- @return zero if method could not be inlined (recursion for instance),
- non-zero if all went ok
-*/
+ *
+ * Removes the call node and splits the basic block the call node
+ * belongs to. Inserts a copy of the called graph between these nodes.
+ * Assumes that call is a Call node in current_ir_graph and that
+ * the type in the Call node's type attribute is the same as the
+ * type of the called graph.
+ * Further it assumes that all Phi nodes in a block of current_ir_graph
+ * are assembled in a "link" list in the link field of the corresponding
+ * block nodes. Further assumes that all Proj nodes are in a "link" list
+ * in the nodes producing the tuple. (This is only an optical feature
+ * for the graph.) Conserves this feature for the old
+ * nodes of the graph. This precondition can be established by a call to
+ * collect_phisprojs(), see irgmod.h.
+ * As dead_node_elimination this function reduces dead Block<->Jmp
+ * self-cycles to Bad nodes.
+ *
+ * Called_graph must be unequal to current_ir_graph. Will not inline
+ * if they are equal.
+ * Sets visited masterflag in current_ir_graph to the max of the flag in
+ * current and called graph.
+ * Assumes that both, the called and the calling graph are in state
+ * "pinned".
+ * It is recommended to call local_optimize_graph after inlining as this
+ * function leaves a set of obscure Tuple nodes, e.g. a Proj-Tuple-Jmp
+ * combination as control flow operation.
+ *
+ * @param call the call node that should be inlined
+ * @param called_graph the IR-graph that is called at call
+ *
+ * @return zero if method could not be inlined (recursion for instance),
+ * non-zero if all went ok
+ */
int inline_method(ir_node *call, ir_graph *called_graph);
/** Inlines all small methods at call sites where the called address comes
- from a Const node that references the entity representing the called
- method.
- The size argument is a rough measure for the code size of the method:
- Methods where the obstack containing the firm graph is smaller than
- size are inlined. Further only a limited number of calls are inlined.
- If the method contains more than 1024 inlineable calls none will be
- inlined.
- Inlining is only performed if flags `optimize' and `inlineing' are set.
- The graph may not be in state phase_building.
- It is recommended to call local_optimize_graph after inlining as this
- function leaves a set of obscure Tuple nodes, e.g. a Proj-Tuple-Jmp
- combination as control flow operation. */
+ * from a Const node that references the entity representing the called
+ * method.
+ * The size argument is a rough measure for the code size of the method:
+ * Methods where the obstack containing the firm graph is smaller than
+ * size are inlined. Further only a limited number of calls are inlined.
+ * If the method contains more than 1024 inlineable calls none will be
+ * inlined.
+ * Inlining is only performed if flags `optimize' and `inlineing' are set.
+ * The graph may not be in state phase_building.
+ * It is recommended to call local_optimize_graph after inlining as this
+ * function leaves a set of obscure Tuple nodes, e.g. a Proj-Tuple-Jmp
+ * combination as control flow operation. */
void inline_small_irgs(ir_graph *irg, int size);