1 /* Copyright (C) 1998 - 2000 by Universitaet Karlsruhe
2 ** All rights reserved.
4 ** Author: Christian Schaefer
6 ** Optimizations for a whole ir graph, i.e., a procedure.
12 # include "irnode_t.h"
13 # include "irgraph_t.h"
/* Value table used by the local optimizer for CSE; defined elsewhere
   (presumably iropt) -- TODO: move these declarations to a shared header. */
21 pset *new_identities (void);
22 void del_identities (pset *value_table);
24 /********************************************************************/
25 /* apply optimizations of iropt to all nodes. */
26 /********************************************************************/
/* Post-visitor for irg_walk: optimizes every operand of n in place and
   rewires n to the (possibly replaced) operand. */
29 optimize_in_place_wrapper (ir_node *n, void *env) {
33 /* optimize all sons after recursion, i.e., the sons' sons are
/* i starts at -1: index -1 addresses the block edge of a node (see the
   set_irn_n(nn, -1, ...) use in copy_preds), so the block in is
   optimized as well. */
35 for (i = -1; i < get_irn_arity(n); i++) {
36 optimized = optimize_in_place(get_irn_n(n, i));
37 set_irn_n(n, i, optimized);
/* Applies the local optimizations (iropt) to all nodes reachable from
   irg->end, with irg temporarily installed as current_ir_graph. */
42 local_optimize_graph (ir_graph *irg) {
43 ir_graph *rem = current_ir_graph;
44 current_ir_graph = irg;
46 /* Should we clean the value_table in irg for the cse? Better do so... */
47 del_identities(irg->value_table);
48 irg->value_table = new_identities();
/* walk over the graph; optimize_in_place_wrapper runs as the
   post-visitor, so operands are rewritten after recursion */
50 /* walk over the graph */
51 irg_walk(irg->end, NULL, optimize_in_place_wrapper, NULL);
/* restore the graph that was current before the call */
53 current_ir_graph = rem;
56 /********************************************************************/
57 /* Routines for dead node elimination / copying garbage collection */
59 /********************************************************************/
61 /* Remember the new node in the old node by using a field all nodes have
      -- presumably the link field (see the set_irn_link uses below); confirm. */
63 set_new_node (ir_node *old, ir_node *new)
68 /* Get this new node, before the old node is forgotten. */
70 get_new_node (ir_node * n)
76 /* We use the block_visited flag to mark that we have computed the
77 number of useful predecessors for this block.
78 Further we encode the new arity in this flag. Remembering the arity is
79 useful, as it saves a lot of pointer accesses. This function is called
80 for all Phi and Block nodes in a Block. */
82 compute_new_arity(ir_node *b) {
86 irg_v = get_irg_block_visited(current_ir_graph);
87 block_v = get_Block_block_visited(b);
/* Once computed, the block's visited counter holds irg_v + res, so a
   value >= irg_v means the result is already cached for this walk. */
88 if (block_v >= irg_v) {
89 /* we computed the number of preds for this block and saved it in the
91 return block_v - irg_v;
/* not yet computed for this walk: */
93 /* compute the number of good predecessors */
94 res = get_irn_arity(b);
95 for (i = 0; i < get_irn_arity(b); i++)
96 if (get_irn_opcode(get_irn_n(b, i)) == iro_Bad) res--;
97 /* save it in the flag. */
98 set_Block_block_visited(b, irg_v + res);
103 /* Copies the node to the new obstack. The Ins of the new node point to
104 the predecessors on the old obstack. n->link points to the new node.
105 For Phi and Block nodes the function allocates in-arrays with an arity
106 only for useful predecessors. The arity is determined by counting
107 the non-bad predecessors of the block. */
109 copy_node (ir_node *n, void *env) {
/* Blocks count their own good preds; compute_new_arity caches the
   result so all Phis of the same block reuse it. */
113 if (get_irn_opcode(n) == iro_Block) {
115 new_arity = compute_new_arity(n);
117 block = get_nodes_Block(n);
/* a Phi gets the compacted arity of its block, so its ins stay
   parallel to the block's compacted control-flow ins */
118 if (get_irn_opcode(n) == iro_Phi) {
119 new_arity = compute_new_arity(block);
/* all other nodes keep their full arity */
121 new_arity = get_irn_arity(n);
/* allocate the copy on the (new) obstack of current_ir_graph */
124 nn = new_ir_node(current_ir_graph,
130 /* Copy the attributes. These might point to additional data. If this
131 was allocated on the old obstack the pointers now are dangling. This
132 frees e.g. the memory of the graph_arr allocated in new_immBlock. */
137 /* Copies new predecessors of old node to new node remembered in link.
138 Spare the Bad predecessors of Phi and Block nodes. */
140 copy_preds (ir_node *n, void *env) {
141 ir_node *nn, *block, *on;
144 nn = get_new_node(n);
146 if (get_irn_opcode(n) == iro_Block) {
147 /* Don't copy Bad nodes. */
/* j (declared on an elided line) counts the compacted in-position of nn */
149 for (i = 0; i < get_irn_arity(n); i++)
150 if (get_irn_opcode(get_irn_n(n, i)) != iro_Bad) {
151 set_irn_n (nn, j, get_new_node(get_irn_n(n, i)));
154 /* repair the block visited flag from above misuse */
155 set_Block_block_visited(nn, 0);
156 /* Local optimization could not merge two subsequent blocks if
157 in array contained Bads. Now it's possible. */
158 on = optimize_in_place(nn);
159 if (nn != on) exchange(nn, on);
160 } else if (get_irn_opcode(n) == iro_Phi) {
161 /* Don't copy node if corresponding predecessor in block is Bad.
162 The Block itself should not be Bad. */
163 block = get_nodes_Block(n);
/* index -1 is the block edge of the Phi */
164 set_irn_n (nn, -1, get_new_node(block));
/* skip data ins whose control-flow predecessor in the block is Bad,
   keeping the Phi's ins parallel to the block's compacted ins */
166 for (i = 0; i < get_irn_arity(n); i++)
167 if (get_irn_opcode(get_irn_n(block, i)) != iro_Bad) {
168 set_irn_n (nn, j, get_new_node(get_irn_n(n, i)));
171 /* Compacting the Phi's ins might generate Phis with only one
/* NOTE(review): this tests the OLD node's arity and exchanges the old
   node n rather than the copy nn -- verify this is intended. */
173 if (get_irn_arity(n) == 1)
174 exchange(n, get_irn_n(n, 0));
/* all other node kinds: copy every in including the block edge (-1) */
176 for (i = -1; i < get_irn_arity(n); i++)
177 set_irn_n (nn, i, get_new_node(get_irn_n(n, i)));
181 /* Copies the graph reachable from current_ir_graph->end to the obstack
183 Then fixes the fields in current_ir_graph containing nodes of the
187 /* Not all nodes remembered in current_ir_graph might be reachable
188 from the end node. Assure their link is set to NULL so that
189 we can test whether new nodes have been computed. */
190 set_irn_link(get_irg_frame (current_ir_graph), NULL);
191 set_irn_link(get_irg_globals(current_ir_graph), NULL);
192 set_irn_link(get_irg_args (current_ir_graph), NULL);
194 /* we use the block walk flag for removing Bads from Blocks ins. */
195 inc_irg_block_visited(current_ir_graph);
/* copy all nodes reachable from End: copy_node allocates each copy,
   copy_preds rewires its ins to the copied predecessors */
198 irg_walk(get_irg_end(current_ir_graph), copy_node, copy_preds, NULL);
200 /* fix the fields in current_ir_graph */
201 set_irg_end (current_ir_graph, get_new_node(get_irg_end(current_ir_graph)));
202 set_irg_end_block (current_ir_graph, get_new_node(get_irg_end_block(current_ir_graph)));
/* frame/globals/args may be unreachable from End: a link still NULL
   means no copy exists yet, so walk and copy them explicitly */
203 if (get_irn_link(get_irg_frame(current_ir_graph)) == NULL)
204 irg_walk(get_irg_frame(current_ir_graph), copy_node, copy_preds, NULL);
205 if (get_irn_link(get_irg_globals(current_ir_graph)) == NULL)
206 irg_walk(get_irg_globals(current_ir_graph), copy_node, copy_preds, NULL);
207 if (get_irn_link(get_irg_args(current_ir_graph)) == NULL)
208 irg_walk(get_irg_args(current_ir_graph), copy_node, copy_preds, NULL);
209 set_irg_start (current_ir_graph, get_new_node(get_irg_start(current_ir_graph)));
210 set_irg_start_block(current_ir_graph,
211 get_new_node(get_irg_start_block(current_ir_graph)));
212 set_irg_frame (current_ir_graph, get_new_node(get_irg_frame(current_ir_graph)));
213 set_irg_globals(current_ir_graph, get_new_node(get_irg_globals(current_ir_graph)));
214 set_irg_args (current_ir_graph, get_new_node(get_irg_args(current_ir_graph)));
/* the graph's Bad node may not have been reached by the walk either;
   copy it by hand if its link shows no copy was made */
215 if (get_irn_link(get_irg_bad(current_ir_graph)) == NULL) {
216 copy_node(get_irg_bad(current_ir_graph), NULL);
217 copy_preds(get_irg_bad(current_ir_graph), NULL);
219 set_irg_bad(current_ir_graph, get_new_node(get_irg_bad(current_ir_graph)));
223 /* Amroq call this emigrate() */
/* Dead node elimination by copying garbage collection: copies every
   reachable node of irg onto a fresh obstack, then frees the old
   obstack together with all unreachable (dead) nodes on it. */
225 dead_node_elimination(ir_graph *irg) {
227 struct obstack *graveyard_obst = NULL;
228 struct obstack *rebirth_obst = NULL;
230 /* Remember external state of current_ir_graph. */
231 rem = current_ir_graph;
232 current_ir_graph = irg;
/* only run when optimization and this pass are enabled globally */
234 if (get_optimize() && get_opt_dead_node_elimination()) {
236 /* A quiet place, where the old obstack can rest in peace,
237 until it will be cremated. */
238 graveyard_obst = irg->obst;
240 /* A new obstack, where the reachable nodes will be copied to. */
241 rebirth_obst = (struct obstack *) xmalloc (sizeof (struct obstack));
242 current_ir_graph->obst = rebirth_obst;
243 obstack_init (current_ir_graph->obst);
245 /* Copy the graph from the old to the new obstack */
248 /* Free memory from old unoptimized obstack */
249 obstack_free(graveyard_obst, 0); /* First empty the obstack ... */
250 xfree (graveyard_obst); /* ... then free it. */
/* restore the graph that was current before the call */
253 current_ir_graph = rem;