/*
 * Copyright (C) 1995-2007 Inria Rhone-Alpes.  All rights reserved.
 *
 * This file is part of libFirm.
 *
 * This file may be distributed and/or modified under the terms of the
 * GNU General Public License version 2 as published by the Free Software
 * Foundation and appearing in the file LICENSE.GPL included in the
 * packaging of this file.
 *
 * Licensees holding valid libFirm Professional Edition licenses may use
 * this file in accordance with the libFirm Commercial License
 * Agreement provided with the Software.
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE.
 */

/**
 * @file    irlivechk_old.c
 * @date    21.04.2007
 * @author  Sebastian Hack
 * @version $Id: irlivechk.c 14865 2007-06-30 18:23:05Z matze $
 * @summary
 *
 * Liveness checks as developed by Benoit Boissinot, Fabrice Rastello and myself.
 *
 * The specialty here is that nothing has to be recomputed when new nodes are
 * created or old ones are deleted.
 *
 * The algorithm has one core routine, lv_chk_old_bl_xxx(), which performs the
 * liveness check. It only relies on the precomputation done in the
 * constructor, which in turn needs:
 * - out edges
 * - the dominance tree
 * - data obtained from a depth-first search
 *
 * The precomputation remains valid as long as the CFG is not altered.
 */
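
/*
 * Illustrative usage sketch (not part of the build): how a client might set up
 * and query the checker.  The helpers edges_assure() and compute_doms() are
 * assumed to be the usual libFirm routines for establishing the prerequisites
 * listed above; the lv_chk_state_* flags come from the public header.
 */
#if 0
static int value_lives_past_block(ir_graph *irg, ir_node *block, ir_node *value)
{
	lv_chk_old_t *lv;
	unsigned state;

	edges_assure(irg);   /* out edges are needed by the precomputation ... */
	compute_doms(irg);   /* ... as is the dominance tree */

	lv    = lv_chk_old_new(irg);
	state = lv_chk_old_bl_xxx(lv, block, value);
	lv_chk_old_free(lv);

	return (state & lv_chk_state_end) != 0;
}
#endif
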
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#include <stdio.h>

#include "irgraph_t.h"
#include "irphase_t.h"
#include "iredges_t.h"
#include "irprintf.h"
#include "irdump.h"

#include "dfs_t.h"
#include "bitset.h"
#include "util.h"

#include "irlivechk_old.h"

#include "statev.h"

typedef struct _bl_info_t {
	ir_node *block;            /**< The block. */

	int id;                    /**< A dense number for the block; we simply
	                                reuse the pre-order number from the DFS. */

	bitset_t *red_reachable;   /**< Holds the ids of all blocks reachable
	                                in the CFG modulo back edges. */

	bitset_t *be_tgt_reach;    /**< Target blocks of back edges whose
	                                sources are reachable from this block
	                                in the reduced graph. */

	bitset_t *be_tgt_dom;      /**< Target blocks of back edges which
	                                are dominated by this block. */
} bl_info_t;

#define get_block_info(lv, bl) ((bl_info_t *) phase_get_irn_data(&(lv)->ph, bl))

struct _lv_chk_old_t {
	ir_phase ph;               /**< The phase holding the per-block data. */
	dfs_t *dfs;                /**< A depth-first search of the CFG. */
	DEBUG_ONLY(firm_dbg_module_t *dbg;)
	int n_blocks;              /**< The number of blocks (= DFS nodes). */
	bitset_t *back_edge_src;   /**< Ids of all back edge source blocks. */
	bitset_t *back_edge_tgt;   /**< Ids of all back edge target blocks. */
	bl_info_t **map;           /**< Maps DFS pre-order numbers to block infos. */
};

static void *init_block_data(ir_phase *ph, ir_node *irn, void *old)
{
	lv_chk_old_t *lv  = container_of(ph, lv_chk_old_t, ph);
	bl_info_t *bi     = phase_alloc(ph, sizeof(bi[0]));

	bi->id            = dfs_get_pre_num(lv->dfs, irn);
	bi->block         = irn;
	bi->red_reachable = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks);
	bi->be_tgt_reach  = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks);
	bi->be_tgt_dom    = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks);
	(void) old;
	return bi;
}

/**
 * Filter function to select all nodes for which liveness is computed.
 * @param irn A node.
 * @return    1 if the node shall be considered in liveness, 0 if not.
 */
static INLINE int is_liveness_node(const ir_node *irn)
{
	switch (get_irn_opcode(irn)) {
	case iro_Block:
	case iro_Bad:
	case iro_End:
		return 0;
	default:
		break;
	}

	return 1;
}

/**
 * Compute the transitive closure on the reduced graph.
 * The reduced graph is the original graph without back edges.
 * Since that is a DAG, the post order of the DFS visits every (non-back-edge)
 * successor of a block before the block itself, which is exactly the order
 * needed to compute the transitive closure in a single pass.
 * Note also that the DFS of the reduced graph is the same as the one of the
 * original graph, so we can simply reuse it instead of computing a new order.
 */
static void red_trans_closure(lv_chk_old_t *lv)
{
	int i, n;

	for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) {
		ir_node *bl   = dfs_get_post_num_node(lv->dfs, i);
		bl_info_t *bi = get_block_info(lv, bl);

		const ir_edge_t *edge;

		foreach_block_succ (bl, edge) {
			ir_node *succ = get_edge_src_irn(edge);
			bl_info_t *si = get_block_info(lv, succ);
			dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, bl, succ);

			/*
			 * if the edge is not a back edge, include everything
			 * reachable from the successor into the reachable set
			 * of the current block
			 */
			if (kind != DFS_EDGE_BACK) {
				assert(dfs_get_post_num(lv->dfs, bl)
						> dfs_get_post_num(lv->dfs, succ));
				bitset_or(bi->red_reachable, si->red_reachable);
				bitset_set(bi->red_reachable, si->id);
			}

			/* mark the block as a back edge src and succ as back edge tgt. */
			else {
				bitset_set(lv->back_edge_src, bi->id);
				bitset_set(lv->back_edge_tgt, si->id);
			}
		}
	}
}

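/*
 * Illustrative sanity check (not part of the build): after red_trans_closure()
 * the sets must be transitively closed, i.e. everything reachable from a
 * reachable block is itself recorded as reachable.  Uses only the bitset
 * primitives already employed in this file.
 */
#if 0
static void verify_red_trans_closure(const lv_chk_old_t *lv)
{
	bitset_t *tmp = bitset_alloca(lv->n_blocks);
	int i;

	for (i = 0; i < lv->n_blocks; ++i) {
		bl_info_t *bi = lv->map[i];
		bitset_pos_t elm;

		bitset_foreach (bi->red_reachable, elm) {
			/* whatever the reachable block reaches, minus what we
			 * reach ourselves, must be empty */
			bitset_copy(tmp, lv->map[elm]->red_reachable);
			bitset_andnot(tmp, bi->red_reachable);
			assert(bitset_is_empty(tmp));
		}
	}
}
#endif
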
/**
 * Compute the two back edge sets for each block.
 * <code>be_tgt_reach</code> contains the target blocks of all back edges whose
 * sources are reachable from the block in the reduced graph.
 * <code>be_tgt_dom</code> contains the target blocks of all back edges which
 * are strictly dominated by the block.
 * (A small worked example follows the function below.)
 */
static void compute_back_edge_sets(lv_chk_old_t *lv, ir_node *bl)
{
	bl_info_t *bi = phase_get_or_set_irn_data(&lv->ph, bl);
	bitset_t *tmp = bitset_alloca(lv->n_blocks);

	bitset_pos_t elm;
	ir_node *n;

	dominates_for_each (bl, n) {
		bl_info_t *ni = phase_get_or_set_irn_data(&lv->ph, n);

		/* compute the information for the dominance subtree */
		compute_back_edge_sets(lv, n);

		/*
		 * of course all blocks dominated by blocks in the
		 * subtree are also dominated by bl.
		 */
		bitset_or(bi->be_tgt_dom, ni->be_tgt_dom);

		/*
		 * add the immediate dominee to the back edge tgt dominance
		 * bitset if it is the target node of a back edge.
		 */
		if (bitset_is_set(lv->back_edge_tgt, ni->id))
			bitset_set(bi->be_tgt_dom, ni->id);
	}

	/*
	 * iterate over all back edge source blocks which are reachable from
	 * this block and put the targets of their back edges into the
	 * be_tgt_reach bitset of the block.
	 */
	bitset_copy(tmp, bi->red_reachable);
	bitset_set(tmp, bi->id);
	bitset_and(tmp, lv->back_edge_src);
	bitset_foreach (tmp, elm) {
		ir_node *src = lv->map[elm]->block;
		const ir_edge_t *edge;

		foreach_block_succ (src, edge) {
			ir_node *succ        = get_edge_src_irn(edge);
			dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, src, succ);

			if (kind == DFS_EDGE_BACK) {
				bl_info_t *si = get_block_info(lv, succ);
				bitset_set(bi->be_tgt_reach, si->id);
			}
		}
	}
}

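/*
 * Worked example for the two routines above (illustrative): consider a CFG
 * with blocks A, B, C, D and edges A->B, B->C, C->B (back edge) and C->D.
 * Then
 *
 *   back_edge_src = { C },   back_edge_tgt = { B }
 *   red_reachable: A -> {B,C,D},  B -> {C,D},  C -> {D},  D -> {}
 *   be_tgt_reach:  A -> {B},      B -> {B},    C -> {B},  D -> {}
 *   be_tgt_dom:    A -> {B},      B, C, D -> {}
 *
 * since C, the only back edge source, is reachable from A, B and C in the
 * reduced graph (a block counts as reachable from itself here), and B, the
 * only back edge target, is strictly dominated only by A.
 */
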
lv_chk_old_t *lv_chk_old_new(ir_graph *irg)
{
	lv_chk_old_t *res = xmalloc(sizeof(res[0]));
	struct obstack *obst;
	int i;

	phase_init(&res->ph, "liveness check", irg, PHASE_DEFAULT_GROWTH, init_block_data, NULL);
	obst = phase_obst(&res->ph);

	FIRM_DBG_REGISTER(res->dbg, "ir.ana.lvchk");

	res->dfs           = dfs_new(&absgraph_irg_cfg_succ, irg);
	res->n_blocks      = dfs_get_n_nodes(res->dfs);
	res->back_edge_src = bitset_obstack_alloc(obst, res->n_blocks);
	res->back_edge_tgt = bitset_obstack_alloc(obst, res->n_blocks);
	res->map           = obstack_alloc(obst, res->n_blocks * sizeof(res->map[0]));

#if 0
	{
		char name[256];
		FILE *f;
		ir_snprintf(name, sizeof(name), "dfs_%F.dot", irg);
		if ((f = fopen(name, "wt")) != NULL) {
			dfs_dump(res->dfs, f);
			fclose(f);
		}
		dump_ir_block_graph(irg, "-lvchk");
	}
#endif

	/* fill the map which maps pre_num to block infos */
	for (i = res->n_blocks - 1; i >= 0; --i) {
		ir_node *irn = dfs_get_pre_num_node(res->dfs, i);
		res->map[i]  = phase_get_or_set_irn_data(&res->ph, irn);
	}

	/* first of all, compute the transitive closure of the CFG *without* back edges */
	red_trans_closure(res);

	/* now fill the two remaining bitsets concerning back edges */
	compute_back_edge_sets(res, get_irg_start_block(irg));

	DEBUG_ONLY({
		DBG((res->dbg, LEVEL_1, "liveness chk in %+F\n", irg));
		for (i = res->n_blocks - 1; i >= 0; --i) {
			ir_node *irn  = dfs_get_pre_num_node(res->dfs, i);
			bl_info_t *bi = get_block_info(res, irn);
			DBG((res->dbg, LEVEL_1, "lv_chk for %d -> %+F\n", i, irn));
			DBG((res->dbg, LEVEL_1, "\tred reach: %B\n", bi->red_reachable));
			DBG((res->dbg, LEVEL_1, "\ttgt reach: %B\n", bi->be_tgt_reach));
			DBG((res->dbg, LEVEL_1, "\ttgt dom:   %B\n", bi->be_tgt_dom));
		}
	})

	DBG((res->dbg, LEVEL_1, "back edge src: %B\n", res->back_edge_src));
	DBG((res->dbg, LEVEL_1, "back edge tgt: %B\n", res->back_edge_tgt));

	return res;
}

void lv_chk_old_free(lv_chk_old_t *lv)
{
	obstack_free(phase_obst(&lv->ph), NULL);
	dfs_free(lv->dfs);
	xfree(lv);
}

/**
 * Check the liveness state of a node with respect to a block.
 * The result is a bitmask of the lv_chk_state_* flags telling whether
 * @p what is live in, live at the end of and/or live out of @p bl.
 *
 * @param lv    The liveness check environment.
 * @param bl    The block under investigation.
 * @param what  The node to check for.
 * @return      A bitmask of lv_chk_state_in, lv_chk_state_end and
 *              lv_chk_state_out; 0 if @p what is not live at @p bl at all.
 */
unsigned lv_chk_old_bl_xxx(const lv_chk_old_t *lv, const ir_node *bl, const ir_node *what)
{
	stat_ev_cnt_decl(uses);
	stat_ev_cnt_decl(iter);

	int res  = 0;
	ir_node *what_bl;

	assert(is_Block(bl) && "can only check for liveness in a block");

	if (!is_liveness_node(what))
		return 0;

	stat_ev_ctx_push_fobj("node", what);
	stat_ev("lv_chk");

	what_bl = get_nodes_block(what);
	if (!block_dominates(what_bl, bl)) {
		stat_ev("lv_chk_no_dom");
		goto end;
	}

	/*
	 * If the block in question is the definition block, the check is
	 * simple: look for uses outside this block, or Phi uses whose
	 * corresponding control flow predecessor is this block.
	 */
	if (what_bl == bl) {
		const ir_edge_t *edge;

		stat_ev("lv_chk_def_block");
		DBG((lv->dbg, LEVEL_2, "lv check same block %+F in %+F\n", what, bl));
		foreach_out_edge (what, edge) {
			ir_node *use    = get_edge_src_irn(edge);
			ir_node *use_bl;

			if (!is_liveness_node(use))
				continue;

			stat_ev_cnt_inc(uses);
			use_bl = get_nodes_block(use);
			if (is_Phi(use)) {
				int pos = get_edge_src_pos(edge);
				use_bl  = get_Block_cfgpred_block(use_bl, pos);

				if (use_bl == bl) {
					DBG((lv->dbg, LEVEL_2, "\tphi %+F in succ %+F,%d -> live end\n", use, use_bl, pos));
					res |= lv_chk_state_end;
				}
			}

			if (use_bl != what_bl) {
				res = lv_chk_state_end | lv_chk_state_out;
				goto end;
			}
		}

		goto end;
	}

	/* this is the complicated case */
	else {
		bitset_t *visited   = bitset_alloca(lv->n_blocks);
		bitset_t *to_visit  = bitset_alloca(lv->n_blocks);
		bitset_t *next      = bitset_alloca(lv->n_blocks);
		bitset_t *uses      = bitset_alloca(lv->n_blocks);
		bl_info_t *def      = get_block_info(lv, what_bl);
		bl_info_t *bli      = get_block_info(lv, bl);

		const ir_edge_t *edge;

		DBG((lv->dbg, LEVEL_2, "lv check different block %+F in %+F\n", what, bl));
		foreach_out_edge (what, edge) {
			ir_node *user   = get_edge_src_irn(edge);
			ir_node *use_bl;
			bl_info_t *bi;

			if (!is_liveness_node(user))
				continue;

			stat_ev_cnt_inc(uses);
			use_bl = get_nodes_block(user);
			if (is_Phi(user)) {
				int pos = get_edge_src_pos(edge);

				use_bl = get_Block_cfgpred_block(use_bl, pos);
				bi     = get_block_info(lv, use_bl);

				if (use_bl == bl)
					res |= lv_chk_state_end | lv_chk_state_in;

				bitset_set(uses, bi->id);
			}

			else {
				bi = get_block_info(lv, use_bl);
				bitset_set(uses, bi->id);
				if (use_bl == bl)
					res |= lv_chk_state_in;
			}
		}
		DBG((lv->dbg, LEVEL_2, "\tuses: %B, #: %d\n", uses, bitset_popcnt(uses)));

		bitset_clear(uses, def->id);
		bitset_set(to_visit, bli->id);
		do {
			int id        = bitset_next_set(to_visit, 0);
			bl_info_t *bi = lv->map[id];

			stat_ev_cnt_inc(iter);
			DBG((lv->dbg, LEVEL_2, "\tto visit: %B\n", to_visit));
			DBG((lv->dbg, LEVEL_2, "\tvisited:  %B\n", visited));

			/*
			 * if one of the use blocks is reachable in the reduced
			 * graph, the node must be live there.  Note that this
			 * alone is not sufficient, since blocks reachable only
			 * via back edges are not contained in the red_reachable
			 * set.
			 */
			if (bitset_intersect(bi->red_reachable, uses)) {
				res = lv_chk_state_end | lv_chk_state_out | lv_chk_state_in;
				goto end;
			}

			/*
			 * if not, follow the relevant back edges and check
			 * whether their targets lead to blocks containing uses.
			 */
			else {
				bitset_set(visited, id);
				bitset_or(visited, bi->red_reachable);

				bitset_copy(next, bi->be_tgt_reach);
				bitset_and(next, def->be_tgt_dom);
				DBG((lv->dbg, LEVEL_2, "\tnext: %B\n----\n", next));

				if (bitset_intersect(uses, next)) {
					res = lv_chk_state_end | lv_chk_state_out | lv_chk_state_in;
					goto end;
				}

				bitset_or(to_visit, next);
				bitset_andnot(to_visit, visited);
			}
		} while (!bitset_is_empty(to_visit));
	}

end:
	stat_ev_cnt_done(uses, "lv_chk_uses");
	stat_ev_cnt_done(iter, "lv_chk_iter");
	stat_ev_ctx_pop();

	return res;
}
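
/*
 * Illustrative sketch (not part of the build): clients interested in a single
 * liveness state can mask the combined result.  The helper name below is
 * hypothetical; convenience wrappers, if any, belong in the public header.
 */
#if 0
static int lv_chk_old_live_out(const lv_chk_old_t *lv, const ir_node *bl, const ir_node *what)
{
	return (lv_chk_old_bl_xxx(lv, bl, what) & lv_chk_state_out) != 0;
}
#endif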