/*
 * Copyright (C) 1995-2007 University of Karlsruhe.  All right reserved.
 *
 * This file is part of libFirm.
 *
 * This file may be distributed and/or modified under the terms of the
 * GNU General Public License version 2 as published by the Free Software
 * Foundation and appearing in the file LICENSE.GPL included in the
 * packaging of this file.
 *
 * Licensees holding valid libFirm Professional Edition licenses may use
 * this file in accordance with the libFirm Commercial License.
 * Agreement provided with the Software.
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE.
 */

/**
 * @file    irlivechk.c
 * @date    21.04.2007
 * @author  Sebastian Hack
 * @version $Id$
 * @summary
 *
 * Liveness checks as developed by Benoit Boissinot, Fabrice Rastello and myself.
 *
 * The specialty here is that nothing has to be recomputed if new nodes are created
 * or old ones are deleted.
 *
 * This algorithm has one core routine, lv_chk_bl_xxx(), which performs the liveness check.
 * It relies only on the precomputation done in the constructor, which in turn needs:
 * - out edges
 * - the dominance tree
 * - data obtained from a depth-first search
 *
 * The precomputation remains valid as long as the CFG is not altered.
 */
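
/*
 * Minimal usage sketch (illustrative, not part of the original code): the
 * checker is built once per CFG and queried per (block, node) pair. The
 * edges_activate()/compute_doms() calls stand for whatever setup provides
 * out edges and dominance information and are assumptions here; only
 * lv_chk_new(), lv_chk_bl_xxx() and lv_chk_free() are defined in this file.
 *
 *   lv_chk_t *chk;
 *
 *   edges_activate(irg);            // out edges are required (assumed call)
 *   compute_doms(irg);              // dominance tree is required (assumed call)
 *
 *   chk = lv_chk_new(irg);
 *   if (lv_chk_bl_xxx(chk, block, node) & lv_chk_state_end) {
 *           // node is live at the end of block
 *   }
 *   lv_chk_free(chk);
 */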

#include <stdio.h>

#include "irgraph_t.h"
#include "irphase_t.h"
#include "iredges_t.h"
#include "irdom_t.h"     /* block_dominates(), dominates_for_each() */
#include "irprintf.h"
#include "irdump.h"

#include "dfs_t.h"
#include "bitset.h"
#include "util.h"
#include "xmalloc.h"     /* xmalloc(), xfree() */

#include "irlivechk.h"

#include "statev.h"

typedef struct _bl_info_t {
	ir_node *block;            /**< The block. */

	int id;                    /**< A tight number for the block.
	                                We just reuse the pre-order number
	                                from the DFS. */

	bitset_t *red_reachable;   /**< Holds the ids of all blocks reachable
	                                in the CFG modulo back edges. */

	bitset_t *be_tgt_reach;    /**< Target blocks of back edges whose
	                                sources are reachable from this block
	                                in the reduced graph. */

	bitset_t *be_tgt_dom;      /**< Target blocks of back edges which
	                                are dominated by this block. */
} bl_info_t;

#define get_block_info(lv, bl) ((bl_info_t *) phase_get_irn_data(&(lv)->ph, bl))

struct _lv_chk_t {
	ir_phase ph;               /**< The phase holding the per-block data. */
	dfs_t *dfs;                /**< The depth-first search of the CFG. */
	DEBUG_ONLY(firm_dbg_module_t *dbg;)
	int n_blocks;              /**< The number of blocks in the graph. */
	bitset_t *back_edge_src;   /**< Ids of all blocks that are sources of back edges. */
	bitset_t *back_edge_tgt;   /**< Ids of all blocks that are targets of back edges. */
	bl_info_t **map;           /**< Maps ids (DFS pre-order numbers) to block infos. */
};

static void *init_block_data(ir_phase *ph, ir_node *irn, void *old)
{
	lv_chk_t *lv      = container_of(ph, lv_chk_t, ph);
	bl_info_t *bi     = phase_alloc(ph, sizeof(bi[0]));

	bi->id            = dfs_get_pre_num(lv->dfs, irn);
	bi->block         = irn;
	bi->red_reachable = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks);
	bi->be_tgt_reach  = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks);
	bi->be_tgt_dom    = bitset_obstack_alloc(phase_obst(ph), lv->n_blocks);
	(void) old;
	return bi;
}

/**
 * Filter function to select all nodes for which liveness is computed.
 * @param irn A node.
 * @return    1 if the node shall be considered in liveness, 0 if not.
 */
static INLINE int is_liveness_node(const ir_node *irn)
{
	switch (get_irn_opcode(irn)) {
	case iro_Block:
	case iro_Bad:
	case iro_End:
		return 0;
	default:
		break;
	}

	return 1;
}

/**
 * Compute the transitive closure on the reduced graph.
 * The reduced graph is the original graph without back edges.
 * Since that is a DAG, a reverse post order of the graph gives a toposort
 * which is ideally suited for computing the transitive closure.
 * Note also that the DFS tree of the reduced graph is the same as that of
 * the original graph. This saves us from computing a new reverse post order,
 * and we can reuse the DFS tree of the original graph.
 */
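
/*
 * Small worked example (illustration only): for a CFG with edges
 * A -> B, B -> C, B -> D and C -> A, where C -> A is the only back edge,
 * the reduced graph drops C -> A. After this pass:
 *   red_reachable(A) = {B, C, D}, red_reachable(B) = {C, D},
 *   red_reachable(C) = red_reachable(D) = {},
 *   back_edge_src = {C}, back_edge_tgt = {A}.
 */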
static void red_trans_closure(lv_chk_t *lv)
{
	int i, n;

	for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) {
		ir_node *bl   = dfs_get_post_num_node(lv->dfs, i);
		bl_info_t *bi = get_block_info(lv, bl);

		const ir_edge_t *edge;

		foreach_block_succ (bl, edge) {
			ir_node *succ = get_edge_src_irn(edge);
			bl_info_t *si = get_block_info(lv, succ);
			dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, bl, succ);

			/*
			 * If the edge to the successor is not a back edge, include all
			 * blocks reachable from the successor in the reachable set of
			 * the current block.
			 */
			if (kind != DFS_EDGE_BACK) {
				assert(dfs_get_post_num(lv->dfs, bl)
						> dfs_get_post_num(lv->dfs, succ));
				bitset_or(bi->red_reachable, si->red_reachable);
				bitset_set(bi->red_reachable, si->id);
			}

			/* Otherwise mark the block as a back edge src and succ as back edge tgt. */
			else {
				bitset_set(lv->back_edge_src, bi->id);
				bitset_set(lv->back_edge_tgt, si->id);
			}
		}
	}
}

/**
 * Compute the two back edge sets for each block.
 * <code>be_tgt_reach</code> contains the targets of all back edges whose
 * sources are reachable from the block in the reduced graph.
 * <code>be_tgt_dom</code> contains all back edge target blocks strictly
 * dominated by the block.
 */
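
/*
 * Continuing the small example above (illustration only): the back edge
 * source C is reachable from A, B and C in the reduced graph, so
 * be_tgt_reach(A) = be_tgt_reach(B) = be_tgt_reach(C) = {A}, while
 * be_tgt_dom contains A only for the blocks that strictly dominate A
 * (e.g. the start block).
 */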
static void compute_back_edge_sets(lv_chk_t *lv, ir_node *bl)
{
	bl_info_t *bi = phase_get_or_set_irn_data(&(lv)->ph, bl);
	bitset_t *tmp = bitset_alloca(lv->n_blocks);

	bitset_pos_t elm;
	ir_node *n;

	dominates_for_each (bl, n) {
		bl_info_t *ni = phase_get_or_set_irn_data(&(lv)->ph, n);

		/* compute the information for the dominance subtree */
		compute_back_edge_sets(lv, n);

		/*
		 * Of course all blocks dominated by blocks in the
		 * subtree are also dominated by bl.
		 */
		bitset_or(bi->be_tgt_dom, ni->be_tgt_dom);

		/*
		 * Add the immediate dominee to the back edge tgt dominance
		 * bitset if it is the target node of a back edge.
		 */
		if (bitset_is_set(lv->back_edge_tgt, ni->id))
			bitset_set(bi->be_tgt_dom, ni->id);
	}

	/*
	 * Iterate over all back edge source nodes which are reachable from
	 * this node and put the targets of their back edges into the
	 * be_tgt_reach bitset of the node.
	 */
	bitset_copy(tmp, bi->red_reachable);
	bitset_set(tmp, bi->id);
	bitset_and(tmp, lv->back_edge_src);
	bitset_foreach (tmp, elm) {
		ir_node *src = lv->map[elm]->block;
		const ir_edge_t *edge;

		foreach_block_succ (src, edge) {
			ir_node *succ        = get_edge_src_irn(edge);
			dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, src, succ);

			if (kind == DFS_EDGE_BACK) {
				bl_info_t *si = get_block_info(lv, succ);
				bitset_set(bi->be_tgt_reach, si->id);
			}
		}
	}
}

lv_chk_t *lv_chk_new(ir_graph *irg)
{
	lv_chk_t *res = xmalloc(sizeof(res[0]));
	struct obstack *obst;
	int i;

	phase_init(&res->ph, "liveness check", irg, PHASE_DEFAULT_GROWTH, init_block_data, NULL);
	obst = phase_obst(&res->ph);

	FIRM_DBG_REGISTER(res->dbg, "ir.ana.lvchk");

	res->dfs           = dfs_new(&absgraph_irg_cfg_succ, irg);
	res->n_blocks      = dfs_get_n_nodes(res->dfs);
	res->back_edge_src = bitset_obstack_alloc(obst, res->n_blocks);
	res->back_edge_tgt = bitset_obstack_alloc(obst, res->n_blocks);
	res->map           = obstack_alloc(obst, res->n_blocks * sizeof(res->map[0]));

#ifdef ENABLE_STATS
	memset(&res->stat_data, 0, sizeof(res->stat_data));
	res->stat = &res->stat_data;
#endif
#if 0
	{
		char name[256];
		FILE *f;
		ir_snprintf(name, sizeof(name), "dfs_%F.dot", irg);
		if ((f = fopen(name, "wt")) != NULL) {
			dfs_dump(res->dfs, f);
			fclose(f);
		}
		dump_ir_block_graph(irg, "-lvchk");
	}
#endif

	/* fill the map which maps pre_num to block infos */
	for (i = res->n_blocks - 1; i >= 0; --i) {
		ir_node *irn = dfs_get_pre_num_node(res->dfs, i);
		res->map[i]  = phase_get_or_set_irn_data(&res->ph, irn);
	}

	/* first of all, compute the transitive closure of the CFG *without* back edges */
	red_trans_closure(res);

	/* now fill the two remaining bitsets concerning back edges */
	compute_back_edge_sets(res, get_irg_start_block(irg));

	DEBUG_ONLY({
		DBG((res->dbg, LEVEL_1, "liveness chk in %+F\n", irg));
		for (i = res->n_blocks - 1; i >= 0; --i) {
			ir_node *irn  = dfs_get_pre_num_node(res->dfs, i);
			bl_info_t *bi = get_block_info(res, irn);
			DBG((res->dbg, LEVEL_1, "lv_chk for %d -> %+F\n", i, irn));
			DBG((res->dbg, LEVEL_1, "\tred reach: %B\n", bi->red_reachable));
			DBG((res->dbg, LEVEL_1, "\ttgt reach: %B\n", bi->be_tgt_reach));
			DBG((res->dbg, LEVEL_1, "\ttgt dom:   %B\n", bi->be_tgt_dom));
		}
	})

	DBG((res->dbg, LEVEL_1, "back edge src: %B\n", res->back_edge_src));
	DBG((res->dbg, LEVEL_1, "back edge tgt: %B\n", res->back_edge_tgt));

	return res;
}

void lv_chk_free(lv_chk_t *lv)
{
	obstack_free(phase_obst(&lv->ph), NULL);
	dfs_free(lv->dfs);
	xfree(lv);
}

/**
 * Check the liveness state of a node with respect to a block.
 * This is the core routine of the liveness check. It determines in one
 * pass whether @p what is live in, live at the end and/or live out of
 * @p bl.
 *
 * @param lv    The liveness check environment.
 * @param bl    The block under investigation.
 * @param what  The node to check for.
 * @return      A bitmask of lv_chk_state_in, lv_chk_state_end and
 *              lv_chk_state_out indicating where @p what is live with
 *              respect to @p bl.
 */
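/*
 * Example of interpreting the result (illustration only; lv, bl and irn
 * are placeholders):
 *
 *   unsigned state = lv_chk_bl_xxx(lv, bl, irn);
 *   if (state & lv_chk_state_end) {
 *           // irn is live at the end of bl
 *   }
 */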
unsigned lv_chk_bl_xxx(const lv_chk_t *lv, const ir_node *bl, const ir_node *what)
{
	stat_ev_cnt_decl(uses);
	stat_ev_cnt_decl(iter);

	int res  = 0;
	ir_node *what_bl;

	assert(is_Block(bl) && "can only check for liveness in a block");

	if (!is_liveness_node(what))
		return 0;

	stat_ev_ctx_push_fobj("node", what);
	stat_ev("lv_chk");

	what_bl = get_nodes_block(what);
	if (!block_dominates(what_bl, bl)) {
		stat_ev("lv_chk_no_dom");
		goto end;
	}

	/*
	 * If the block in question is the same as the definition block,
	 * the algorithm is simple. Just check whether there is a use outside
	 * this block, or a phi use on an edge leaving this block.
	 */
	if (what_bl == bl) {
		const ir_edge_t *edge;

		stat_ev("lv_chk_def_block");
		DBG((lv->dbg, LEVEL_2, "lv check same block %+F in %+F\n", what, bl));
		foreach_out_edge (what, edge) {
			ir_node *use    = get_edge_src_irn(edge);
			ir_node *use_bl;

			if (!is_liveness_node(use))
				continue;

			stat_ev_cnt_inc(uses);
			use_bl = get_nodes_block(use);
			if (is_Phi(use)) {
				int pos = get_edge_src_pos(edge);
				use_bl  = get_Block_cfgpred_block(use_bl, pos);

				if (use_bl == bl) {
					DBG((lv->dbg, LEVEL_2, "\tphi %+F in succ %+F,%d -> live end\n", use, use_bl, pos));
					res |= lv_chk_state_end;
				}
			}

			if (use_bl != what_bl) {
				res = lv_chk_state_end | lv_chk_state_out;
				goto end;
			}
		}

		goto end;
	}

	/* this is the complicated case */
	else {
		bitset_t *visited   = bitset_alloca(lv->n_blocks);
		bitset_t *to_visit  = bitset_alloca(lv->n_blocks);
		bitset_t *next      = bitset_alloca(lv->n_blocks);
		bitset_t *uses      = bitset_alloca(lv->n_blocks);
		bl_info_t *def      = get_block_info(lv, what_bl);
		bl_info_t *bli      = get_block_info(lv, bl);

		const ir_edge_t *edge;

		DBG((lv->dbg, LEVEL_2, "lv check different block %+F in %+F\n", what, bl));
		foreach_out_edge (what, edge) {
			ir_node *user   = get_edge_src_irn(edge);
			ir_node *use_bl;
			bl_info_t *bi;

			if (!is_liveness_node(user))
				continue;

			stat_ev_cnt_inc(uses);
			use_bl = get_nodes_block(user);
			if (is_Phi(user)) {
				int pos = get_edge_src_pos(edge);

				use_bl = get_Block_cfgpred_block(use_bl, pos);
				bi     = get_block_info(lv, use_bl);

				if (use_bl == bl)
					res |= lv_chk_state_end | lv_chk_state_in;

				bitset_set(uses, bi->id);
			}

			else {
				bi = get_block_info(lv, use_bl);
				bitset_set(uses, bi->id);
				if (use_bl == bl)
					res |= lv_chk_state_in;
			}
		}
		DBG((lv->dbg, LEVEL_2, "\tuses: %B, #: %d\n", uses, bitset_popcnt(uses)));

		bitset_clear(uses, def->id);
		bitset_set(to_visit, bli->id);
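
		/*
		 * Fixed-point search: starting from the block in question, add
		 * everything reachable in the reduced graph and, from there, the
		 * back edge targets dominated by the definition block. If a use
		 * block is hit, the value is live here; if the work set runs
		 * empty, it is not.
		 */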
		do {
			int id        = bitset_next_set(to_visit, 0);
			bl_info_t *bi = lv->map[id];

			stat_ev_cnt_inc(iter);
			DBG((lv->dbg, LEVEL_2, "\tto visit: %B\n", to_visit));
			DBG((lv->dbg, LEVEL_2, "\tvisited:  %B\n", visited));

			/*
			 * If one of the use blocks is reachable in the reduced graph,
			 * the node must be live there. Note that this alone is not
			 * sufficient, since blocks reachable only via back edges are
			 * not contained in the red_reachable set.
			 */
			if (bitset_intersect(bi->red_reachable, uses)) {
				res = lv_chk_state_end | lv_chk_state_out | lv_chk_state_in;
				goto end;
			}

			/*
			 * If not, we have to follow the relevant back edges and check
			 * whether their targets lead to blocks where the node is used.
			 */
			else {
				bitset_set(visited, id);
				bitset_or(visited, bi->red_reachable);

				bitset_copy(next, bi->be_tgt_reach);
				bitset_and(next, def->be_tgt_dom);
				DBG((lv->dbg, LEVEL_2, "\tnext: %B\n----\n", next));

				if (bitset_intersect(uses, next)) {
					res = lv_chk_state_end | lv_chk_state_out | lv_chk_state_in;
					goto end;
				}

				bitset_or(to_visit, next);
				bitset_andnot(to_visit, visited);
			}
		} while (!bitset_is_empty(to_visit));
	}

end:
	stat_ev_cnt_done(uses, "lv_chk_uses");
	stat_ev_cnt_done(iter, "lv_chk_iter");
	stat_ev_ctx_pop();

	return res;
}