/*
 * This file is part of libFirm.
 * Copyright (C) 2012 Inria Rhone-Alpes.
 */

/**
 * @file
 * @date    21.04.2007
 * @author  Sebastian Hack
 * @brief   Liveness checks as developed by Benoit Boissinot, Fabrice Rastello
 *          and myself.
 *
 * The specialty here is that nothing has to be recomputed when new nodes are
 * created or old ones are deleted.
 *
 * The algorithm has one core query routine, lv_chk_bl_xxx(), which performs
 * the liveness check. It relies only on the precomputation done in the
 * constructor, which in turn needs:
 * - out edges
 * - the dominance tree
 * - data obtained from a depth-first search
 *
 * The precomputation remains valid as long as the CFG is not altered.
 */
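
/*
 * Rough usage sketch (see irlivechk.h for the authoritative interface):
 * the checker is constructed once per graph and then queried per
 * (block, value) pair; some_block and some_value are placeholders.
 *
 *   lv_chk_t *lv    = lv_chk_new(irg);
 *   unsigned  state = lv_chk_bl_xxx(lv, some_block, some_value);
 *   if (state & lv_chk_state_in)  { ... value is live-in at some_block ...  }
 *   if (state & lv_chk_state_end) { ... value is live-end at some_block ... }
 *   lv_chk_free(lv);
 */
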
#include <config.h>

#include <stdio.h>

/* statev is expensive here, only enable when needed */
#define DISABLE_STATEV

#include "irgraph_t.h"
#include "irnode_t.h"
#include "irnodemap.h"
#include "iredges_t.h"

#include "irprintf.h"
#include "irdom.h"
#include "irdump.h"

#include "dfs_t.h"
#include "bitset.h"
#include "util.h"

#include "irlivechk.h"

#include "statev_t.h"

typedef struct bl_info_t {
	const ir_node *block;      /**< The block. */

	int be_tgt_calc : 1;
	int id : 31;               /**< A tight number for the block.
	                                We're just reusing the pre num from
	                                the DFS. */
	bitset_t *red_reachable;   /**< Holds the ids of all blocks reachable
	                                in the CFG modulo back edges. */

	bitset_t *be_tgt_reach;    /**< Target blocks of back edges whose
	                                sources are reachable from this block
	                                in the reduced graph. */
} bl_info_t;

struct lv_chk_t {
	ir_nodemap     block_infos;
	struct obstack obst;
	dfs_t         *dfs;
	int            n_blocks;
	bitset_t      *back_edge_src;
	bitset_t      *back_edge_tgt;
	bl_info_t    **map;
	DEBUG_ONLY(firm_dbg_module_t *dbg;)
};

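/** Return the per-block info for @p block, lazily allocating and
 *  initializing it on the first request. */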
static bl_info_t *get_block_info(lv_chk_t *lv, const ir_node *block)
{
	bl_info_t *info = ir_nodemap_get(bl_info_t, &lv->block_infos, block);
	if (info == NULL) {
		info                = OALLOC(&lv->obst, bl_info_t);
		info->id            = get_Block_dom_tree_pre_num(block);
		info->block         = block;
		info->red_reachable = bitset_obstack_alloc(&lv->obst, lv->n_blocks);
		info->be_tgt_reach  = bitset_obstack_alloc(&lv->obst, lv->n_blocks);
		info->be_tgt_calc   = 0;
		ir_nodemap_insert(&lv->block_infos, block, info);
	}
	return info;
}

/**
 * Compute the transitive closure on the reduced graph.
 * The reduced graph is the original graph without back edges.
 * Since that is a DAG, a reverse post order of the graph gives a topological
 * sort which is ideally suited to compute the transitive closure.
 * Note also that the DFS tree of the reduced graph is the same as the one
 * of the original graph. This saves us computing a new reverse post order;
 * we can simply re-use the DFS tree of the original graph.
 */
static void red_trans_closure(lv_chk_t *lv)
{
	int i, n;

	for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) {
		const ir_node *bl = (const ir_node*) dfs_get_post_num_node(lv->dfs, i);
		bl_info_t *bi = get_block_info(lv, bl);

		bitset_set(bi->red_reachable, bi->id);
		foreach_block_succ (bl, edge) {
			ir_node *succ = get_edge_src_irn(edge);
			bl_info_t *si = get_block_info(lv, succ);
			dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, bl, succ);

			/*
			 * if the edge to the successor is not a back edge, include all
			 * blocks reachable from there into the reachable set of the
			 * current node
			 */
			if (kind != DFS_EDGE_BACK) {
				assert(dfs_get_post_num(lv->dfs, bl) > dfs_get_post_num(lv->dfs, succ));
				bitset_or(bi->red_reachable, si->red_reachable);
			}

			/* mark the block as a back edge src and succ as back edge tgt. */
			else {
				bitset_set(lv->back_edge_src, bi->id);
				bitset_set(lv->back_edge_tgt, si->id);
			}
		}
	}
}

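/**
 * Compute the back edge chain of @p bl: collect in its be_tgt_reach set the
 * ids of back edge targets reachable from @p bl via back edge sources that
 * are (reduced-)reachable from @p bl, recursing into those targets. Results
 * are memoized via be_tgt_calc.
 */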
static void compute_back_edge_chain(lv_chk_t *lv, const ir_node *bl)
{
	bitset_t *tmp = bitset_alloca(lv->n_blocks);
	bl_info_t *bi = get_block_info(lv, bl);

	DBG((lv->dbg, LEVEL_2, "computing T_%d\n", bi->id));

	/* put all back edge sources reachable (reduced) from here in tmp */
	bitset_copy(tmp, bi->red_reachable);
	bitset_set(tmp, bi->id);
	bitset_and(tmp, lv->back_edge_src);
	bi->be_tgt_calc = 1;

	DBG((lv->dbg, LEVEL_2, "\treachable be src: %B\n", tmp));

	/* iterate over them ... */
	bitset_foreach(tmp, elm) {
		bl_info_t *si = lv->map[elm];

		/* and find back edge targets which are not reduced reachable from bl */
		foreach_block_succ (si->block, edge) {
			ir_node *tgt         = get_edge_src_irn(edge);
			bl_info_t *ti        = get_block_info(lv, tgt);
			dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, si->block, tgt);

			if (kind == DFS_EDGE_BACK && !bitset_is_set(bi->red_reachable, ti->id)) {
				if (!ti->be_tgt_calc)
					compute_back_edge_chain(lv, tgt);
				bitset_set(bi->be_tgt_reach, ti->id);
				bitset_or(bi->be_tgt_reach, ti->be_tgt_reach);
			}
		}
		bitset_clear(bi->be_tgt_reach, bi->id);
	}
}

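/**
 * Compute be_tgt_reach for all blocks: start a chain computation at every
 * back edge source, propagate the sets along non-back edges (successors are
 * visited before their predecessors, as in red_trans_closure()), and finally
 * make every block a member of its own set so it can serve as a viewing
 * point.
 */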
static inline void compute_back_edge_chains(lv_chk_t *lv)
{
	int i, n;

	DBG((lv->dbg, LEVEL_2, "back edge sources: %B\n", lv->back_edge_src));
	bitset_foreach(lv->back_edge_src, elm) {
		compute_back_edge_chain(lv, lv->map[elm]->block);
	}

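	/* propagate the be_tgt_reach sets of non-back-edge successors into all
	 * blocks which are not back edge targets themselves */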
	for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) {
		const ir_node *bl = (const ir_node*) dfs_get_post_num_node(lv->dfs, i);
		bl_info_t *bi     = get_block_info(lv, bl);

		if (!bitset_is_set(lv->back_edge_tgt, bi->id)) {
			foreach_block_succ (bl, edge) {
				ir_node *succ = get_edge_src_irn(edge);
				bl_info_t *si = get_block_info(lv, succ);
				dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, bl, succ);

				if (kind != DFS_EDGE_BACK) {
					assert(dfs_get_post_num(lv->dfs, bl) > dfs_get_post_num(lv->dfs, succ));
					bitset_or(bi->be_tgt_reach, si->be_tgt_reach);
				}
			}
		}
	}

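	/* every block is a viewing point for itself */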
	for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) {
		const ir_node *bl = (const ir_node*) dfs_get_post_num_node(lv->dfs, i);
		bl_info_t *bi     = get_block_info(lv, bl);
		bitset_set(bi->be_tgt_reach, bi->id);
	}
}

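/**
 * Construct the liveness checker for @p irg: ensure dominance information,
 * run a DFS over the CFG and precompute the reduced-graph reachability and
 * the back edge chains used by the queries.
 */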
lv_chk_t *lv_chk_new(ir_graph *irg)
{
	lv_chk_t *res = XMALLOC(lv_chk_t);
	int i;

	assure_doms(irg);

	stat_ev_tim_push();
	ir_nodemap_init(&res->block_infos, irg);
	obstack_init(&res->obst);

	FIRM_DBG_REGISTER(res->dbg, "ir.ana.lvchk");

	res->dfs           = dfs_new(&absgraph_irg_cfg_succ, irg);
	res->n_blocks      = dfs_get_n_nodes(res->dfs);
	res->back_edge_src = bitset_obstack_alloc(&res->obst, res->n_blocks);
	res->back_edge_tgt = bitset_obstack_alloc(&res->obst, res->n_blocks);
	res->map           = OALLOCNZ(&res->obst, bl_info_t*, res->n_blocks);

	/* fill the map which maps pre_num to block infos */
	for (i = res->n_blocks - 1; i >= 0; --i) {
		ir_node *irn  = (ir_node *) dfs_get_pre_num_node(res->dfs, i);
		bl_info_t *bi = get_block_info(res, irn);
		assert(bi->id < res->n_blocks);
		assert(res->map[bi->id] == NULL);
		res->map[bi->id] = bi;
	}

	/* first of all, compute the transitive closure of the CFG *without* back edges */
	red_trans_closure(res);

	/* compute back edge chains */
	compute_back_edge_chains(res);

#ifndef NDEBUG
	DBG((res->dbg, LEVEL_1, "liveness chk in %+F\n", irg));
	for (i = res->n_blocks - 1; i >= 0; --i) {
		const ir_node *irn = (const ir_node*) dfs_get_pre_num_node(res->dfs, i);
		bl_info_t *bi      = get_block_info(res, irn);
		DBG((res->dbg, LEVEL_1, "lv_chk for %d -> %+F\n", i, irn));
		DBG((res->dbg, LEVEL_1, "\tred reach: %B\n", bi->red_reachable));
		DBG((res->dbg, LEVEL_1, "\ttgt reach: %B\n", bi->be_tgt_reach));
	}
#endif

	DBG((res->dbg, LEVEL_1, "back edge src: %B\n", res->back_edge_src));
	DBG((res->dbg, LEVEL_1, "back edge tgt: %B\n", res->back_edge_tgt));

	stat_ev_tim_pop("lv_chk_cons_time");
	return res;
}

void lv_chk_free(lv_chk_t *lv)
{
	dfs_free(lv->dfs);
	obstack_free(&lv->obst, NULL);
	ir_nodemap_destroy(&lv->block_infos);
	xfree(lv);
}

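/**
 * Check the liveness state of @p var with respect to block @p bl. The result
 * is a bitmask of lv_chk_state_in, lv_chk_state_end and lv_chk_state_out,
 * or 0 if the variable is not live at @p bl at all.
 */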
unsigned lv_chk_bl_xxx(lv_chk_t *lv, const ir_node *bl, const ir_node *var)
{
	int res  = 0;
	ir_node *def_bl;
	stat_ev_cnt_decl(uses);
	stat_ev_cnt_decl(iter);

	assert(is_Block(bl) && "can only check for liveness in a block");

	/* If the variable is not a liveness-related var, bail out. */
	if (!is_liveness_node(var))
		return 0;

	stat_ev_ctx_push_fmt("lv_chk", "%u", get_irn_idx(var));
	stat_ev_tim_push();

	/* If there is no dominance relation, bail out, too. */
	def_bl = get_nodes_block(var);
	if (!block_dominates(def_bl, bl)) {
		stat_ev("lv_chk_no_dom");
		goto end;
	}

	/*
	 * If the block in question is the same as the definition block,
	 * the algorithm is simple. Just check for uses not inside this block.
	 */
	if (def_bl == bl) {
		stat_ev("lv_chk_def_block");
		DBG((lv->dbg, LEVEL_2, "lv check same block %+F in %+F\n", var, bl));
		foreach_out_edge (var, edge) {
			ir_node *use    = get_edge_src_irn(edge);
			ir_node *use_bl;

			if (!is_liveness_node(use))
				continue;

			stat_ev_cnt_inc(uses);
			use_bl = get_nodes_block(use);
			if (is_Phi(use)) {
				int pos = get_edge_src_pos(edge);
				use_bl  = get_Block_cfgpred_block(use_bl, pos);

				if (use_bl == bl) {
					DBG((lv->dbg, LEVEL_2, "\tphi %+F in succ %+F,%d -> live end\n", use, use_bl, pos));
					res |= lv_chk_state_end;
				}
			}

			if (use_bl != def_bl) {
				res = lv_chk_state_end | lv_chk_state_out;
				goto end;
			}
		}

		goto end;
	}

	/*
	 * This is the more complicated case.
	 * We try to gather as much information as possible while looking
	 * at the uses.
	 *
	 * Note that we know for sure that bl != def_bl. That is sometimes
	 * silently exploited below.
	 */
	else {
		bl_info_t *def = get_block_info(lv, def_bl);
		bl_info_t *bli = get_block_info(lv, bl);
		bitset_t *uses = bitset_alloca(lv->n_blocks);
		bitset_t *Tq;

		size_t i;
		unsigned min_dom, max_dom;

		/* If the block has no DFS info, it cannot be reached.
		 * This can happen in functions with endless loops.
		 * We then bail out, since nothing is live there.
		 *
		 * TODO: Is that right?
		 */
		if (!bli)
			goto end;

		(void) def;
		DBG((lv->dbg, LEVEL_2, "lv check %+F (def in %+F #%d) in different block %+F #%d\n",
		     var, def_bl, def->id, bl, bli->id));

		foreach_out_edge (var, edge) {
			ir_node *user = get_edge_src_irn(edge);
			int mask      = lv_chk_state_in;

			ir_node *use_bl;
			bl_info_t *bi;

			/* if the user is no liveness node, the use does not count */
			if (!is_liveness_node(user))
				continue;

			stat_ev_cnt_inc(uses);

			/* If the user is a phi, the use is in the predecessor.
			 * Furthermore, prepare a mask so that in the case where
			 * bl (the block in question) coincides with a use, it
			 * can be marked live_end there. */
			use_bl = get_nodes_block(user);
			if (is_Phi(user)) {
				int pos = get_edge_src_pos(edge);
				use_bl  = get_Block_cfgpred_block(use_bl, pos);
				mask   |= lv_chk_state_end;
			}

			/* If the use block coincides with the query block, we
			 * already gather a little liveness information.
			 * The variable is surely live there, since bl != def_bl
			 * (that case is treated above). */
			if (use_bl == bl)
				res |= mask;

			bi = get_block_info(lv, use_bl);

			if (bi)
				bitset_set(uses, bi->id);
		}

		/* Get the dominance range which really matters. All uses outside
		 * the definition's dominance range need not be considered. Note
		 * that the definition itself is not considered either. The case
		 * where bl == def_bl is treated above. */
		min_dom = get_Block_dom_tree_pre_num(def_bl) + 1;
		max_dom = get_Block_dom_max_subtree_pre_num(def_bl);

		DBG((lv->dbg, LEVEL_2, "\tuses: %B\n", uses));

		/* Prepare a set with all reachable back edge targets.
		 * This will determine our "viewing points" from which
		 * we will search for the gathered uses. */
		Tq = bli->be_tgt_reach;

		/* Now visit all viewing points in this set which lie in the
		 * dominance range of the variable. Note that for reducible
		 * flow graphs the first iteration is sufficient and the loop
		 * will be left. */
		DBG((lv->dbg, LEVEL_2, "\tbe tgt reach: %B, dom span: [%d, %d]\n", Tq, min_dom, max_dom));
		i = bitset_next_set(Tq, min_dom);
		while (i <= max_dom) {
			bl_info_t *ti = lv->map[i];
			int use_in_current_block = bitset_is_set(uses, ti->id);

			stat_ev_cnt_inc(iter);

			/*
			 * This is somewhat tricky. Since this routine handles both
			 * live-in and live-end/out, we have to handle all the border
			 * cases correctly. Each node is in its own red_reachable set
			 * (see the calculation function above). That means that in the
			 * case where bl == t, the intersection check of uses and
			 * reachability below will always find an intersection,
			 * namely t.
			 *
			 * However, if a block contains a use and the variable is dead
			 * afterwards, it is not live end/out at that block. The
			 * exception is a back edge target: if a var is live-in at a
			 * back edge target, it is also live out/end there since the
			 * variable is live in the underlying loop. So in the case
			 * where t == bl and bl is not a back edge target, we have to
			 * remove that use from consideration to determine if the var
			 * is live out/end there.
			 *
			 * Note that the live-in information has been calculated by the
			 * uses iteration above.
			 */
			if (ti == bli && !bitset_is_set(lv->back_edge_tgt, ti->id)) {
				DBG((lv->dbg, LEVEL_2, "\tlooking not from a back edge target and q == t. removing use: %d\n", ti->id));
				bitset_clear(uses, ti->id);
			}

			/* If we can reach a use, the variable is live there and we say goodbye */
			DBG((lv->dbg, LEVEL_2, "\tlooking from %d: seeing %B\n", ti->id, ti->red_reachable));
			if (bitset_intersect(ti->red_reachable, uses)) {
				res |= lv_chk_state_in | lv_chk_state_out | lv_chk_state_end;
				goto end;
			}

			/*
			 * If we deleted a use due to the reasoning above, we have to
			 * re-add it since it might still be visible from further
			 * viewing points (we only need that in the non-reducible case).
			 */
			if (use_in_current_block)
				bitset_set(uses, ti->id);

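			/* continue with the next viewing point behind ti's dominance subtree */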
			i = bitset_next_set(Tq, get_Block_dom_max_subtree_pre_num(ti->block) + 1);
		}
	}

end:
	stat_ev_tim_pop("lv_chk_query_time");
	stat_ev_cnt_done(uses, "lv_chk_uses");
	stat_ev_cnt_done(iter, "lv_chk_iter");
	stat_ev_ctx_pop("lv_chk");

	return res;
}