make statev API public
[libfirm] / ir / ana / irlivechk.c
/*
 * Copyright (C) 1995-2007 Inria Rhone-Alpes.  All rights reserved.
 *
 * This file is part of libFirm.
 *
 * This file may be distributed and/or modified under the terms of the
 * GNU General Public License version 2 as published by the Free Software
 * Foundation and appearing in the file LICENSE.GPL included in the
 * packaging of this file.
 *
 * Licensees holding valid libFirm Professional Edition licenses may use
 * this file in accordance with the libFirm Commercial License
 * Agreement provided with the Software.
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE.
 */

/**
 * @file    irlivechk.c
 * @date    21.04.2007
 * @author  Sebastian Hack
 * @brief   Liveness checks as developed by Benoit Boissinot, Fabrice Rastello and myself.
 *
 * The specialty here is that nothing has to be recomputed if new nodes are created
 * or old ones deleted.
 *
 * This algorithm has one core routine, lv_chk_bl_xxx(), which performs the liveness check.
 * It only relies on the precomputation done in the constructor, which in turn needs:
 * - out edges
 * - the dominance tree
 * - data obtained from a depth-first-search
 *
 * The precomputation remains valid as long as the CFG is not altered.
 */
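/*
 * Minimal usage sketch (illustrative only; irg, block and value are
 * placeholder names, the functions and the lv_chk_state_* flags are
 * declared in irlivechk.h):
 *
 *     lv_chk_t *chk   = lv_chk_new(irg);
 *     unsigned  state = lv_chk_bl_xxx(chk, block, value);
 *     if (state & lv_chk_state_end) {
 *             ... value is live at the end of block ...
 *     }
 *     lv_chk_free(chk);
 */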
#include <config.h>

#include <stdio.h>

/* statev is expensive here, only enable when needed */
#define DISABLE_STATEV

#include "irgraph_t.h"
#include "irnode_t.h"
#include "irnodemap.h"
#include "iredges_t.h"

#include "irprintf.h"
#include "irdom.h"
#include "irdump.h"

#include "dfs_t.h"
#include "bitset.h"
#include "util.h"

#include "irlivechk.h"

#include "statev_t.h"

typedef struct bl_info_t {
        const ir_node *block;      /**< The block. */

        int be_tgt_calc : 1;       /**< set if be_tgt_reach has been
                                        computed for this block. */
        int id : 31;               /**< a tight number for the block;
                                        we reuse the dominance tree
                                        pre-order number here. */
        bitset_t *red_reachable;   /**< Holds the ids of all blocks reachable
                                        in the CFG modulo back edges. */

        bitset_t *be_tgt_reach;    /**< target blocks of back edges whose
                                        sources are reachable from this block
                                        in the reduced graph. */
} bl_info_t;

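/** Environment for the liveness checks of one graph. */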
struct lv_chk_t {
        ir_nodemap     block_infos;   /**< maps each block to its bl_info_t. */
        struct obstack obst;          /**< obstack holding the auxiliary data. */
        dfs_t         *dfs;           /**< the DFS of the CFG. */
        int            n_blocks;      /**< number of blocks in the DFS. */
        bitset_t      *back_edge_src; /**< ids of all back edge source blocks. */
        bitset_t      *back_edge_tgt; /**< ids of all back edge target blocks. */
        bl_info_t    **map;           /**< maps block ids to their bl_info_t. */
        DEBUG_ONLY(firm_dbg_module_t *dbg;)
};

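/**
 * Get the liveness check info for a block, allocating and initializing
 * it lazily on first access.
 */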
static bl_info_t *get_block_info(lv_chk_t *lv, const ir_node *block)
{
        bl_info_t *info = ir_nodemap_get(bl_info_t, &lv->block_infos, block);
        if (info == NULL) {
                info                = OALLOC(&lv->obst, bl_info_t);
                info->id            = get_Block_dom_tree_pre_num(block);
                info->block         = block;
                info->red_reachable = bitset_obstack_alloc(&lv->obst, lv->n_blocks);
                info->be_tgt_reach  = bitset_obstack_alloc(&lv->obst, lv->n_blocks);
                info->be_tgt_calc   = 0;
                ir_nodemap_insert(&lv->block_infos, block, info);
        }
        return info;
}

/**
 * Filter function to select all nodes for which liveness is computed.
 * @param irn A node.
 * @return    1 if the node shall be considered in liveness, 0 if not.
 */
static inline int is_liveness_node(const ir_node *irn)
{
        switch (get_irn_opcode(irn)) {
        case iro_Block:
        case iro_Bad:
        case iro_End:
        case iro_Anchor:
                return 0;
        default:
                break;
        }

        return 1;
}

/**
 * Compute the transitive closure on the reduced graph.
 * The reduced graph is the original graph without back edges.
 * Since that is a DAG, a reverse post order of the graph gives a toposort
 * which is ideally suited to compute the transitive closure.
 * Note also that the DFS tree of the reduced graph is the same as the one
 * of the original graph, so we can reuse it and do not have to compute a
 * new reverse post order.
 */
static void red_trans_closure(lv_chk_t *lv)
{
        int i, n;

        for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) {
                const ir_node *bl = (const ir_node*) dfs_get_post_num_node(lv->dfs, i);
                bl_info_t *bi = get_block_info(lv, bl);

                bitset_set(bi->red_reachable, bi->id);
                foreach_block_succ (bl, edge) {
                        ir_node *succ = get_edge_src_irn(edge);
                        bl_info_t *si = get_block_info(lv, succ);
                        dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, bl, succ);

                        /*
                         * if the edge to the successor is not a back edge, include all
                         * blocks reachable from there into the reachable set of the
                         * current block
                         */
                        if (kind != DFS_EDGE_BACK) {
                                assert(dfs_get_post_num(lv->dfs, bl) > dfs_get_post_num(lv->dfs, succ));
                                bitset_or(bi->red_reachable, si->red_reachable);
                        }

                        /* mark the block as a back edge src and succ as back edge tgt. */
                        else {
                                bitset_set(lv->back_edge_src, bi->id);
                                bitset_set(lv->back_edge_tgt, si->id);
                        }
                }
        }
}

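/**
 * Compute the back edge chain of a block: the set of back edge targets whose
 * sources are (reduced) reachable from @p bl but which are not (reduced)
 * reachable from @p bl themselves, closed transitively over the chains of
 * those targets.
 */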
static void compute_back_edge_chain(lv_chk_t *lv, const ir_node *bl)
{
        bitset_t *tmp = bitset_alloca(lv->n_blocks);
        bl_info_t *bi = get_block_info(lv, bl);

        DBG((lv->dbg, LEVEL_2, "computing T_%d\n", bi->id));

        /* put all back edge sources reachable (reduced) from here in tmp */
        bitset_copy(tmp, bi->red_reachable);
        bitset_set(tmp, bi->id);
        bitset_and(tmp, lv->back_edge_src);
        bi->be_tgt_calc = 1;

        DBG((lv->dbg, LEVEL_2, "\treachable be src: %B\n", tmp));

        /* iterate over them ... */
        bitset_foreach(tmp, elm) {
                bl_info_t *si = lv->map[elm];

                /* and find back edge targets which are not reduced reachable from bl */
                foreach_block_succ (si->block, edge) {
                        ir_node *tgt         = get_edge_src_irn(edge);
                        bl_info_t *ti        = get_block_info(lv, tgt);
                        dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, si->block, tgt);

                        if (kind == DFS_EDGE_BACK && !bitset_is_set(bi->red_reachable, ti->id)) {
                                if (!ti->be_tgt_calc)
                                        compute_back_edge_chain(lv, tgt);
                                bitset_set(bi->be_tgt_reach, ti->id);
                                bitset_or(bi->be_tgt_reach, ti->be_tgt_reach);
                        }
                }
                bitset_clear(bi->be_tgt_reach, bi->id);
        }
}

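/**
 * Compute the back edge chains for all blocks: first for all back edge
 * sources, then propagate the sets along the non-back edges to the blocks
 * that are not back edge targets. Finally, every block is added to its own
 * be_tgt_reach set.
 */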
static inline void compute_back_edge_chains(lv_chk_t *lv)
{
        int i, n;

        DBG((lv->dbg, LEVEL_2, "back edge sources: %B\n", lv->back_edge_src));
        bitset_foreach(lv->back_edge_src, elm) {
                compute_back_edge_chain(lv, lv->map[elm]->block);
        }

        for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) {
                const ir_node *bl = (const ir_node*) dfs_get_post_num_node(lv->dfs, i);
                bl_info_t *bi     = get_block_info(lv, bl);

                if (!bitset_is_set(lv->back_edge_tgt, bi->id)) {
                        foreach_block_succ (bl, edge) {
                                ir_node *succ = get_edge_src_irn(edge);
                                bl_info_t *si = get_block_info(lv, succ);
                                dfs_edge_kind_t kind = dfs_get_edge_kind(lv->dfs, bl, succ);

                                if (kind != DFS_EDGE_BACK) {
                                        assert(dfs_get_post_num(lv->dfs, bl) > dfs_get_post_num(lv->dfs, succ));
                                        bitset_or(bi->be_tgt_reach, si->be_tgt_reach);
                                }
                        }
                }
        }

        for (i = 0, n = dfs_get_n_nodes(lv->dfs); i < n; ++i) {
                const ir_node *bl = (const ir_node*) dfs_get_post_num_node(lv->dfs, i);
                bl_info_t *bi     = get_block_info(lv, bl);
                bitset_set(bi->be_tgt_reach, bi->id);
        }
}

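/**
 * Construct the liveness check environment for a graph: run a DFS on the
 * CFG, compute the transitive closure of the reduced graph and the back
 * edge chains.
 */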
lv_chk_t *lv_chk_new(ir_graph *irg)
{
        lv_chk_t *res = XMALLOC(lv_chk_t);
        int i;

        assure_doms(irg);

        stat_ev_tim_push();
        ir_nodemap_init(&res->block_infos, irg);
        obstack_init(&res->obst);

        FIRM_DBG_REGISTER(res->dbg, "ir.ana.lvchk");

        res->dfs           = dfs_new(&absgraph_irg_cfg_succ, irg);
        res->n_blocks      = dfs_get_n_nodes(res->dfs);
        res->back_edge_src = bitset_obstack_alloc(&res->obst, res->n_blocks);
        res->back_edge_tgt = bitset_obstack_alloc(&res->obst, res->n_blocks);
        res->map           = OALLOCNZ(&res->obst, bl_info_t*, res->n_blocks);

        /* fill the map which maps pre_num to block infos */
        for (i = res->n_blocks - 1; i >= 0; --i) {
                ir_node *irn  = (ir_node *) dfs_get_pre_num_node(res->dfs, i);
                bl_info_t *bi = get_block_info(res, irn);
                assert(bi->id < res->n_blocks);
                assert(res->map[bi->id] == NULL);
                res->map[bi->id] = bi;
        }

        /* first of all, compute the transitive closure of the CFG *without* back edges */
        red_trans_closure(res);

        /* compute back edge chains */
        compute_back_edge_chains(res);

#ifndef NDEBUG
        DBG((res->dbg, LEVEL_1, "liveness chk in %+F\n", irg));
        for (i = res->n_blocks - 1; i >= 0; --i) {
                const ir_node *irn = (const ir_node*) dfs_get_pre_num_node(res->dfs, i);
                bl_info_t *bi      = get_block_info(res, irn);
                DBG((res->dbg, LEVEL_1, "lv_chk for %d -> %+F\n", i, irn));
                DBG((res->dbg, LEVEL_1, "\tred reach: %B\n", bi->red_reachable));
                DBG((res->dbg, LEVEL_1, "\ttgt reach: %B\n", bi->be_tgt_reach));
        }
#endif

        DBG((res->dbg, LEVEL_1, "back edge src: %B\n", res->back_edge_src));
        DBG((res->dbg, LEVEL_1, "back edge tgt: %B\n", res->back_edge_tgt));

        stat_ev_tim_pop("lv_chk_cons_time");
        return res;
}

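/** Free all memory associated with a liveness check environment. */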
void lv_chk_free(lv_chk_t *lv)
{
        dfs_free(lv->dfs);
        obstack_free(&lv->obst, NULL);
        ir_nodemap_destroy(&lv->block_infos);
        xfree(lv);
}

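/**
 * Check the liveness state of a value @p var at a block @p bl.
 * @return A bitmask of lv_chk_state_in, lv_chk_state_end and
 *         lv_chk_state_out describing where @p var is live at @p bl,
 *         or 0 if it is not live there at all.
 */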
unsigned lv_chk_bl_xxx(lv_chk_t *lv, const ir_node *bl, const ir_node *var)
{
        int res  = 0;
        ir_node *def_bl;
        stat_ev_cnt_decl(uses);
        stat_ev_cnt_decl(iter);

        assert(is_Block(bl) && "can only check for liveness in a block");

        /* If the variable is not a liveness-related node, bail out. */
        if (!is_liveness_node(var))
                return 0;

        stat_ev_ctx_push_fmt("lv_chk", "%u", get_irn_idx(var));
        stat_ev_tim_push();

        /* If the definition block does not dominate the query block,
         * the variable cannot be live there; bail out. */
        def_bl = get_nodes_block(var);
        if (!block_dominates(def_bl, bl)) {
                stat_ev("lv_chk_no_dom");
                goto end;
        }

        /*
         * If the block in question is the same as the definition block,
         * the algorithm is simple. Just check for uses not inside this block.
         */
        if (def_bl == bl) {
                stat_ev("lv_chk_def_block");
                DBG((lv->dbg, LEVEL_2, "lv check same block %+F in %+F\n", var, bl));
                foreach_out_edge (var, edge) {
                        ir_node *use    = get_edge_src_irn(edge);
                        ir_node *use_bl;

                        if (!is_liveness_node(use))
                                continue;

                        stat_ev_cnt_inc(uses);
                        use_bl = get_nodes_block(use);
                        if (is_Phi(use)) {
                                int pos = get_edge_src_pos(edge);
                                use_bl  = get_Block_cfgpred_block(use_bl, pos);

                                if (use_bl == bl) {
                                        DBG((lv->dbg, LEVEL_2, "\tphi %+F in succ %+F,%d -> live end\n", use, use_bl, pos));
                                        res |= lv_chk_state_end;
                                }
                        }

                        if (use_bl != def_bl) {
                                res = lv_chk_state_end | lv_chk_state_out;
                                goto end;
                        }
                }

                goto end;
        }

        /*
         * This is the more complicated case.
         * We try to gather as much information as possible while looking
         * at the uses.
         *
         * Note that we know for sure that bl != def_bl. That is sometimes
         * silently exploited below.
         */
        else {
                bl_info_t *def = get_block_info(lv, def_bl);
                bl_info_t *bli = get_block_info(lv, bl);
                bitset_t *uses = bitset_alloca(lv->n_blocks);
                bitset_t *Tq;

                size_t i;
                unsigned min_dom, max_dom;

                /* if the block has no DFS info, it cannot be reached.
                 * This can happen in functions with endless loops.
                 * We then bail out, since nothing is live there.
                 *
                 * TODO: Is that right?
                 */
                if (!bli)
                        goto end;

                (void) def;
                DBG((lv->dbg, LEVEL_2, "lv check %+F (def in %+F #%d) in different block %+F #%d\n",
                                        var, def_bl, def->id, bl, bli->id));

                foreach_out_edge (var, edge) {
                        ir_node *user = get_edge_src_irn(edge);
                        int mask      = lv_chk_state_in;

                        ir_node *use_bl;
                        bl_info_t *bi;

                        /* if the user is not a liveness node, the use does not count */
                        if (!is_liveness_node(user))
                                continue;

                        stat_ev_cnt_inc(uses);

                        /* if the user is a phi, the use is in the predecessor block.
                         * Furthermore, prepare a mask so that in the case where
                         * bl (the block in question) coincides with a use, it
                         * can be marked live_end there. */
                        use_bl = get_nodes_block(user);
                        if (is_Phi(user)) {
                                int pos = get_edge_src_pos(edge);
                                use_bl  = get_Block_cfgpred_block(use_bl, pos);
                                mask   |= lv_chk_state_end;
                        }

                        /* if the use block coincides with the query block, we
                         * already gather a little liveness information.
                         * The variable is surely live there, since bl != def_bl
                         * (that case is treated above). */
                        if (use_bl == bl)
                                res |= mask;

                        bi = get_block_info(lv, use_bl);

                        if (bi)
                                bitset_set(uses, bi->id);
                }

                /* get the dominance range which really matters. All uses outside
                 * the definition's dominance range need not be considered. Note
                 * that the definition itself is also not considered. The case
                 * where bl == def_bl is considered above. */
                min_dom = get_Block_dom_tree_pre_num(def_bl) + 1;
                max_dom = get_Block_dom_max_subtree_pre_num(def_bl);

                DBG((lv->dbg, LEVEL_2, "\tuses: %B\n", uses));

                /* prepare a set with all reachable back edge targets.
                 * This will determine our "viewing points" from where
                 * we will search/find the calculated uses. */
                Tq = bli->be_tgt_reach;

                /* now, visit all viewing points in Tq lying in the dominance
                 * range of the variable. Note that for reducible flow graphs
                 * the first iteration is sufficient and the loop will be
                 * left. */
                DBG((lv->dbg, LEVEL_2, "\tbe tgt reach: %B, dom span: [%d, %d]\n", Tq, min_dom, max_dom));
                i = bitset_next_set(Tq, min_dom);
                while (i <= max_dom) {
                        bl_info_t *ti = lv->map[i];
                        int use_in_current_block = bitset_is_set(uses, ti->id);

                        stat_ev_cnt_inc(iter);

                        /*
                         * This is somewhat tricky. Since this routine handles both live in
                         * and live end/out, we have to get all the border cases right.
                         * Each block is in its own red_reachable set (see the calculation
                         * function above). That means that in the case where bl == t, the
                         * intersection check of uses and reachability below will always
                         * find an intersection, namely t.
                         *
                         * However, if a block contains a use and the variable is dead
                         * afterwards, it is not live end/out at that block. The exception
                         * are back edge targets: if a var is live in at a back edge target,
                         * it is also live out/end there since the variable is live in the
                         * underlying loop. So in the case where t == bl and t is not
                         * a back edge target, we have to remove that use from consideration
                         * to determine whether the var is live out/end there.
                         *
                         * Note that the live in information has been calculated by the
                         * uses iteration above.
                         */
                        if (ti == bli && !bitset_is_set(lv->back_edge_tgt, ti->id)) {
                                DBG((lv->dbg, LEVEL_2, "\tlooking not from a back edge target and q == t. removing use: %d\n", ti->id));
                                bitset_clear(uses, ti->id);
                        }

                        /* If we can reach a use, the variable is live there and we say goodbye */
                        DBG((lv->dbg, LEVEL_2, "\tlooking from %d: seeing %B\n", ti->id, ti->red_reachable));
                        if (bitset_intersect(ti->red_reachable, uses)) {
                                res |= lv_chk_state_in | lv_chk_state_out | lv_chk_state_end;
                                goto end;
                        }

                        /*
                         * If we removed a use due to the comment above, we have to
                         * re-add it since it might be visible from further viewing points
                         * (we only need that in the non-reducible case).
                         */
                        if (use_in_current_block)
                                bitset_set(uses, ti->id);

                        i = bitset_next_set(Tq, get_Block_dom_max_subtree_pre_num(ti->block) + 1);
                }
        }

end:
        stat_ev_tim_pop("lv_chk_query_time");
        stat_ev_cnt_done(uses, "lv_chk_uses");
        stat_ev_cnt_done(iter, "lv_chk_iter");
        stat_ev_ctx_pop("lv_chk");

        return res;
}