be: Simplify places which still assumed that Projs are scheduled.
libfirm/ir/stat/firmstat.c
1 /*
2  * Copyright (C) 1995-2010 University of Karlsruhe.  All right reserved.
3  *
4  * This file is part of libFirm.
5  *
6  * This file may be distributed and/or modified under the terms of the
7  * GNU General Public License version 2 as published by the Free Software
8  * Foundation and appearing in the file LICENSE.GPL included in the
9  * packaging of this file.
10  *
11  * Licensees holding valid libFirm Professional Edition licenses may use
12  * this file in accordance with the libFirm Commercial License.
13  * Agreement provided with the Software.
14  *
15  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17  * PURPOSE.
18  */
19
20 /**
21  * @file
22  * @brief   Statistics for Firm.
23  * @author  Michael Beck
24  */
25 #include "config.h"
26
27 #include <stdio.h>
28 #include <stdlib.h>
29 #include <string.h>
30
31 #include "irouts.h"
32 #include "irdump.h"
33 #include "hashptr.h"
34 #include "firmstat_t.h"
35 #include "irpass_t.h"
36 #include "pattern.h"
37 #include "dags.h"
38 #include "stat_dmp.h"
39 #include "xmalloc.h"
40 #include "irhooks.h"
41 #include "util.h"
42 #include "ircons.h"
43 #include "irtools.h"
44
45 #include "lc_opts.h"
46 #include "lc_opts_enum.h"
47
48 /*
49  * These need to be static:
50  * special pseudo opcodes that we use to count some interesting cases
51  */
52
53 /**
54  * The Phi0, a node that is created during SSA construction
55  */
56 static ir_op _op_Phi0;
57
58 /** The PhiM, just to count memory Phis. */
59 static ir_op _op_PhiM;
60
61 /** The Mul by Const node. */
62 static ir_op _op_MulC;
63
64 /** The Div by Const node. */
65 static ir_op _op_DivC;
66
67 /** The Mod by Const node. */
68 static ir_op _op_ModC;
69
70 /** The memory Proj node. */
71 static ir_op _op_ProjM;
72
73 /** A Sel of a Sel */
74 static ir_op _op_SelSel;
75
76 /** A Sel of a Sel of a Sel */
77 static ir_op _op_SelSelSel;
78
79 static unsigned stat_options;
80
81 /* ---------------------------------------------------------------------------------- */
82
83 /** Marks the beginning of a statistic (hook) function. */
84 #define STAT_ENTER    ++status->recursive
85
86 /** Marks the end of a statistic (hook) function. */
87 #define STAT_LEAVE    --status->recursive
88
89 /** Allows entering a statistic function only when we are not already in a hook. */
90 #define STAT_ENTER_SINGLE    do { if (status->recursive > 0) return; ++status->recursive; } while (0)
91
92 /**
93  * global status
94  */
95 static const unsigned status_disable = 0;
96 static stat_info_t *status = (stat_info_t *)&status_disable;
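/* Until the statistics module is initialized, status aliases the constant
 * zero above, so the status->stat_options checks in the hooks below see
 * statistics as disabled and return immediately. */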
97
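/* The pset/set compare callbacks below are equality tests: they must return
 * zero iff both elements are considered equal; the magnitude of a non-zero
 * result does not matter. */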
98 /**
99  * Compare two elements of the opcode hash.
100  */
101 static int opcode_cmp(const void *elt, const void *key)
102 {
103         const node_entry_t *e1 = (const node_entry_t*)elt;
104         const node_entry_t *e2 = (const node_entry_t*)key;
105
106         return e1->op->code - e2->op->code;
107 }  /* opcode_cmp */
108
109 /**
110  * Compare two elements of the graph hash.
111  */
112 static int graph_cmp(const void *elt, const void *key)
113 {
114         const graph_entry_t *e1 = (const graph_entry_t*)elt;
115         const graph_entry_t *e2 = (const graph_entry_t*)key;
116
117         return e1->irg != e2->irg;
118 }  /* graph_cmp */
119
120 /**
121  * Compare two elements of the optimization hash.
122  */
123 static int opt_cmp(const void *elt, const void *key)
124 {
125         const opt_entry_t *e1 = (const opt_entry_t*)elt;
126         const opt_entry_t *e2 = (const opt_entry_t*)key;
127
128         return e1->op->code != e2->op->code;
129 }  /* opt_cmp */
130
131 /**
132  * Compare two elements of the block hash.
133  */
134 static int block_cmp(const void *elt, const void *key)
135 {
136         const block_entry_t *e1 = (const block_entry_t*)elt;
137         const block_entry_t *e2 = (const block_entry_t*)key;
138
139         /* it's enough to compare the block number */
140         return e1->block_nr != e2->block_nr;
141 }  /* block_cmp */
142
143 /**
144  * Compare two elements of the be_block hash.
145  */
146 static int be_block_cmp(const void *elt, const void *key)
147 {
148         const be_block_entry_t *e1 = (const be_block_entry_t*)elt;
149         const be_block_entry_t *e2 = (const be_block_entry_t*)key;
150
151         return e1->block_nr != e2->block_nr;
152 }  /* be_block_cmp */
153
154 /**
155  * Compare two elements of reg pressure hash.
156  */
157 static int reg_pressure_cmp(const void *elt, const void *key)
158 {
159         const reg_pressure_entry_t *e1 = (const reg_pressure_entry_t*)elt;
160         const reg_pressure_entry_t *e2 = (const reg_pressure_entry_t*)key;
161
162         return e1->class_name != e2->class_name;
163 }  /* reg_pressure_cmp */
164
165 /**
166  * Compare two elements of the perm_stat hash.
167  */
168 static int perm_stat_cmp(const void *elt, const void *key)
169 {
170         const perm_stat_entry_t *e1 = (const perm_stat_entry_t*)elt;
171         const perm_stat_entry_t *e2 = (const perm_stat_entry_t*)key;
172
173         return e1->perm != e2->perm;
174 }  /* perm_stat_cmp */
175
176 /**
177  * Compare two elements of the perm_class hash.
178  */
179 static int perm_class_cmp(const void *elt, const void *key)
180 {
181         const perm_class_entry_t *e1 = (const perm_class_entry_t*)elt;
182         const perm_class_entry_t *e2 = (const perm_class_entry_t*)key;
183
184         return e1->class_name != e2->class_name;
185 }  /* perm_class_cmp */
186
187 /**
188  * Compare two elements of the ir_op hash.
189  */
190 static int opcode_cmp_2(const void *elt, const void *key)
191 {
192         const ir_op *e1 = (const ir_op*)elt;
193         const ir_op *e2 = (const ir_op*)key;
194
195         return e1->code != e2->code;
196 }  /* opcode_cmp_2 */
197
198 /**
199  * Compare two elements of the address_mark set.
200  */
201 static int address_mark_cmp(const void *elt, const void *key, size_t size)
202 {
203         const address_mark_entry_t *e1 = (const address_mark_entry_t*)elt;
204         const address_mark_entry_t *e2 = (const address_mark_entry_t*)key;
205         (void) size;
206
207         /* compare only the nodes, the rest is used as data container */
208         return e1->node != e2->node;
209 }  /* address_mark_cmp */
210
211 /**
212  * Clear all counters in a node_entry_t.
213  */
214 static void opcode_clear_entry(node_entry_t *elem)
215 {
216         cnt_clr(&elem->cnt_alive);
217         cnt_clr(&elem->new_node);
218         cnt_clr(&elem->into_Id);
219         cnt_clr(&elem->normalized);
220 }  /* opcode_clear_entry */
221
222 /**
223  * Returns the associated node_entry_t for an ir_op (and allocates
224  * one if not yet available).
225  *
226  * @param op    the IR operation
227  * @param hmap  a hash map containing ir_op* -> node_entry_t*
228  */
229 static node_entry_t *opcode_get_entry(const ir_op *op, hmap_node_entry_t *hmap)
230 {
231         node_entry_t key;
232         node_entry_t *elem;
233
234         key.op = op;
235
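        /* note: the opcode number itself serves as the hash value for the pset */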
236         elem = (node_entry_t*)pset_find(hmap, &key, op->code);
237         if (elem)
238                 return elem;
239
240         elem = OALLOCZ(&status->cnts, node_entry_t);
241
242         /* clear counter */
243         opcode_clear_entry(elem);
244
245         elem->op = op;
246
247         return (node_entry_t*)pset_insert(hmap, elem, op->code);
248 }  /* opcode_get_entry */
249
250 /**
251  * Returns the associated ir_op for an opcode
252  *
253  * @param code  the IR opcode
254  * @param hmap  the hash map containing opcode -> ir_op*
255  */
256 static ir_op *opcode_find_entry(ir_opcode code, hmap_ir_op *hmap)
257 {
258         ir_op key;
259
260         key.code = code;
261         return (ir_op*)pset_find(hmap, &key, code);
262 }  /* opcode_find_entry */
263
264 /**
265  * Clears all counters in a graph_entry_t.
266  *
267  * @param elem  the graph entry
268  * @param all   if non-zero, clears all counters, else leaves the accumulated ones
269  */
270 static void graph_clear_entry(graph_entry_t *elem, int all)
271 {
272         int i;
273
274         /* clear accumulated / non-accumulated counter */
275         for (i = all ? 0 : _gcnt_non_acc; i < _gcnt_last; ++i) {
276                 cnt_clr(&elem->cnt[i]);
277         }  /* for */
278
279         if (elem->block_hash) {
280                 del_pset(elem->block_hash);
281                 elem->block_hash = NULL;
282         }  /* if */
283
284         obstack_free(&elem->recalc_cnts, NULL);
285         obstack_init(&elem->recalc_cnts);
286 }  /* graph_clear_entry */
287
288 /**
289  * Returns the associated graph_entry_t for an IR graph.
290  *
291  * @param irg   the IR graph, NULL for the global counter
292  * @param hmap  the hash map containing ir_graph* -> graph_entry_t*
293  */
294 static graph_entry_t *graph_get_entry(ir_graph *irg, hmap_graph_entry_t *hmap)
295 {
296         graph_entry_t key;
297         graph_entry_t *elem;
298         size_t i;
299
300         key.irg = irg;
301
302         elem = (graph_entry_t*)pset_find(hmap, &key, hash_ptr(irg));
303
304         if (elem) {
305                 /* create the hash map for backend block information */
306                 if (! elem->be_block_hash)
307                         elem->be_block_hash = new_pset(be_block_cmp, 5);
308
309                 return elem;
310         }  /* if */
311
312         /* allocate a new one */
313         elem = OALLOCZ(&status->cnts, graph_entry_t);
314         obstack_init(&elem->recalc_cnts);
315
316         /* clear counter */
317         graph_clear_entry(elem, 1);
318
319         /* new hash table for opcodes here  */
320         elem->opcode_hash   = new_pset(opcode_cmp, 5);
321         elem->address_mark  = new_set(address_mark_cmp, 5);
322         elem->irg           = irg;
323
324         /* these hash tables are created on demand */
325         elem->block_hash = NULL;
326
327         for (i = 0; i != ARRAY_SIZE(elem->opt_hash); ++i)
328                 elem->opt_hash[i] = new_pset(opt_cmp, 4);
329
330         return (graph_entry_t*)pset_insert(hmap, elem, hash_ptr(irg));
331 }  /* graph_get_entry */
332
333 /**
334  * Clear all counters in an opt_entry_t.
335  */
336 static void opt_clear_entry(opt_entry_t *elem)
337 {
338         cnt_clr(&elem->count);
339 }  /* opt_clear_entry */
340
341 /**
342  * Returns the associated opt_entry_t for an IR operation.
343  *
344  * @param op    the IR operation
345  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
346  */
347 static opt_entry_t *opt_get_entry(const ir_op *op, hmap_opt_entry_t *hmap)
348 {
349         opt_entry_t key;
350         opt_entry_t *elem;
351
352         key.op = op;
353
354         elem = (opt_entry_t*)pset_find(hmap, &key, op->code);
355         if (elem)
356                 return elem;
357
358         elem = OALLOCZ(&status->cnts, opt_entry_t);
359
360         /* clear new counter */
361         opt_clear_entry(elem);
362
363         elem->op = op;
364
365         return (opt_entry_t*)pset_insert(hmap, elem, op->code);
366 }  /* opt_get_entry */
367
368 /**
369  * clears all counters in a block_entry_t
370  */
371 static void block_clear_entry(block_entry_t *elem)
372 {
373         int i;
374
375         for (i = 0; i < _bcnt_last; ++i)
376                 cnt_clr(&elem->cnt[i]);
377 }  /* block_clear_entry */
378
379 /**
380  * Returns the associated block_entry_t for a block.
381  *
382  * @param block_nr  an IR block number
383  * @param hmap      a hash map containing long -> block_entry_t
384  */
385 static block_entry_t *block_get_entry(struct obstack *obst, long block_nr, hmap_block_entry_t *hmap)
386 {
387         block_entry_t key;
388         block_entry_t *elem;
389
390         key.block_nr = block_nr;
391
392         elem = (block_entry_t*)pset_find(hmap, &key, block_nr);
393         if (elem)
394                 return elem;
395
396         elem = OALLOCZ(obst, block_entry_t);
397
398         /* clear new counter */
399         block_clear_entry(elem);
400
401         elem->block_nr = block_nr;
402
403         return (block_entry_t*)pset_insert(hmap, elem, block_nr);
404 }  /* block_get_entry */
405
406 /**
407  * Clear all sets in be_block_entry_t.
408  */
409 static void be_block_clear_entry(be_block_entry_t *elem)
410 {
411         if (elem->reg_pressure)
412                 del_pset(elem->reg_pressure);
413
414         if (elem->sched_ready)
415                 stat_delete_distrib_tbl(elem->sched_ready);
416
417         if (elem->perm_class_stat)
418                 del_pset(elem->perm_class_stat);
419
420         elem->reg_pressure    = new_pset(reg_pressure_cmp, 5);
421         elem->sched_ready     = stat_new_int_distrib_tbl();
422         elem->perm_class_stat = new_pset(perm_class_cmp, 5);
423 }  /* be_block_clear_entry */
424
425 /**
426  * Returns the associated be_block_entry_t for a block.
427  *
428  * @param block_nr  an IR block number
429  * @param hmap      a hash map containing long -> be_block_entry_t
430  */
431 static be_block_entry_t *be_block_get_entry(struct obstack *obst, long block_nr, hmap_be_block_entry_t *hmap)
432 {
433         be_block_entry_t key;
434         be_block_entry_t *elem;
435
436         key.block_nr = block_nr;
437
438         elem = (be_block_entry_t*)pset_find(hmap, &key, block_nr);
439         if (elem)
440                 return elem;
441
442         elem = OALLOCZ(obst, be_block_entry_t);
443
444         /* clear new counter */
445         be_block_clear_entry(elem);
446
447         elem->block_nr = block_nr;
448
449         return (be_block_entry_t*)pset_insert(hmap, elem, block_nr);
450 }  /* be_block_get_entry */
451
452 /**
453  * clears all sets in perm_class_entry_t
454  */
455 static void perm_class_clear_entry(perm_class_entry_t *elem)
456 {
457         if (elem->perm_stat)
458                 del_pset(elem->perm_stat);
459
460         elem->perm_stat = new_pset(perm_stat_cmp, 5);
461 }  /* perm_class_clear_entry */
462
463 /**
464  * Returns the associated perm_class entry for a register class.
465  *
466  * @param class_name  the register class name
467  * @param hmap        a hash map containing class_name -> perm_class_entry_t
468  */
469 static perm_class_entry_t *perm_class_get_entry(struct obstack *obst, const char *class_name,
470                                                 hmap_perm_class_entry_t *hmap)
471 {
472         perm_class_entry_t key;
473         perm_class_entry_t *elem;
474
475         key.class_name = class_name;
476
477         elem = (perm_class_entry_t*)pset_find(hmap, &key, hash_ptr(class_name));
478         if (elem)
479                 return elem;
480
481         elem = OALLOCZ(obst, perm_class_entry_t);
482
483         /* clear new counter */
484         perm_class_clear_entry(elem);
485
486         elem->class_name = class_name;
487
488         return (perm_class_entry_t*)pset_insert(hmap, elem, hash_ptr(class_name));
489 }  /* perm_class_get_entry */
490
491 /**
492  * clears all sets in perm_stat_entry_t
493  */
494 static void perm_stat_clear_entry(perm_stat_entry_t *elem)
495 {
496         if (elem->chains)
497                 stat_delete_distrib_tbl(elem->chains);
498
499         if (elem->cycles)
500                 stat_delete_distrib_tbl(elem->cycles);
501
502         elem->chains = stat_new_int_distrib_tbl();
503         elem->cycles = stat_new_int_distrib_tbl();
504 }  /* perm_stat_clear_entry */
505
506 /**
507  * Returns the associated perm_stat entry for a perm.
508  *
509  * @param perm      the perm node
510  * @param hmap      a hash map containing perm -> perm_stat_entry_t
511  */
512 static perm_stat_entry_t *perm_stat_get_entry(struct obstack *obst, ir_node *perm, hmap_perm_stat_entry_t *hmap)
513 {
514         perm_stat_entry_t key;
515         perm_stat_entry_t *elem;
516
517         key.perm = perm;
518
519         elem = (perm_stat_entry_t*)pset_find(hmap, &key, hash_ptr(perm));
520         if (elem)
521                 return elem;
522
523         elem = OALLOCZ(obst, perm_stat_entry_t);
524
525         /* clear new counter */
526         perm_stat_clear_entry(elem);
527
528         elem->perm = perm;
529
530         return (perm_stat_entry_t*)pset_insert(hmap, elem, hash_ptr(perm));
531 }  /* perm_stat_get_entry */
532
533 /**
534  * Clears the optimization counters.
535  */
536 static void clear_optimization_counter(void)
537 {
538         int i;
539         for (i = 0; i < FS_OPT_MAX; ++i)
540                 cnt_clr(&status->num_opts[i]);
541 }
542
543 /**
544  * Returns the ir_op for an IR node,
545  * handles special cases and returns pseudo opcodes.
546  *
547  * @param node  an IR node
548  */
549 static ir_op *stat_get_irn_op(ir_node *node)
550 {
551         ir_op *op = get_irn_op(node);
552         unsigned opc = op->code;
553
554         switch (opc) {
555         case iro_Phi:
556                 if (get_irn_arity(node) == 0) {
557                         /* special case, a Phi0 node, count on extra counter */
558                         op = status->op_Phi0 ? status->op_Phi0 : op;
559                 } else if (get_irn_mode(node) == mode_M) {
560                         /* special case, a Memory Phi node, count on extra counter */
561                         op = status->op_PhiM ? status->op_PhiM : op;
562                 }  /* if */
563                 break;
564         case iro_Proj:
565                 if (get_irn_mode(node) == mode_M) {
566                         /* special case, a Memory Proj node, count on extra counter */
567                         op = status->op_ProjM ? status->op_ProjM : op;
568                 }  /* if */
569                 break;
570         case iro_Mul:
571                 if (is_Const(get_Mul_left(node)) || is_Const(get_Mul_right(node))) {
572                         /* special case, a Multiply by a const, count on extra counter */
573                         op = status->op_MulC ? status->op_MulC : op;
574                 }  /* if */
575                 break;
576         case iro_Div:
577                 if (is_Const(get_Div_right(node))) {
578                         /* special case, a division by a const, count on extra counter */
579                         op = status->op_DivC ? status->op_DivC : op;
580                 }  /* if */
581                 break;
582         case iro_Mod:
583                 if (is_Const(get_Mod_right(node))) {
584                         /* special case, a modulo by a const, count on extra counter */
585                         op = status->op_ModC ? status->op_ModC : op;
586                 }  /* if */
587                 break;
588         case iro_Sel:
589                 if (is_Sel(get_Sel_ptr(node))) {
590                         /* special case, a Sel of a Sel, count on extra counter */
591                         op = status->op_SelSel ? status->op_SelSel : op;
592                         if (is_Sel(get_Sel_ptr(get_Sel_ptr(node)))) {
593                                 /* special case, a Sel of a Sel of a Sel, count on extra counter */
594                                 op = status->op_SelSelSel ? status->op_SelSelSel : op;
595                         }  /* if */
596                 }  /* if */
597                 break;
598         default:
599                 break;
600         }  /* switch */
601
602         return op;
603 }  /* stat_get_irn_op */
604
605 /**
606  * update the block counter
607  */
608 static void undate_block_info(ir_node *node, graph_entry_t *graph)
609 {
610         ir_node *block;
611         block_entry_t *b_entry;
612         int i, arity;
613
614         /* check for block */
615         if (is_Block(node)) {
616                 arity = get_irn_arity(node);
617                 b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(node), graph->block_hash);
618                 /* mark start/end blocks to allow filtering them out */
619                 if (node == get_irg_start_block(graph->irg))
620                         b_entry->is_start = 1;
621                 else if (node == get_irg_end_block(graph->irg))
622                         b_entry->is_end = 1;
623
624                 /* count all incoming edges */
625                 for (i = 0; i < arity; ++i) {
626                         ir_node *pred = get_irn_n(node, i);
627                         ir_node *other_block = get_nodes_block(pred);
628                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
629
630                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
631                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
632                 }  /* for */
633                 return;
634         }  /* if */
635
636         block   = get_nodes_block(node);
637         b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(block), graph->block_hash);
638
639         if (is_Phi(node) && mode_is_datab(get_irn_mode(node))) {
640                 /* count data Phi per block */
641                 cnt_inc(&b_entry->cnt[bcnt_phi_data]);
642         }  /* if */
643
644         /* we have a new node in our block */
645         cnt_inc(&b_entry->cnt[bcnt_nodes]);
646
647         /* don't count keep-alive edges */
648         if (is_End(node))
649                 return;
650
651         arity = get_irn_arity(node);
652
653         for (i = 0; i < arity; ++i) {
654                 ir_node *pred = get_irn_n(node, i);
655                 ir_node *other_block;
656
657                 other_block = get_nodes_block(pred);
658
659                 if (other_block == block)
660                         cnt_inc(&b_entry->cnt[bcnt_edges]); /* an intra-block edge */
661                 else {
662                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
663
664                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
665                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
666                 }  /* if */
667         }  /* for */
668 }  /* undate_block_info */
669
670 /**
671  * Calculates how many arguments of the call are const, updates
672  * param distribution.
673  */
674 static void analyse_params_of_Call(graph_entry_t *graph, ir_node *call)
675 {
676         int i, num_const_args = 0, num_local_adr = 0;
677         int n = get_Call_n_params(call);
678
679         for (i = 0; i < n; ++i) {
680                 ir_node *param = get_Call_param(call, i);
681
682                 if (is_irn_constlike(param))
683                         ++num_const_args;
684                 else if (is_Sel(param)) {
685                         ir_node *base = param;
686
687                         do {
688                                 base = get_Sel_ptr(base);
689                         } while (is_Sel(base));
690
691                         if (base == get_irg_frame(current_ir_graph))
692                                 ++num_local_adr;
693                 }
694
695         }  /* for */
696
697         if (num_const_args > 0)
698                 cnt_inc(&graph->cnt[gcnt_call_with_cnst_arg]);
699         if (num_const_args == n)
700                 cnt_inc(&graph->cnt[gcnt_call_with_all_cnst_arg]);
701         if (num_local_adr > 0)
702                 cnt_inc(&graph->cnt[gcnt_call_with_local_adr]);
703
704         stat_inc_int_distrib_tbl(status->dist_param_cnt, n);
705 }  /* analyse_params_of_Call */
706
707 /**
708  * Update info on calls.
709  *
710  * @param call   The call
711  * @param graph  The graph entry containing the call
712  */
713 static void stat_update_call(ir_node *call, graph_entry_t *graph)
714 {
715         ir_node   *block = get_nodes_block(call);
716         ir_node   *ptr = get_Call_ptr(call);
717         ir_entity *ent = NULL;
718         ir_graph  *callee = NULL;
719
720         /*
721          * If the block is bad, the whole subgraph will collapse later
722          * so do not count this call.
723          * This happens in dead code.
724          */
725         if (is_Bad(block))
726                 return;
727
728         cnt_inc(&graph->cnt[gcnt_all_calls]);
729
730         /* found a call, this function is not a leaf */
731         graph->is_leaf = 0;
732
733         if (is_SymConst(ptr)) {
734                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
735                         /* ok, we seem to know the entity */
736                         ent = get_SymConst_entity(ptr);
737                         callee = get_entity_irg(ent);
738
739                         /* it is recursive if it calls itself at least once */
740                         if (callee == graph->irg)
741                                 graph->is_recursive = 1;
742                         if (callee == NULL)
743                                 cnt_inc(&graph->cnt[gcnt_external_calls]);
744                 }  /* if */
745         } else {
746                 /* indirect call, we cannot predict the callee */
747                 cnt_inc(&graph->cnt[gcnt_indirect_calls]);
748
749                 /* NOT a leaf call */
750                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
751         }  /* if */
752
753         /* check if it's a chain-call: then the call block
754          * must dominate the end block. */
755         {
756                 ir_node *curr = get_irg_end_block(graph->irg);
757                 int depth = get_Block_dom_depth(block);
758
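                /* walk up the dominator tree from the end block; if we reach the
                 * call block, it dominates the end block */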
759                 for (; curr != block && get_Block_dom_depth(curr) > depth;) {
760                         curr = get_Block_idom(curr);
761
762                         if (! curr || !is_Block(curr))
763                                 break;
764                 }  /* for */
765
766                 if (curr != block)
767                         graph->is_chain_call = 0;
768         }
769
770         /* check, if the callee is a leaf */
771         if (callee) {
772                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
773
774                 if (called->is_analyzed) {
775                         if (! called->is_leaf)
776                                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
777                 }  /* if */
778         }  /* if */
779
780         analyse_params_of_Call(graph, call);
781 }  /* stat_update_call */
782
783 /**
784  * Update info on calls for graphs on the wait queue.
785  */
786 static void stat_update_call_2(ir_node *call, graph_entry_t *graph)
787 {
788         ir_node   *block = get_nodes_block(call);
789         ir_node   *ptr = get_Call_ptr(call);
790         ir_entity *ent = NULL;
791         ir_graph  *callee = NULL;
792
793         /*
794          * If the block is bad, the whole subgraph will collapse later
795          * so do not count this call.
796          * This happens in dead code.
797          */
798         if (is_Bad(block))
799                 return;
800
801         if (is_SymConst(ptr)) {
802                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
803                         /* ok, we seem to know the entity */
804                         ent = get_SymConst_entity(ptr);
805                         callee = get_entity_irg(ent);
806                 }  /* if */
807         }  /* if */
808
809         /* check, if the callee is a leaf */
810         if (callee) {
811                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
812
813                 assert(called->is_analyzed);
814
815                 if (! called->is_leaf)
816                         graph->is_leaf_call = LCS_NON_LEAF_CALL;
817         } else
818                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
819 }  /* stat_update_call_2 */
820
821 /**
822  * Find the base address and entity of a Sel node.
823  *
824  * @param sel  the node
825  *
826  * @return the base address.
827  */
828 static ir_node *find_base_adr(ir_node *sel)
829 {
830         ir_node *ptr = get_Sel_ptr(sel);
831
832         while (is_Sel(ptr)) {
833                 sel = ptr;
834                 ptr = get_Sel_ptr(sel);
835         }
836         return ptr;
837 }  /* find_base_adr */
838
839 /**
840  * Update info on Load/Store address statistics.
841  */
842 static void stat_update_address(ir_node *node, graph_entry_t *graph)
843 {
844         unsigned opc = get_irn_opcode(node);
845         ir_node *base;
846         ir_graph *irg;
847
848         switch (opc) {
849         case iro_SymConst:
850                 /* a global address */
851                 cnt_inc(&graph->cnt[gcnt_global_adr]);
852                 break;
853         case iro_Sel:
854                 base = find_base_adr(node);
855                 irg = current_ir_graph;
856                 if (base == get_irg_frame(irg)) {
857                         /* a local Variable. */
858                         cnt_inc(&graph->cnt[gcnt_local_adr]);
859                 } else {
860                         /* Pointer access */
861                         if (is_Proj(base) && skip_Proj(get_Proj_pred(base)) == get_irg_start(irg)) {
862                                 /* pointer access through parameter, check for THIS */
863                                 ir_entity *ent = get_irg_entity(irg);
864
865                                 if (ent != NULL) {
866                                         ir_type *ent_tp = get_entity_type(ent);
867
868                                         if (get_method_calling_convention(ent_tp) & cc_this_call) {
869                                                 if (get_Proj_proj(base) == 0) {
870                                                         /* THIS pointer */
871                                                         cnt_inc(&graph->cnt[gcnt_this_adr]);
872                                                         goto end_parameter;
873                                                 }  /* if */
874                                         }  /* if */
875                                 }  /* if */
876                                 /* other parameter */
877                                 cnt_inc(&graph->cnt[gcnt_param_adr]);
878 end_parameter: ;
879                         } else {
880                                 /* unknown Pointer access */
881                                 cnt_inc(&graph->cnt[gcnt_other_adr]);
882                         }  /* if */
883                 }  /* if */
884         default:
885                 break;
886         }  /* switch */
887 }  /* stat_update_address */
888
889 /**
890  * Walker for reachable nodes count.
891  */
892 static void update_node_stat(ir_node *node, void *env)
893 {
894         graph_entry_t *graph = (graph_entry_t*)env;
895         node_entry_t *entry;
896
897         ir_op *op = stat_get_irn_op(node);
898         int i, arity = get_irn_arity(node);
899
900         entry = opcode_get_entry(op, graph->opcode_hash);
901
902         cnt_inc(&entry->cnt_alive);
903         cnt_add_i(&graph->cnt[gcnt_edges], arity);
904
905         /* count block edges */
906         undate_block_info(node, graph);
907
908         /* handle statistics for special node types */
909
910         switch (op->code) {
911         case iro_Call:
912                 /* check for properties that depend on calls, like recursion/leaf/indirect call */
913                 stat_update_call(node, graph);
914                 break;
915         case iro_Load:
916                 /* check address properties */
917                 stat_update_address(get_Load_ptr(node), graph);
918                 break;
919         case iro_Store:
920                 /* check address properties */
921                 stat_update_address(get_Store_ptr(node), graph);
922                 break;
923         case iro_Phi:
924                 /* check for non-strict Phi nodes */
925                 for (i = arity - 1; i >= 0; --i) {
926                         ir_node *pred = get_Phi_pred(node, i);
927                         if (is_Unknown(pred)) {
928                                 /* found an Unknown predecessor, graph is not strict */
929                                 graph->is_strict = 0;
930                                 break;
931                         }
932                 }
933         default:
934                 break;
935         }  /* switch */
936
937         /* we want to count the constant IN nodes, not the CSE'ed constants themselves */
938         if (status->stat_options & FIRMSTAT_COUNT_CONSTS) {
939                 int i;
940
941                 for (i = get_irn_arity(node) - 1; i >= 0; --i) {
942                         ir_node *pred = get_irn_n(node, i);
943
944                         if (is_Const(pred)) {
945                                 /* check properties of constants */
946                                 stat_update_const(status, pred, graph);
947                         }  /* if */
948                 }  /* for */
949         }  /* if */
950 }  /* update_node_stat */
951
952 /**
953  * Walker for reachable nodes count for graphs on the wait_q.
954  */
955 static void update_node_stat_2(ir_node *node, void *env)
956 {
957         graph_entry_t *graph = (graph_entry_t*)env;
958
959         /* check for properties that depend on calls, like recursion/leaf/indirect call */
960         if (is_Call(node))
961                 stat_update_call_2(node, graph);
962 }  /* update_node_stat_2 */
963
964 /**
965  * Get the current address mark.
966  */
967 static unsigned get_adr_mark(graph_entry_t *graph, ir_node *node)
968 {
969         address_mark_entry_t const val = { node, 0 };
970         address_mark_entry_t *value = set_find(address_mark_entry_t, graph->address_mark, &val, sizeof(val), hash_ptr(node));
971
972         return value ? value->mark : 0;
973 }  /* get_adr_mark */
974
975 /**
976  * Set the current address mark.
977  */
978 static void set_adr_mark(graph_entry_t *graph, ir_node *node, unsigned val)
979 {
980         address_mark_entry_t const value = { node, val };
981         (void)set_insert(address_mark_entry_t, graph->address_mark, &value, sizeof(value), hash_ptr(node));
982 }  /* set_adr_mark */
983
984 #undef DUMP_ADR_MODE
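/* Define DUMP_ADR_MODE (and drop the #undef above) to colour nodes involved
 * in address computations in the vcg dump; see stat_adr_mark_hook() below. */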
985
986 #ifdef DUMP_ADR_MODE
987 /**
988  * a vcg attribute hook: Color a node with a different color if
989  * it's identified as a part of an address expression or at least referenced
990  * by an address expression.
991  */
992 static int stat_adr_mark_hook(FILE *F, ir_node *node, ir_node *local)
993 {
994         ir_node *n           = local ? local : node;
995         ir_graph *irg        = get_irn_irg(n);
996         graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
997         unsigned mark        = get_adr_mark(graph, n);
998
999         if (mark & MARK_ADDRESS_CALC)
1000                 fprintf(F, "color: purple");
1001         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1002                 fprintf(F, "color: pink");
1003         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1004                 fprintf(F, "color: lightblue");
1005         else
1006                 return 0;
1007
1008         /* I know the color! */
1009         return 1;
1010 }  /* stat_adr_mark_hook */
1011 #endif /* DUMP_ADR_MODE */
1012
1013 /**
1014  * Return the "operational" mode of a Firm node.
1015  */
1016 static ir_mode *get_irn_op_mode(ir_node *node)
1017 {
1018         switch (get_irn_opcode(node)) {
1019         case iro_Load:
1020                 return get_Load_mode(node);
1021         case iro_Store:
1022                 return get_irn_mode(get_Store_value(node));
1023         case iro_Div:
1024                 return get_irn_mode(get_Div_left(node));
1025         case iro_Mod:
1026                 return get_irn_mode(get_Mod_left(node));
1027         case iro_Cmp:
1028                 /* Cmp is no address calculation, or is it? */
1029         default:
1030                 return get_irn_mode(node);
1031         }  /* switch */
1032 }  /* get_irn_op_mode */
1033
1034 /**
1035  * Post-walker that marks every node that is an address calculation.
1036  *
1037  * Users of a node must be visited first. We ensure this by
1038  * calling it in the post of an outs walk. This should work even in cycles,
1039  * while the normal pre-walk will not.
1040  */
1041 static void mark_address_calc(ir_node *node, void *env)
1042 {
1043         graph_entry_t *graph = (graph_entry_t*)env;
1044         ir_mode *mode = get_irn_op_mode(node);
1045         int i, n;
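        /* unless this node turns out to be (part of) an address computation,
         * its predecessors are marked as referenced by a non-address user */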
1046         unsigned mark_preds = MARK_REF_NON_ADR;
1047
1048         if (! mode_is_data(mode))
1049                 return;
1050
1051         if (mode_is_reference(mode)) {
1052                 /* a reference is calculated here, we are sure */
1053                 set_adr_mark(graph, node, MARK_ADDRESS_CALC);
1054
1055                 mark_preds = MARK_REF_ADR;
1056         } else {
1057                 unsigned mark = get_adr_mark(graph, node);
1058
1059                 if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR) {
1060                         /*
1061                          * this node has no reference mode, but is only
1062                          * referenced by address calculations
1063                          */
1064                         mark_preds = MARK_REF_ADR;
1065                 }  /* if */
1066         }  /* if */
1067
1068         /* mark all predecessors */
1069         for (i = 0, n = get_irn_arity(node); i < n; ++i) {
1070                 ir_node *pred = get_irn_n(node, i);
1071
1072                 mode = get_irn_op_mode(pred);
1073                 if (! mode_is_data(mode))
1074                         continue;
1075
1076                 set_adr_mark(graph, pred, get_adr_mark(graph, pred) | mark_preds);
1077         }  /* for */
1078 }  /* mark_address_calc */
1079
1080 /**
1081  * Post-walker that counts the address calculation nodes.
1082  *
1083  * Uses the marks set by mark_address_calc() to classify each node as a pure
1084  * address computation, a node referenced only by address computations, or a
1085  * node with mixed uses, and updates the graph's address-operation counters.
1086  */
1087 static void count_adr_ops(ir_node *node, void *env)
1088 {
1089         graph_entry_t *graph = (graph_entry_t*)env;
1090         unsigned mark        = get_adr_mark(graph, node);
1091
1092         if (mark & MARK_ADDRESS_CALC)
1093                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1094         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1095                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1096         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1097                 cnt_inc(&graph->cnt[gcnt_all_adr_ops]);
1098 }  /* count_adr_ops */
1099
1100 /**
1101  * Called for every graph when the graph is deleted or when stat_dump_snapshot()
1102  * is called; recalculates all statistic info.
1103  *
1104  * @param global    The global entry
1105  * @param graph     The current entry
1106  */
1107 static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph)
1108 {
1109         int i;
1110
1111         /* first clear the alive counters in the graph */
1112         foreach_pset(graph->opcode_hash, node_entry_t, entry) {
1113                 cnt_clr(&entry->cnt_alive);
1114         }  /* foreach_pset */
1115
1116         /* set pessimistic values */
1117         graph->is_leaf       = 1;
1118         graph->is_leaf_call  = LCS_UNKNOWN;
1119         graph->is_recursive  = 0;
1120         graph->is_chain_call = 1;
1121         graph->is_strict     = 1;
1122
1123         /* create new block counter */
1124         graph->block_hash = new_pset(block_cmp, 5);
1125
1126         /* we need dominator info */
1127         if (graph->irg != get_const_code_irg()) {
1128                 assure_doms(graph->irg);
1129         }  /* if */
1130
1131         /* count the nodes in the graph */
1132         irg_walk_graph(graph->irg, update_node_stat, NULL, graph);
1133
1134 #if 0
1135         /* Uncomment this code if chain-call means calling exactly once. */
1136         entry = opcode_get_entry(op_Call, graph->opcode_hash);
1137
1138         /* check if we have more than 1 call */
1139         if (cnt_gt(entry->cnt_alive, 1))
1140                 graph->is_chain_call = 0;
1141 #endif
1142
1143         /* recursive functions are never chain calls, leaf functions have no calls */
1144         if (graph->is_recursive || graph->is_leaf)
1145                 graph->is_chain_call = 0;
1146
1147         /* assume we walk every graph only ONCE; we can sum up the global count here */
1148         foreach_pset(graph->opcode_hash, node_entry_t, entry) {
1149                 node_entry_t *g_entry = opcode_get_entry(entry->op, global->opcode_hash);
1150
1151                 /* update the node counter */
1152                 cnt_add(&g_entry->cnt_alive, &entry->cnt_alive);
1153         }  /* foreach_pset */
1154
1155         /* count the number of address calculations */
1156         if (graph->irg != get_const_code_irg()) {
1157                 ir_graph *rem = current_ir_graph;
1158
1159                 assure_irg_outs(graph->irg);
1160
1161                 /* Must be done on the outs graph */
1162                 current_ir_graph = graph->irg;
1163                 irg_out_walk(get_irg_start(graph->irg), NULL, mark_address_calc, graph);
1164                 current_ir_graph = rem;
1165
1166 #ifdef DUMP_ADR_MODE
1167                 /* register the vcg hook and dump the graph for test */
1168                 set_dump_node_vcgattr_hook(stat_adr_mark_hook);
1169                 dump_ir_block_graph(graph->irg, "-adr");
1170                 set_dump_node_vcgattr_hook(NULL);
1171 #endif /* DUMP_ADR_MODE */
1172
1173                 irg_walk_graph(graph->irg, NULL, count_adr_ops, graph);
1174         }  /* if */
1175
1176         /* count the DAGs */
1177         if (status->stat_options & FIRMSTAT_COUNT_DAG)
1178                 count_dags_in_graph(global, graph);
1179
1180         /* calculate the patterns of this graph */
1181         stat_calc_pattern_history(graph->irg);
1182
1183         /* a leaf function does not call others */
1184         if (graph->is_leaf)
1185                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
1186         else if (graph->is_leaf_call == LCS_UNKNOWN) {
1187                 /* we still don't know if this graph calls leaf-functions, so enqueue */
1188                 pdeq_putl(status->wait_q, graph);
1189         }  /* if */
1190
1191         /* we have analyzed this graph */
1192         graph->is_analyzed = 1;
1193
1194         /* accumulate all counters */
1195         for (i = 0; i < _gcnt_last; ++i)
1196                 cnt_add(&global->cnt[i], &graph->cnt[i]);
1197 }  /* update_graph_stat */
1198
1199 /**
1200  * Called for every graph that was on the wait_q in stat_dump_snapshot();
1201  * finishes all statistic info calculations.
1202  *
1203  * @param global    The global entry
1204  * @param graph     The current entry
1205  */
1206 static void update_graph_stat_2(graph_entry_t *global, graph_entry_t *graph)
1207 {
1208         (void) global;
1209         if (graph->is_deleted) {
1210                 /* deleted, ignore */
1211                 return;
1212         }
1213
1214         if (graph->irg) {
1215                 /* count the nodes in the graph */
1216                 irg_walk_graph(graph->irg, update_node_stat_2, NULL, graph);
1217
1218                 if (graph->is_leaf_call == LCS_UNKNOWN)
1219                         graph->is_leaf_call = LCS_LEAF_CALL;
1220         }  /* if */
1221 }  /* update_graph_stat_2 */
1222
1223 /**
1224  * Register a dumper.
1225  */
1226 static void stat_register_dumper(const dumper_t *dumper)
1227 {
1228         dumper_t *p = XMALLOC(dumper_t);
1229         *p = *dumper;
1230
1231         p->next        = status->dumper;
1232         p->status      = status;
1233         status->dumper = p;
1234
1235         /* FIXME: memory leak */
1236 }  /* stat_register_dumper */
1237
1238 /**
1239  * Dumps the statistics of an IR graph.
1240  */
1241 static void stat_dump_graph(graph_entry_t *entry)
1242 {
1243         dumper_t *dumper;
1244
1245         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1246                 if (dumper->dump_graph)
1247                         dumper->dump_graph(dumper, entry);
1248         }  /* for */
1249 }  /* stat_dump_graph */
1250
1251 /**
1252  * Calls all registered dumper functions.
1253  */
1254 static void stat_dump_registered(graph_entry_t *entry)
1255 {
1256         dumper_t *dumper;
1257
1258         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1259                 if (dumper->func_map) {
1260                         foreach_pset(dumper->func_map, dump_graph_FUNC, func)
1261                                 func(dumper, entry);
1262                 }  /* if */
1263         }  /* for */
1264 }  /* stat_dump_registered */
1265
1266 /**
1267  * Dumps a constant table.
1268  */
1269 static void stat_dump_consts(const constant_info_t *tbl)
1270 {
1271         dumper_t *dumper;
1272
1273         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1274                 if (dumper->dump_const_tbl)
1275                         dumper->dump_const_tbl(dumper, tbl);
1276         }  /* for */
1277 }  /* stat_dump_consts */
1278
1279 /**
1280  * Dumps the parameter distribution
1281  */
1282 static void stat_dump_param_tbl(const distrib_tbl_t *tbl, graph_entry_t *global)
1283 {
1284         dumper_t *dumper;
1285
1286         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1287                 if (dumper->dump_param_tbl)
1288                         dumper->dump_param_tbl(dumper, tbl, global);
1289         }  /* for */
1290 }  /* stat_dump_param_tbl */
1291
1292 /**
1293  * Dumps the optimization counter
1294  */
1295 static void stat_dump_opt_cnt(const counter_t *tbl, unsigned len)
1296 {
1297         dumper_t *dumper;
1298
1299         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1300                 if (dumper->dump_opt_cnt)
1301                         dumper->dump_opt_cnt(dumper, tbl, len);
1302         }  /* for */
1303 }  /* stat_dump_opt_cnt */
1304
1305 /**
1306  * Initialize the dumper.
1307  */
1308 static void stat_dump_init(const char *name)
1309 {
1310         dumper_t *dumper;
1311
1312         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1313                 if (dumper->init)
1314                         dumper->init(dumper, name);
1315         }  /* for */
1316 }  /* stat_dump_init */
1317
1318 /**
1319  * Finish the dumper.
1320  */
1321 static void stat_dump_finish(void)
1322 {
1323         dumper_t *dumper;
1324
1325         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1326                 if (dumper->finish)
1327                         dumper->finish(dumper);
1328         }  /* for */
1329 }  /* stat_dump_finish */
1330
1331 /**
1332  * Register an additional function for all dumpers.
1333  */
1334 void stat_register_dumper_func(dump_graph_FUNC *const func)
1335 {
1336         dumper_t *dumper;
1337
1338         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1339                 if (! dumper->func_map)
1340                         dumper->func_map = pset_new_ptr(3);
1341                 pset_insert_ptr(dumper->func_map, (void*)func);
1342         }  /* for */
1343 }  /* stat_register_dumper_func */
1344
1345 /* ---------------------------------------------------------------------- */
1346
1347 /*
1348  * Helper: get an ir_op from an opcode.
1349  */
1350 ir_op *stat_get_op_from_opcode(unsigned code)
1351 {
1352         return opcode_find_entry((ir_opcode)code, status->ir_op_hash);
1353 }  /* stat_get_op_from_opcode */
1354
1355 /**
1356  * Hook: A new IR op is registered.
1357  *
1358  * @param ctx  the hook context
1359  * @param op   the new IR opcode that was created.
1360  */
1361 static void stat_new_ir_op(void *ctx, ir_op *op)
1362 {
1363         (void) ctx;
1364         if (! status->stat_options)
1365                 return;
1366
1367         STAT_ENTER;
1368         {
1369                 graph_entry_t *graph = graph_get_entry(NULL, status->irg_hash);
1370
1371                 /* execute for side effect :-) */
1372                 (void)opcode_get_entry(op, graph->opcode_hash);
1373
1374                 pset_insert(status->ir_op_hash, op, op->code);
1375         }
1376         STAT_LEAVE;
1377 }  /* stat_new_ir_op */
1378
1379 /**
1380  * Hook: An IR op is freed.
1381  *
1382  * @param ctx  the hook context
1383  * @param op   the IR opcode that is freed
1384  */
1385 static void stat_free_ir_op(void *ctx, ir_op *op)
1386 {
1387         (void) ctx;
1388         (void) op;
1389         if (! status->stat_options)
1390                 return;
1391
1392         STAT_ENTER;
1393         {
1394         }
1395         STAT_LEAVE;
1396 }  /* stat_free_ir_op */
1397
1398 /**
1399  * Hook: A new node is created.
1400  *
1401  * @param ctx   the hook context
1402  * @param irg   the IR graph on which the node is created
1403  * @param node  the new IR node that was created
1404  */
1405 static void stat_new_node(void *ctx, ir_graph *irg, ir_node *node)
1406 {
1407         (void) ctx;
1408         (void) irg;
1409         if (! status->stat_options)
1410                 return;
1411
1412         /* do NOT count during dead node elimination */
1413         if (status->in_dead_node_elim)
1414                 return;
1415
1416         STAT_ENTER;
1417         {
1418                 node_entry_t *entry;
1419                 graph_entry_t *graph;
1420                 ir_op *op = stat_get_irn_op(node);
1421
1422                 /* increase global value */
1423                 graph = graph_get_entry(NULL, status->irg_hash);
1424                 entry = opcode_get_entry(op, graph->opcode_hash);
1425                 cnt_inc(&entry->new_node);
1426
1427                 /* increase local value */
1428                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1429                 entry = opcode_get_entry(op, graph->opcode_hash);
1430                 cnt_inc(&entry->new_node);
1431         }
1432         STAT_LEAVE;
1433 }  /* stat_new_node */
1434
1435 /**
1436  * Hook: A node is changed into an Id node
1437  *
1438  * @param ctx   the hook context
1439  * @param node  the IR node that will be turned into an ID
1440  */
1441 static void stat_turn_into_id(void *ctx, ir_node *node)
1442 {
1443         (void) ctx;
1444         if (! status->stat_options)
1445                 return;
1446
1447         STAT_ENTER;
1448         {
1449                 node_entry_t *entry;
1450                 graph_entry_t *graph;
1451                 ir_op *op = stat_get_irn_op(node);
1452
1453                 /* increase global value */
1454                 graph = graph_get_entry(NULL, status->irg_hash);
1455                 entry = opcode_get_entry(op, graph->opcode_hash);
1456                 cnt_inc(&entry->into_Id);
1457
1458                 /* increase local value */
1459                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1460                 entry = opcode_get_entry(op, graph->opcode_hash);
1461                 cnt_inc(&entry->into_Id);
1462         }
1463         STAT_LEAVE;
1464 }  /* stat_turn_into_id */
1465
1466 /**
1467  * Hook: A node is normalized
1468  *
1469  * @param ctx   the hook context
1470  * @param node  the IR node that was normalized
1471  */
1472 static void stat_normalize(void *ctx, ir_node *node)
1473 {
1474         (void) ctx;
1475         if (! status->stat_options)
1476                 return;
1477
1478         STAT_ENTER;
1479         {
1480                 node_entry_t *entry;
1481                 graph_entry_t *graph;
1482                 ir_op *op = stat_get_irn_op(node);
1483
1484                 /* increase global value */
1485                 graph = graph_get_entry(NULL, status->irg_hash);
1486                 entry = opcode_get_entry(op, graph->opcode_hash);
1487                 cnt_inc(&entry->normalized);
1488
1489                 /* increase local value */
1490                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1491                 entry = opcode_get_entry(op, graph->opcode_hash);
1492                 cnt_inc(&entry->normalized);
1493         }
1494         STAT_LEAVE;
1495 }  /* stat_normalize */
1496
1497 /**
1498  * Hook: A new graph was created
1499  *
1500  * @param ctx  the hook context
1501  * @param irg  the new IR graph that was created
1502  * @param ent  the entity of this graph
1503  */
1504 static void stat_new_graph(void *ctx, ir_graph *irg, ir_entity *ent)
1505 {
1506         (void) ctx;
1507         if (! status->stat_options)
1508                 return;
1509
1510         STAT_ENTER;
1511         {
1512                 /* execute for side effect :-) */
1513                 graph_entry_t * graph = graph_get_entry(irg, status->irg_hash);
1514
1515                 graph->ent           = ent;
1516                 graph->is_deleted    = 0;
1517                 graph->is_leaf       = 0;
1518                 graph->is_leaf_call  = 0;
1519                 graph->is_recursive  = 0;
1520                 graph->is_chain_call = 0;
1521                 graph->is_strict     = 1;
1522                 graph->is_analyzed   = 0;
1523         }
1524         STAT_LEAVE;
1525 }  /* stat_new_graph */
1526
1527 /**
1528  * Hook: A graph will be deleted
1529  *
1530  * @param ctx  the hook context
1531  * @param irg  the IR graph that will be deleted
1532  *
1533  * Note that we still hold the information for this graph
1534  * in our hash maps; only a flag is set which prevents this
1535  * information from being changed. It is "frozen" from now on.
1536  */
1537 static void stat_free_graph(void *ctx, ir_graph *irg)
1538 {
1539         (void) ctx;
1540         if (! status->stat_options)
1541                 return;
1542
1543         STAT_ENTER;
1544         {
1545                 graph_entry_t *graph  = graph_get_entry(irg, status->irg_hash);
1546                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
1547
1548                 graph->is_deleted = 1;
1549
1550                 if (status->stat_options & FIRMSTAT_COUNT_DELETED) {
1551                         /* count the nodes of the graph now, it will be destroyed later */
1552                         update_graph_stat(global, graph);
1553                 }  /* if */
1554         }
1555         STAT_LEAVE;
1556 }  /* stat_free_graph */
1557
1558 /**
1559  * Hook: A walk over a graph is initiated. Do not count walks from statistic code.
1560  *
1561  * @param ctx  the hook context
1562  * @param irg  the IR graph that will be walked
1563  * @param pre  the pre walker
1564  * @param post the post walker
1565  */
1566 static void stat_irg_walk(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1567 {
1568         (void) ctx;
1569         (void) pre;
1570         (void) post;
1571         if (! status->stat_options)
1572                 return;
1573
1574         STAT_ENTER_SINGLE;
1575         {
1576                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1577
1578                 cnt_inc(&graph->cnt[gcnt_acc_walked]);
1579         }
1580         STAT_LEAVE;
1581 }  /* stat_irg_walk */
1582
1583 /**
1584  * Hook: A walk over a graph in block-wise order is initiated. Do not count walks from statistic code.
1585  *
1586  * @param ctx  the hook context
1587  * @param irg  the IR graph that will be walked
1588  * @param pre  the pre walker
1589  * @param post the post walker
1590  */
1591 static void stat_irg_walk_blkwise(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1592 {
1593         /* for now, do NOT differentiate between blockwise and normal */
1594         stat_irg_walk(ctx, irg, pre, post);
1595 }  /* stat_irg_walk_blkwise */
1596
1597 /**
1598  * Hook: A walk over the graph's blocks is initiated. Do not count walks from statistic code.
1599  *
1600  * @param ctx  the hook context
1601  * @param irg  the IR graph that will be walked
1602  * @param node the IR node
1603  * @param pre  the pre walker
1604  * @param post the post walker
1605  */
1606 static void stat_irg_block_walk(void *ctx, ir_graph *irg, ir_node *node, generic_func *pre, generic_func *post)
1607 {
1608         (void) ctx;
1609         (void) node;
1610         (void) pre;
1611         (void) post;
1612         if (! status->stat_options)
1613                 return;
1614
1615         STAT_ENTER_SINGLE;
1616         {
1617                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1618
1619                 cnt_inc(&graph->cnt[gcnt_acc_walked_blocks]);
1620         }
1621         STAT_LEAVE;
1622 }  /* stat_irg_block_walk */
1623
1624 /**
1625  * Called for every node that is removed due to an optimization.
1626  *
1627  * @param n     the IR node that will be removed
1628  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
1629  * @param kind  the optimization kind
1630  */
1631 static void removed_due_opt(ir_node *n, hmap_opt_entry_t *hmap, hook_opt_kind kind)
1632 {
1633         opt_entry_t *entry;
1634         ir_op *op = stat_get_irn_op(n);
1635
1636         /* ignore CSE for Constants */
1637         if (kind == HOOK_OPT_CSE && (is_Const(n) || is_SymConst(n)))
1638                 return;
1639
1640         /* increase the counter for this op in the given optimization hash map */
1641         entry = opt_get_entry(op, hmap);
1642         cnt_inc(&entry->count);
1643 }  /* removed_due_opt */
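
/*
 * Illustrative example (hypothetical call, mirroring how stat_merge_nodes()
 * below uses this helper):
 *   removed_due_opt(n, graph->opt_hash[HOOK_OPT_CSE], HOOK_OPT_CSE);
 * increments the counter for n's opcode in the graph's CSE hash, except when
 * n is a Const or SymConst, whose CSE hits are deliberately ignored above.
 */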
1644
1645 /**
1646  * Hook: Some nodes were optimized into some others due to an optimization.
1647  *
1648  * @param ctx  the hook context
1649  */
1650 static void stat_merge_nodes(
1651     void *ctx,
1652     ir_node **new_node_array, int new_num_entries,
1653     ir_node **old_node_array, int old_num_entries,
1654     hook_opt_kind opt)
1655 {
1656         (void) ctx;
1657         if (! status->stat_options)
1658                 return;
1659
1660         STAT_ENTER;
1661         {
1662                 int i, j;
1663                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1664
1665                 cnt_inc(&status->num_opts[opt]);
1666                 if (status->reassoc_run)
1667                         opt = HOOK_OPT_REASSOC;
1668
1669                 for (i = 0; i < old_num_entries; ++i) {
1670                         /* nodes might appear in both the new and the old array;
1671                            a node found in both sets is NOT counted as removed */
1672                         for (j = 0; j < new_num_entries; ++j) {
1673                                 if (old_node_array[i] == new_node_array[j])
1674                                         break;
1675                         }  /* for */
1676                         if (j >= new_num_entries) {
1677                                 int xopt = opt;
1678
1679                                 /* sometimes we did not detect that the node was replaced by a Const */
1680                                 if (opt == HOOK_OPT_CONFIRM && new_num_entries == 1) {
1681                                         ir_node *const irn = new_node_array[0];
1682                                         if (is_Const(irn) || is_SymConst(irn))
1683                                                 xopt = HOOK_OPT_CONFIRM_C;
1684                                 }  /* if */
1685
1686                                 removed_due_opt(old_node_array[i], graph->opt_hash[xopt], (hook_opt_kind)xopt);
1687                         }  /* if */
1688                 }  /* for */
1689         }
1690         STAT_LEAVE;
1691 }  /* stat_merge_nodes */
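
/*
 * Illustrative example: if an optimization reports old = {a, b, c} and
 * new = {b}, then a and c are counted as removed due to 'opt', while b
 * appears in both arrays and therefore survives.  While a reassociation
 * run is active, the reported kind is overridden with HOOK_OPT_REASSOC.
 */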
1692
1693 /**
1694  * Hook: Reassociation is started/stopped.
1695  *
1696  * @param ctx   the hook context
1697  * @param flag  if non-zero, reassociation is started, otherwise it is stopped
1698  */
1699 static void stat_reassociate(void *ctx, int flag)
1700 {
1701         (void) ctx;
1702         if (! status->stat_options)
1703                 return;
1704
1705         STAT_ENTER;
1706         {
1707                 status->reassoc_run = flag;
1708         }
1709         STAT_LEAVE;
1710 }  /* stat_reassociate */
1711
1712 /**
1713  * Hook: A node was lowered into other nodes
1714  *
1715  * @param ctx  the hook context
1716  * @param node the IR node that will be lowered
1717  */
1718 static void stat_lower(void *ctx, ir_node *node)
1719 {
1720         (void) ctx;
1721         if (! status->stat_options)
1722                 return;
1723
1724         STAT_ENTER;
1725         {
1726                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1727
1728                 removed_due_opt(node, graph->opt_hash[HOOK_LOWERED], HOOK_LOWERED);
1729         }
1730         STAT_LEAVE;
1731 }  /* stat_lower */
1732
1733 /**
1734  * Hook: A graph was inlined.
1735  *
1736  * @param ctx  the hook context
1737  * @param call the IR call that will be changed into the body of
1738  *             the called IR graph
1739  * @param called_irg  the IR graph representing the called routine
1740  */
1741 static void stat_inline(void *ctx, ir_node *call, ir_graph *called_irg)
1742 {
1743         (void) ctx;
1744         if (! status->stat_options)
1745                 return;
1746
1747         STAT_ENTER;
1748         {
1749                 ir_graph *irg = get_irn_irg(call);
1750                 graph_entry_t *i_graph = graph_get_entry(called_irg, status->irg_hash);
1751                 graph_entry_t *graph   = graph_get_entry(irg, status->irg_hash);
1752
1753                 cnt_inc(&graph->cnt[gcnt_acc_got_inlined]);
1754                 cnt_inc(&i_graph->cnt[gcnt_acc_was_inlined]);
1755         }
1756         STAT_LEAVE;
1757 }  /* stat_inline */
1758
1759 /**
1760  * Hook: A graph with tail recursions was optimized.
1761  *
1762  * @param ctx  the hook context
1763  */
1764 static void stat_tail_rec(void *ctx, ir_graph *irg, int n_calls)
1765 {
1766         (void) ctx;
1767         if (! status->stat_options)
1768                 return;
1769
1770         STAT_ENTER;
1771         {
1772                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1773
1774                 graph->num_tail_recursion += n_calls;
1775         }
1776         STAT_LEAVE;
1777 }  /* stat_tail_rec */
1778
1779 /**
1780  * Hook: Strength reduction was performed on an iteration variable.
1781  *
1782  * @param ctx  the hook context
1783  */
1784 static void stat_strength_red(void *ctx, ir_graph *irg, ir_node *strong)
1785 {
1786         (void) ctx;
1787         if (! status->stat_options)
1788                 return;
1789
1790         STAT_ENTER;
1791         {
1792                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1793                 cnt_inc(&graph->cnt[gcnt_acc_strength_red]);
1794
1795                 removed_due_opt(strong, graph->opt_hash[HOOK_OPT_STRENGTH_RED], HOOK_OPT_STRENGTH_RED);
1796         }
1797         STAT_LEAVE;
1798 }  /* stat_strength_red */
1799
1800 /**
1801  * Hook: Start/Stop the dead node elimination.
1802  *
1803  * @param ctx  the hook context
1804  */
1805 static void stat_dead_node_elim(void *ctx, ir_graph *irg, int start)
1806 {
1807         (void) ctx;
1808         (void) irg;
1809         if (! status->stat_options)
1810                 return;
1811
1812         status->in_dead_node_elim = (start != 0);
1813 }  /* stat_dead_node_elim */
1814
1815 /**
1816  * Hook: if-conversion was tried.
1817  */
1818 static void stat_if_conversion(void *context, ir_graph *irg, ir_node *phi,
1819                                int pos, ir_node *mux, if_result_t reason)
1820 {
1821         (void) context;
1822         (void) phi;
1823         (void) pos;
1824         (void) mux;
1825         if (! status->stat_options)
1826                 return;
1827
1828         STAT_ENTER;
1829         {
1830                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1831
1832                 cnt_inc(&graph->cnt[gcnt_if_conv + reason]);
1833         }
1834         STAT_LEAVE;
1835 }  /* stat_if_conversion */
1836
1837 /**
1838  * Hook: A real function call was optimized.
1839  */
1840 static void stat_func_call(void *context, ir_graph *irg, ir_node *call)
1841 {
1842         (void) context;
1843         (void) call;
1844         if (! status->stat_options)
1845                 return;
1846
1847         STAT_ENTER;
1848         {
1849                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1850
1851                 cnt_inc(&graph->cnt[gcnt_acc_real_func_call]);
1852         }
1853         STAT_LEAVE;
1854 }  /* stat_func_call */
1855
1856 /**
1857  * Hook: A multiply was replaced by a series of Shifts/Adds/Subs.
1858  *
1859  * @param ctx  the hook context
1860  */
1861 static void stat_arch_dep_replace_mul_with_shifts(void *ctx, ir_node *mul)
1862 {
1863         (void) ctx;
1864         if (! status->stat_options)
1865                 return;
1866
1867         STAT_ENTER;
1868         {
1869                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1870                 removed_due_opt(mul, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1871         }
1872         STAT_LEAVE;
1873 }  /* stat_arch_dep_replace_mul_with_shifts */
1874
1875 /**
1876  * Hook: A division by const was replaced.
1877  *
1878  * @param ctx   the hook context
1879  * @param node  the division node that will be optimized
1880  */
1881 static void stat_arch_dep_replace_division_by_const(void *ctx, ir_node *node)
1882 {
1883         (void) ctx;
1884         if (! status->stat_options)
1885                 return;
1886
1887         STAT_ENTER;
1888         {
1889                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1890                 removed_due_opt(node, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1891         }
1892         STAT_LEAVE;
1893 }  /* stat_arch_dep_replace_division_by_const */
1894
1895 /**
1896  * Update the register pressure of a block.
1897  *
1898  * @param irg        the irg containing the block
1899  * @param block      the block for which the reg pressure should be set
1900  * @param pressure   the pressure
1901  * @param class_name the name of the register class
1902  */
1903 void stat_be_block_regpressure(ir_graph *irg, ir_node *block, int pressure, const char *class_name)
1904 {
1905         if (! status->stat_options)
1906                 return;
1907
1908         STAT_ENTER;
1909         {
1910                 graph_entry_t        *graph = graph_get_entry(irg, status->irg_hash);
1911                 be_block_entry_t     *block_ent;
1912                 reg_pressure_entry_t *rp_ent;
1913
1914                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
1915                 rp_ent    = OALLOCZ(&status->be_data, reg_pressure_entry_t);
1916
1917                 rp_ent->class_name = class_name;
1918                 rp_ent->pressure   = pressure;
1919
1920                 pset_insert(block_ent->reg_pressure, rp_ent, hash_ptr(class_name));
1921         }
1922         STAT_LEAVE;
1923 }  /* stat_be_block_regpressure */
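
/*
 * Usage sketch (the class name "gp" is only an example; actual names come
 * from the backend's register classes):
 *   stat_be_block_regpressure(irg, block, 5, "gp");
 * records a pressure of 5 for that class in the given block and inserts the
 * entry into the block entry's reg_pressure set, hashed by the class name.
 */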
1924
1925 /**
1926  * Update the distribution of ready nodes of a block
1927  *
1928  * @param irg        the irg containing the block
1929  * @param block      the block for which the distribution of ready nodes is updated
1930  * @param num_ready  the number of ready nodes
1931  */
1932 void stat_be_block_sched_ready(ir_graph *irg, ir_node *block, int num_ready)
1933 {
1934         if (! status->stat_options)
1935                 return;
1936
1937         STAT_ENTER;
1938         {
1939                 graph_entry_t    *graph = graph_get_entry(irg, status->irg_hash);
1940                 be_block_entry_t *block_ent;
1941
1942                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
1943
1944                 /* increase the counter for the corresponding number of ready nodes */
1945                 stat_inc_int_distrib_tbl(block_ent->sched_ready, num_ready);
1946         }
1947         STAT_LEAVE;
1948 }  /* stat_be_block_sched_ready */
1949
1950 /**
1951  * Update the permutation statistic of a block.
1952  *
1953  * @param class_name the name of the register class
1954  * @param n_regs     number of registers in the register class
1955  * @param perm       the perm node
1956  * @param block      the block containing the perm
1957  * @param size       the size of the perm
1958  * @param real_size  number of pairs with different registers
1959  */
1960 void stat_be_block_stat_perm(const char *class_name, int n_regs, ir_node *perm, ir_node *block,
1961                              int size, int real_size)
1962 {
1963         if (! status->stat_options)
1964                 return;
1965
1966         STAT_ENTER;
1967         {
1968                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
1969                 be_block_entry_t   *block_ent;
1970                 perm_class_entry_t *pc_ent;
1971                 perm_stat_entry_t  *ps_ent;
1972
1973                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
1974                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
1975                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
1976
1977                 pc_ent->n_regs = n_regs;
1978
1979                 /* update information */
1980                 ps_ent->size      = size;
1981                 ps_ent->real_size = real_size;
1982         }
1983         STAT_LEAVE;
1984 }  /* stat_be_block_stat_perm */
1985
1986 /**
1987  * Update the permutation statistic of a single perm.
1988  *
1989  * @param class_name the name of the register class
1990  * @param perm       the perm node
1991  * @param block      the block containing the perm
1992  * @param is_chain   1 if chain, 0 if cycle
1993  * @param size       length of the cycle/chain
1994  * @param n_ops      the number of ops representing this cycle/chain after lowering
1995  */
1996 void stat_be_block_stat_permcycle(const char *class_name, ir_node *perm, ir_node *block,
1997                                   int is_chain, int size, int n_ops)
1998 {
1999         if (! status->stat_options)
2000                 return;
2001
2002         STAT_ENTER;
2003         {
2004                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2005                 be_block_entry_t   *block_ent;
2006                 perm_class_entry_t *pc_ent;
2007                 perm_stat_entry_t  *ps_ent;
2008
2009                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2010                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2011                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2012
2013                 if (is_chain) {
2014                         ps_ent->n_copies += n_ops;
2015                         stat_inc_int_distrib_tbl(ps_ent->chains, size);
2016                 } else {
2017                         ps_ent->n_exchg += n_ops;
2018                         stat_inc_int_distrib_tbl(ps_ent->cycles, size);
2019                 }  /* if */
2020         }
2021         STAT_LEAVE;
2022 }  /* stat_be_block_stat_permcycle */
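
/*
 * Illustrative example: a register chain of length 3 that was lowered to
 * 2 copy operations is reported as is_chain=1, size=3, n_ops=2; this adds
 * 2 to n_copies and increments the 'chains' distribution at index 3.  A
 * cycle is accounted for analogously via n_exchg and the 'cycles' table.
 */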
2023
2024 /* Dumps a statistics snapshot. */
2025 void stat_dump_snapshot(const char *name, const char *phase)
2026 {
2027         char fname[2048];
2028         const char *p;
2029         size_t l;
2030
2031         if (! status->stat_options)
2032                 return;
2033
2034         STAT_ENTER;
2035         {
2036                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
2037
2038                 /*
2039                  * The constant counters exist only globally, so we clear them here.
2040                  * As a consequence, they do NOT include the constants of graphs
2041                  * that have already been DELETED.
2042                  */
2043                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2044                         stat_const_clear(status);
2045
2046                 /* build the name */
2047                 p = strrchr(name, '/');
2048 #ifdef _WIN32
2049                 {
2050                         const char *q;
2051
2052                         q = strrchr(name, '\\');
2053
2054                         /* NULL might not be the smallest pointer */
2055                         if (q && (!p || q > p))
2056                                 p = q;
2057                 }
2058 #endif /* _WIN32 */
2059                 if (p) {
2060                         ++p;
2061                         l = p - name;
2062
2063                         if (l > sizeof(fname) - 1)
2064                                 l = sizeof(fname) - 1;
2065
2066                         memcpy(fname, name, l);
2067                         fname[l] = '\0';
2068                 } else {
2069                         fname[0] = '\0';
2070                         p = name;
2071                 }  /* if */
2072                 strncat(fname, "firmstat-", sizeof(fname)-strlen(fname)-1);
2073                 strncat(fname, phase,       sizeof(fname)-strlen(fname)-1);
2074                 strncat(fname, "-",         sizeof(fname)-strlen(fname)-1);
2075                 strncat(fname, p,           sizeof(fname)-strlen(fname)-1);
2076
2077                 stat_dump_init(fname);
2078
2079                 /* calculate the graph statistics */
2080                 foreach_pset(status->irg_hash, graph_entry_t, entry) {
2081                         if (entry->irg == NULL) {
2082                                 /* special entry for the global count */
2083                                 continue;
2084                         }  /* if */
2085                         if (! entry->is_deleted) {
2086                                 /* the graph is still alive, count the nodes on it */
2087                                 update_graph_stat(global, entry);
2088                         }  /* if */
2089                 }  /* for */
2090
2091                 /* some calculations are dependent; we pushed them onto the wait_q */
2092                 while (! pdeq_empty(status->wait_q)) {
2093                         graph_entry_t *const entry = (graph_entry_t*)pdeq_getr(status->wait_q);
2094
2095                         update_graph_stat_2(global, entry);
2096                 }  /* while */
2097
2098                 /* dump per graph */
2099                 foreach_pset(status->irg_hash, graph_entry_t, entry) {
2100                         if (entry->irg == NULL) {
2101                                 /* special entry for the global count */
2102                                 continue;
2103                         }  /* if */
2104
2105                         if (! entry->is_deleted || (status->stat_options & FIRMSTAT_COUNT_DELETED)) {
2106                                 stat_dump_graph(entry);
2107                                 stat_dump_registered(entry);
2108                         }  /* if */
2109
2110                         if (! entry->is_deleted) {
2111                                 /* clear the counters that are not accumulated */
2112                                 graph_clear_entry(entry, 0);
2113                         }  /* if */
2114                 }  /* for */
2115
2116                 /* dump global */
2117                 stat_dump_graph(global);
2118
2119                 /* dump the const info */
2120                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2121                         stat_dump_consts(&status->const_info);
2122
2123                 /* dump the parameter distribution */
2124                 stat_dump_param_tbl(status->dist_param_cnt, global);
2125
2126                 /* dump the optimization counter and clear them */
2127                 stat_dump_opt_cnt(status->num_opts, ARRAY_SIZE(status->num_opts));
2128                 clear_optimization_counter();
2129
2130                 stat_dump_finish();
2131
2132                 stat_finish_pattern_history(fname);
2133
2134                 /* clear the global counters here */
2135                 foreach_pset(global->opcode_hash, node_entry_t, entry) {
2136                         opcode_clear_entry(entry);
2137                 }  /* for */
2138                 /* clear all global counters */
2139                 graph_clear_entry(global, /*all=*/1);
2140         }
2141         STAT_LEAVE;
2142 }  /* stat_dump_snapshot */
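
/*
 * Usage sketch: stat_dump_snapshot("src/main.c", "lower") builds the base
 * name "src/firmstat-lower-main.c" (the directory part of 'name' is kept and
 * the basename is prefixed with "firmstat-<phase>-"); the exact output file
 * names and extensions are then chosen by the registered dumpers.
 */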
2143
2144 typedef struct pass_t {
2145         ir_prog_pass_t pass;
2146         const char     *fname;
2147         const char     *phase;
2148 } pass_t;
2149
2150 /**
2151  * Wrapper to run stat_dump_snapshot() as an ir_prog pass.
2152  */
2153 static int stat_dump_snapshot_wrapper(ir_prog *irp, void *context)
2154 {
2155         pass_t *pass = (pass_t*)context;
2156
2157         (void)irp;
2158         stat_dump_snapshot(pass->fname, pass->phase);
2159         return 0;
2160 }  /* stat_dump_snapshot_wrapper */
2161
2162 /**
2163  * Ensure that no verifier is run from the wrapper.
2164  */
2165 static int no_verify(ir_prog *prog, void *ctx)
2166 {
2167         (void)prog;
2168         (void)ctx;
2169         return 0;
2170 }
2171
2172 /**
2173  * Ensure that no dumper is run from the wrapper.
2174  */
2175 static void no_dump(ir_prog *prog, void *ctx, unsigned idx)
2176 {
2177         (void)prog;
2178         (void)ctx;
2179         (void)idx;
2180 }
2181
2182 /* create an ir_prog pass */
2183 ir_prog_pass_t *stat_dump_snapshot_pass(
2184         const char *name, const char *fname, const char *phase)
2185 {
2186         pass_t *pass = XMALLOCZ(pass_t);
2187
2188         def_prog_pass_constructor(
2189                 &pass->pass, name ? name : "stat_snapshot", stat_dump_snapshot_wrapper);
2190         pass->fname = fname;
2191         pass->phase = phase;
2192
2193         /* no dump/verify */
2194         pass->pass.dump_irprog   = no_dump;
2195         pass->pass.verify_irprog = no_verify;
2196
2197         return &pass->pass;
2198 }  /* stat_dump_snapshot_pass */
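
/*
 * Note: running the returned pass is equivalent to calling
 * stat_dump_snapshot(fname, phase) directly (see stat_dump_snapshot_wrapper);
 * dumping and verification of the ir_prog are explicitly disabled for it.
 */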
2199
2200 /** the hook entries for the Firm statistics module */
2201 static hook_entry_t stat_hooks[hook_last];
2202
2203 /* initialize the statistics module. */
2204 void firm_init_stat(void)
2205 {
2206 #define X(a)  a, sizeof(a)-1
2207 #define HOOK(h, fkt) \
2208         stat_hooks[h].hook._##h = fkt; register_hook(h, &stat_hooks[h])
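        /*
         * Expansion example: HOOK(hook_lower, stat_lower) becomes
         *   stat_hooks[hook_lower].hook._hook_lower = stat_lower;
         *   register_hook(hook_lower, &stat_hooks[hook_lower]);
         */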
2209         unsigned num = 0;
2210
2211         if (! (stat_options & FIRMSTAT_ENABLED))
2212                 return;
2213
2214         status = XMALLOCZ(stat_info_t);
2215
2216         /* enable statistics */
2217         status->stat_options = (stat_options & FIRMSTAT_ENABLED) ? stat_options : 0;
2218
2219         /* register all hooks */
2220         HOOK(hook_new_ir_op,                          stat_new_ir_op);
2221         HOOK(hook_free_ir_op,                         stat_free_ir_op);
2222         HOOK(hook_new_node,                           stat_new_node);
2223         HOOK(hook_turn_into_id,                       stat_turn_into_id);
2224         HOOK(hook_normalize,                          stat_normalize);
2225         HOOK(hook_new_graph,                          stat_new_graph);
2226         HOOK(hook_free_graph,                         stat_free_graph);
2227         HOOK(hook_irg_walk,                           stat_irg_walk);
2228         HOOK(hook_irg_walk_blkwise,                   stat_irg_walk_blkwise);
2229         HOOK(hook_irg_block_walk,                     stat_irg_block_walk);
2230         HOOK(hook_merge_nodes,                        stat_merge_nodes);
2231         HOOK(hook_reassociate,                        stat_reassociate);
2232         HOOK(hook_lower,                              stat_lower);
2233         HOOK(hook_inline,                             stat_inline);
2234         HOOK(hook_tail_rec,                           stat_tail_rec);
2235         HOOK(hook_strength_red,                       stat_strength_red);
2236         HOOK(hook_dead_node_elim,                     stat_dead_node_elim);
2237         HOOK(hook_if_conversion,                      stat_if_conversion);
2238         HOOK(hook_func_call,                          stat_func_call);
2239         HOOK(hook_arch_dep_replace_mul_with_shifts,   stat_arch_dep_replace_mul_with_shifts);
2240         HOOK(hook_arch_dep_replace_division_by_const, stat_arch_dep_replace_division_by_const);
2241
2242         obstack_init(&status->cnts);
2243         obstack_init(&status->be_data);
2244
2245         /* create the hash-tables */
2246         status->irg_hash   = new_pset(graph_cmp, 8);
2247         status->ir_op_hash = new_pset(opcode_cmp_2, 1);
2248
2249         /* create the wait queue */
2250         status->wait_q     = new_pdeq();
2251
2252         if (stat_options & FIRMSTAT_COUNT_STRONG_OP) {
2253                 /* build the pseudo-ops */
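                /*
                 * The pseudo-ops get codes counting down from the largest
                 * unsigned value (--num wraps around from 0), presumably so
                 * that they can never collide with the codes of real Firm ops.
                 */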
2254
2255                 _op_Phi0.code    = --num;
2256                 _op_Phi0.name    = new_id_from_chars(X("Phi0"));
2257
2258                 _op_PhiM.code    = --num;
2259                 _op_PhiM.name    = new_id_from_chars(X("PhiM"));
2260
2261                 _op_ProjM.code   = --num;
2262                 _op_ProjM.name   = new_id_from_chars(X("ProjM"));
2263
2264                 _op_MulC.code    = --num;
2265                 _op_MulC.name    = new_id_from_chars(X("MulC"));
2266
2267                 _op_DivC.code    = --num;
2268                 _op_DivC.name    = new_id_from_chars(X("DivC"));
2269
2270                 _op_ModC.code    = --num;
2271                 _op_ModC.name    = new_id_from_chars(X("ModC"));
2272
2273                 status->op_Phi0    = &_op_Phi0;
2274                 status->op_PhiM    = &_op_PhiM;
2275                 status->op_ProjM   = &_op_ProjM;
2276                 status->op_MulC    = &_op_MulC;
2277                 status->op_DivC    = &_op_DivC;
2278                 status->op_ModC    = &_op_ModC;
2279         } else {
2280                 status->op_Phi0    = NULL;
2281                 status->op_PhiM    = NULL;
2282                 status->op_ProjM   = NULL;
2283                 status->op_MulC    = NULL;
2284                 status->op_DivC    = NULL;
2285                 status->op_ModC    = NULL;
2286         }  /* if */
2287
2288         /* for Florian: count the Sel depth */
2289         if (stat_options & FIRMSTAT_COUNT_SELS) {
2290                 _op_SelSel.code    = --num;
2291                 _op_SelSel.name    = new_id_from_chars(X("Sel(Sel)"));
2292
2293                 _op_SelSelSel.code = --num;
2294                 _op_SelSelSel.name = new_id_from_chars(X("Sel(Sel(Sel))"));
2295
2296                 status->op_SelSel    = &_op_SelSel;
2297                 status->op_SelSelSel = &_op_SelSelSel;
2298         } else {
2299                 status->op_SelSel    = NULL;
2300                 status->op_SelSelSel = NULL;
2301         }  /* if */
2302
2303         /* register the dumper */
2304         stat_register_dumper(&simple_dumper);
2305
2306         if (stat_options & FIRMSTAT_CSV_OUTPUT)
2307                 stat_register_dumper(&csv_dumper);
2308
2309         /* initialize the pattern hash */
2310         stat_init_pattern_history(stat_options & FIRMSTAT_PATTERN_ENABLED);
2311
2312         /* initialize the Const options */
2313         if (stat_options & FIRMSTAT_COUNT_CONSTS)
2314                 stat_init_const_cnt(status);
2315
2316         /* distribution table for parameter counts */
2317         status->dist_param_cnt = stat_new_int_distrib_tbl();
2318
2319         clear_optimization_counter();
2320
2321 #undef HOOK
2322 #undef X
2323 }  /* firm_init_stat */
2324
2325 /**
2326  * Frees all dumper structures.
2327  */
2328 static void stat_term_dumper(void)
2329 {
2330         dumper_t *dumper, *next_dumper;
2331
2332         for (dumper = status->dumper; dumper; /* iteration done in loop body */ ) {
2333                 if (dumper->func_map)
2334                         del_pset(dumper->func_map);
2335
2336                 next_dumper = dumper->next;
2337                 free(dumper);
2338                 dumper = next_dumper;
2339         }  /* for */
2340 }  /* stat_term_dumper */
2341
2342
2343 /* Terminates the statistics module, frees all memory. */
2344 void stat_term(void)
2345 {
2346         if (status != (stat_info_t *)&status_disable) {
2347                 obstack_free(&status->be_data, NULL);
2348                 obstack_free(&status->cnts, NULL);
2349
2350                 stat_term_dumper();
2351
2352                 xfree(status);
2353                 status = (stat_info_t *)&status_disable;
2354         }
2355 }  /* stat_term */
2356
2357 /* returns 1 if statistics were initialized, 0 otherwise */
2358 int stat_is_active(void)
2359 {
2360         return status != (stat_info_t *)&status_disable;
2361 }  /* stat_is_active */
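
/*
 * Typical lifecycle (a sketch, assuming FIRMSTAT_ENABLED is set in the
 * option mask registered below):
 *
 *   firm_init_stat();                      // allocate status, register hooks
 *   ...run construction and optimizations...
 *   stat_dump_snapshot("main.c", "opt");   // dump per-phase statistics
 *   stat_term();                           // free all statistics memory
 */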
2362
2363 void init_stat(void)
2364 {
2365         lc_opt_entry_t *root_grp = firm_opt_get_root();
2366         lc_opt_entry_t *be_grp   = lc_opt_get_grp(root_grp, "be");
2367
2368         static const lc_opt_enum_mask_items_t stat_items[] = {
2369                 { "enabled",         FIRMSTAT_ENABLED         },
2370                 { "pattern",         FIRMSTAT_PATTERN_ENABLED },
2371                 { "count_strong_op", FIRMSTAT_COUNT_STRONG_OP },
2372                 { "count_dag",       FIRMSTAT_COUNT_DAG       },
2373                 { "count_deleted",   FIRMSTAT_COUNT_DELETED   },
2374                 { "count_sels",      FIRMSTAT_COUNT_SELS      },
2375                 { "count_consts",    FIRMSTAT_COUNT_CONSTS    },
2376                 { "csv_output",      FIRMSTAT_CSV_OUTPUT      },
2377                 { NULL,              0 }
2378         };
2379         static lc_opt_enum_mask_var_t statmask = { &stat_options, stat_items };
2380         static const lc_opt_table_entry_t stat_optionstable[] = {
2381                 LC_OPT_ENT_ENUM_MASK("statistics", "enable statistics",   &statmask),
2382                 LC_OPT_LAST
2383         };
2384         lc_opt_add_table(be_grp, stat_optionstable);
2385 }