Remove the unused function fail_char().
[libfirm] / ir / stat / firmstat.c
1 /*
2  * Copyright (C) 1995-2010 University of Karlsruhe.  All rights reserved.
3  *
4  * This file is part of libFirm.
5  *
6  * This file may be distributed and/or modified under the terms of the
7  * GNU General Public License version 2 as published by the Free Software
8  * Foundation and appearing in the file LICENSE.GPL included in the
9  * packaging of this file.
10  *
11  * Licensees holding valid libFirm Professional Edition licenses may use
12  * this file in accordance with the libFirm Commercial License.
13  * Agreement provided with the Software.
14  *
15  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17  * PURPOSE.
18  */
19
20 /**
21  * @file
22  * @brief   Statistics for Firm.
23  * @author  Michael Beck
24  * @version $Id$
25  */
26 #include "config.h"
27
28 #ifdef FIRM_STATISTICS
29
30 #include <stdio.h>
31 #include <stdlib.h>
32 #include <string.h>
33
34 #include "irouts.h"
35 #include "irdump.h"
36 #include "hashptr.h"
37 #include "firmstat_t.h"
38 #include "irpass_t.h"
39 #include "pattern.h"
40 #include "dags.h"
41 #include "stat_dmp.h"
42 #include "xmalloc.h"
43 #include "irhooks.h"
44 #include "util.h"
45
46 /*
47  * These need to be static:
48  * special pseudo opcodes that we need for counting some interesting cases.
49  */
50
51 /**
52  * The Phi0, a node that is created during SSA construction
53  */
54 static ir_op _op_Phi0;
55
56 /** The PhiM, just to count memory Phis. */
57 static ir_op _op_PhiM;
58
59 /** The Mul by Const node. */
60 static ir_op _op_MulC;
61
62 /** The Div by Const node. */
63 static ir_op _op_DivC;
64
65 /** The Mod by Const node. */
66 static ir_op _op_ModC;
67
68 /** The memory Proj node. */
69 static ir_op _op_ProjM;
70
71 /** A Sel of a Sel */
72 static ir_op _op_SelSel;
73
74 /** A Sel of a Sel of a Sel */
75 static ir_op _op_SelSelSel;
76
77 /* ---------------------------------------------------------------------------------- */
78
79 /** Marks the beginning of a statistic (hook) function. */
80 #define STAT_ENTER    ++status->recursive
81
82 /** Marks the end of a statistic (hook) function. */
83 #define STAT_LEAVE    --status->recursive
84
85 /** Allows entering a statistic function only when we are not already in a hook. */
86 #define STAT_ENTER_SINGLE    do { if (status->recursive > 0) return; ++status->recursive; } while (0)
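
/* The three macros above are used by the statistic hooks below in a fixed
 * pattern; a minimal sketch, taken from the hook implementations in this
 * file:
 *
 *     if (! status->stat_options)
 *         return;
 *     STAT_ENTER;
 *     {
 *         ... look up graph/opcode entries and update their counters ...
 *     }
 *     STAT_LEAVE;
 */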
87
88 /**
89  * global status
90  */
91 static const unsigned status_disable = 0;
92 static stat_info_t *status = (stat_info_t *)&status_disable;
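
/* Note: status initially points at the zeroed word above, so the hooks,
 * which check status->stat_options (and status->recursive) before doing
 * any work, treat statistics as disabled until a real stat_info_t is
 * installed by the statistics initialization. */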
93
94 /**
95  * Compare two elements of the opcode hash.
96  */
97 static int opcode_cmp(const void *elt, const void *key)
98 {
99         const node_entry_t *e1 = (const node_entry_t*)elt;
100         const node_entry_t *e2 = (const node_entry_t*)key;
101
102         return e1->op->code - e2->op->code;
103 }  /* opcode_cmp */
104
105 /**
106  * Compare two elements of the graph hash.
107  */
108 static int graph_cmp(const void *elt, const void *key)
109 {
110         const graph_entry_t *e1 = (const graph_entry_t*)elt;
111         const graph_entry_t *e2 = (const graph_entry_t*)key;
112
113         return e1->irg != e2->irg;
114 }  /* graph_cmp */
115
116 /**
117  * Compare two elements of the optimization hash.
118  */
119 static int opt_cmp(const void *elt, const void *key)
120 {
121         const opt_entry_t *e1 = (const opt_entry_t*)elt;
122         const opt_entry_t *e2 = (const opt_entry_t*)key;
123
124         return e1->op->code != e2->op->code;
125 }  /* opt_cmp */
126
127 /**
128  * Compare two elements of the block/extbb hash.
129  */
130 static int block_cmp(const void *elt, const void *key)
131 {
132         const block_entry_t *e1 = (const block_entry_t*)elt;
133         const block_entry_t *e2 = (const block_entry_t*)key;
134
135         /* it's enough to compare the block number */
136         return e1->block_nr != e2->block_nr;
137 }  /* block_cmp */
138
139 /**
140  * Compare two elements of the be_block hash.
141  */
142 static int be_block_cmp(const void *elt, const void *key)
143 {
144         const be_block_entry_t *e1 = (const be_block_entry_t*)elt;
145         const be_block_entry_t *e2 = (const be_block_entry_t*)key;
146
147         return e1->block_nr != e2->block_nr;
148 }  /* be_block_cmp */
149
150 /**
151  * Compare two elements of reg pressure hash.
152  */
153 static int reg_pressure_cmp(const void *elt, const void *key)
154 {
155         const reg_pressure_entry_t *e1 = (const reg_pressure_entry_t*)elt;
156         const reg_pressure_entry_t *e2 = (const reg_pressure_entry_t*)key;
157
158         return e1->class_name != e2->class_name;
159 }  /* reg_pressure_cmp */
160
161 /**
162  * Compare two elements of the perm_stat hash.
163  */
164 static int perm_stat_cmp(const void *elt, const void *key)
165 {
166         const perm_stat_entry_t *e1 = (const perm_stat_entry_t*)elt;
167         const perm_stat_entry_t *e2 = (const perm_stat_entry_t*)key;
168
169         return e1->perm != e2->perm;
170 }  /* perm_stat_cmp */
171
172 /**
173  * Compare two elements of the perm_class hash.
174  */
175 static int perm_class_cmp(const void *elt, const void *key)
176 {
177         const perm_class_entry_t *e1 = (const perm_class_entry_t*)elt;
178         const perm_class_entry_t *e2 = (const perm_class_entry_t*)key;
179
180         return e1->class_name != e2->class_name;
181 }  /* perm_class_cmp */
182
183 /**
184  * Compare two elements of the ir_op hash.
185  */
186 static int opcode_cmp_2(const void *elt, const void *key)
187 {
188         const ir_op *e1 = (const ir_op*)elt;
189         const ir_op *e2 = (const ir_op*)key;
190
191         return e1->code != e2->code;
192 }  /* opcode_cmp_2 */
193
194 /**
195  * Compare two elements of the address_mark set.
196  */
197 static int address_mark_cmp(const void *elt, const void *key, size_t size)
198 {
199         const address_mark_entry_t *e1 = (const address_mark_entry_t*)elt;
200         const address_mark_entry_t *e2 = (const address_mark_entry_t*)key;
201         (void) size;
202
203         /* compare only the nodes, the rest is used as data container */
204         return e1->node != e2->node;
205 }  /* address_mark_cmp */
206
207 /**
208  * Clears all counters in a node_entry_t.
209  */
210 static void opcode_clear_entry(node_entry_t *elem)
211 {
212         cnt_clr(&elem->cnt_alive);
213         cnt_clr(&elem->new_node);
214         cnt_clr(&elem->into_Id);
215         cnt_clr(&elem->normalized);
216 }  /* opcode_clear_entry */
217
218 /**
219  * Returns the associated node_entry_t for an ir_op (and allocates
220  * one if not yet available).
221  *
222  * @param op    the IR operation
223  * @param hmap  a hash map containing ir_op* -> node_entry_t*
224  */
225 static node_entry_t *opcode_get_entry(const ir_op *op, hmap_node_entry_t *hmap)
226 {
227         node_entry_t key;
228         node_entry_t *elem;
229
230         key.op = op;
231
232         elem = (node_entry_t*)pset_find(hmap, &key, op->code);
233         if (elem)
234                 return elem;
235
236         elem = OALLOCZ(&status->cnts, node_entry_t);
237
238         /* clear counter */
239         opcode_clear_entry(elem);
240
241         elem->op = op;
242
243         return (node_entry_t*)pset_insert(hmap, elem, op->code);
244 }  /* opcode_get_entry */
245
246 /**
247  * Returns the associated ir_op for an opcode
248  *
249  * @param code  the IR opcode
250  * @param hmap  the hash map containing opcode -> ir_op*
251  */
252 static ir_op *opcode_find_entry(ir_opcode code, hmap_ir_op *hmap)
253 {
254         ir_op key;
255
256         key.code = code;
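        /* a partial key (only the opcode is set) is sufficient here,
         * because the ir_op hash compares its entries by opcode only */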
257         return (ir_op*)pset_find(hmap, &key, code);
258 }  /* opcode_find_entry */
259
260 /**
261  * Clears all counters in a graph_entry_t.
262  *
263  * @param elem  the graph entry
264  * @param all   if non-zero, clears all counters, else leaves the accumulated ones
265  */
266 static void graph_clear_entry(graph_entry_t *elem, int all)
267 {
268         int i;
269
270         /* clear accumulated / non-accumulated counter */
271         for (i = all ? 0 : _gcnt_non_acc; i < _gcnt_last; ++i) {
272                 cnt_clr(&elem->cnt[i]);
273         }  /* for */
274
275         if (elem->block_hash) {
276                 del_pset(elem->block_hash);
277                 elem->block_hash = NULL;
278         }  /* if */
279
280         if (elem->extbb_hash) {
281                 del_pset(elem->extbb_hash);
282                 elem->extbb_hash = NULL;
283         }  /* if */
284
285         obstack_free(&elem->recalc_cnts, NULL);
286         obstack_init(&elem->recalc_cnts);
287 }  /* graph_clear_entry */
288
289 /**
290  * Returns the associated graph_entry_t for an IR graph.
291  *
292  * @param irg   the IR graph, NULL for the global counter
293  * @param hmap  the hash map containing ir_graph* -> graph_entry_t*
294  */
295 static graph_entry_t *graph_get_entry(ir_graph *irg, hmap_graph_entry_t *hmap)
296 {
297         graph_entry_t key;
298         graph_entry_t *elem;
299         size_t i;
300
301         key.irg = irg;
302
303         elem = (graph_entry_t*)pset_find(hmap, &key, HASH_PTR(irg));
304
305         if (elem) {
306                 /* create the backend block information hash map */
307                 if (! elem->be_block_hash)
308                         elem->be_block_hash = new_pset(be_block_cmp, 5);
309
310                 return elem;
311         }  /* if */
312
313         /* allocate a new one */
314         elem = OALLOCZ(&status->cnts, graph_entry_t);
315         obstack_init(&elem->recalc_cnts);
316
317         /* clear counter */
318         graph_clear_entry(elem, 1);
319
320         /* new hash table for opcodes here  */
321         elem->opcode_hash   = new_pset(opcode_cmp, 5);
322         elem->address_mark  = new_set(address_mark_cmp, 5);
323         elem->irg           = irg;
324
325         /* these hash tables are created on demand */
326         elem->block_hash = NULL;
327         elem->extbb_hash = NULL;
328
329         for (i = 0; i < sizeof(elem->opt_hash)/sizeof(elem->opt_hash[0]); ++i)
330                 elem->opt_hash[i] = new_pset(opt_cmp, 4);
331
332         return (graph_entry_t*)pset_insert(hmap, elem, HASH_PTR(irg));
333 }  /* graph_get_entry */
334
335 /**
336  * Clears all counters in an opt_entry_t.
337  */
338 static void opt_clear_entry(opt_entry_t *elem)
339 {
340         cnt_clr(&elem->count);
341 }  /* opt_clear_entry */
342
343 /**
344  * Returns the associated opt_entry_t for an IR operation.
345  *
346  * @param op    the IR operation
347  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
348  */
349 static opt_entry_t *opt_get_entry(const ir_op *op, hmap_opt_entry_t *hmap)
350 {
351         opt_entry_t key;
352         opt_entry_t *elem;
353
354         key.op = op;
355
356         elem = (opt_entry_t*)pset_find(hmap, &key, op->code);
357         if (elem)
358                 return elem;
359
360         elem = OALLOCZ(&status->cnts, opt_entry_t);
361
362         /* clear new counter */
363         opt_clear_entry(elem);
364
365         elem->op = op;
366
367         return (opt_entry_t*)pset_insert(hmap, elem, op->code);
368 }  /* opt_get_entry */
369
370 /**
371  * Clears all counters in a block_entry_t.
372  */
373 static void block_clear_entry(block_entry_t *elem)
374 {
375         int i;
376
377         for (i = 0; i < _bcnt_last; ++i)
378                 cnt_clr(&elem->cnt[i]);
379 }  /* block_clear_entry */
380
381 /**
382  * Returns the associated block_entry_t for a block.
383  *
384  * @param block_nr  an IR block number
385  * @param hmap      a hash map containing long -> block_entry_t
386  */
387 static block_entry_t *block_get_entry(struct obstack *obst, long block_nr, hmap_block_entry_t *hmap)
388 {
389         block_entry_t key;
390         block_entry_t *elem;
391
392         key.block_nr = block_nr;
393
394         elem = (block_entry_t*)pset_find(hmap, &key, block_nr);
395         if (elem)
396                 return elem;
397
398         elem = OALLOCZ(obst, block_entry_t);
399
400         /* clear new counter */
401         block_clear_entry(elem);
402
403         elem->block_nr = block_nr;
404
405         return (block_entry_t*)pset_insert(hmap, elem, block_nr);
406 }  /* block_get_entry */
407
408 /**
409  * Clear all sets in be_block_entry_t.
410  */
411 static void be_block_clear_entry(be_block_entry_t *elem)
412 {
413         if (elem->reg_pressure)
414                 del_pset(elem->reg_pressure);
415
416         if (elem->sched_ready)
417                 stat_delete_distrib_tbl(elem->sched_ready);
418
419         if (elem->perm_class_stat)
420                 del_pset(elem->perm_class_stat);
421
422         elem->reg_pressure    = new_pset(reg_pressure_cmp, 5);
423         elem->sched_ready     = stat_new_int_distrib_tbl();
424         elem->perm_class_stat = new_pset(perm_class_cmp, 5);
425 }  /* be_block_clear_entry */
426
427 /**
428  * Returns the associated be_block_entry_t for a block.
429  *
430  * @param block_nr  an IR block number
431  * @param hmap      a hash map containing long -> be_block_entry_t
432  */
433 static be_block_entry_t *be_block_get_entry(struct obstack *obst, long block_nr, hmap_be_block_entry_t *hmap)
434 {
435         be_block_entry_t key;
436         be_block_entry_t *elem;
437
438         key.block_nr = block_nr;
439
440         elem = (be_block_entry_t*)pset_find(hmap, &key, block_nr);
441         if (elem)
442                 return elem;
443
444         elem = OALLOCZ(obst, be_block_entry_t);
445
446         /* clear new counter */
447         be_block_clear_entry(elem);
448
449         elem->block_nr = block_nr;
450
451         return (be_block_entry_t*)pset_insert(hmap, elem, block_nr);
452 }  /* be_block_get_entry */
453
454 /**
455  * clears all sets in perm_class_entry_t
456  */
457 static void perm_class_clear_entry(perm_class_entry_t *elem)
458 {
459         if (elem->perm_stat)
460                 del_pset(elem->perm_stat);
461
462         elem->perm_stat = new_pset(perm_stat_cmp, 5);
463 }  /* perm_class_clear_entry */
464
465 /**
466  * Returns the associated perm_class entry for a register class.
467  *
468  * @param class_name  the register class name
469  * @param hmap        a hash map containing class_name -> perm_class_entry_t
470  */
471 static perm_class_entry_t *perm_class_get_entry(struct obstack *obst, const char *class_name,
472                                                 hmap_perm_class_entry_t *hmap)
473 {
474         perm_class_entry_t key;
475         perm_class_entry_t *elem;
476
477         key.class_name = class_name;
478
479         elem = (perm_class_entry_t*)pset_find(hmap, &key, HASH_PTR(class_name));
480         if (elem)
481                 return elem;
482
483         elem = OALLOCZ(obst, perm_class_entry_t);
484
485         /* clear new counter */
486         perm_class_clear_entry(elem);
487
488         elem->class_name = class_name;
489
490         return (perm_class_entry_t*)pset_insert(hmap, elem, HASH_PTR(class_name));
491 }  /* perm_class_get_entry */
492
493 /**
494  * clears all sets in perm_stat_entry_t
495  */
496 static void perm_stat_clear_entry(perm_stat_entry_t *elem)
497 {
498         if (elem->chains)
499                 stat_delete_distrib_tbl(elem->chains);
500
501         if (elem->cycles)
502                 stat_delete_distrib_tbl(elem->cycles);
503
504         elem->chains = stat_new_int_distrib_tbl();
505         elem->cycles = stat_new_int_distrib_tbl();
506 }  /* perm_stat_clear_entry */
507
508 /**
509  * Returns the associated perm_stat entry for a perm.
510  *
511  * @param perm      the perm node
512  * @param hmap      a hash map containing perm -> perm_stat_entry_t
513  */
514 static perm_stat_entry_t *perm_stat_get_entry(struct obstack *obst, ir_node *perm, hmap_perm_stat_entry_t *hmap)
515 {
516         perm_stat_entry_t key;
517         perm_stat_entry_t *elem;
518
519         key.perm = perm;
520
521         elem = (perm_stat_entry_t*)pset_find(hmap, &key, HASH_PTR(perm));
522         if (elem)
523                 return elem;
524
525         elem = OALLOCZ(obst, perm_stat_entry_t);
526
527         /* clear new counter */
528         perm_stat_clear_entry(elem);
529
530         elem->perm = perm;
531
532         return (perm_stat_entry_t*)pset_insert(hmap, elem, HASH_PTR(perm));
533 }  /* perm_stat_get_entry */
534
535 /**
536  * Clear optimizations counter,
537  */
538 static void clear_optimization_counter(void)
539 {
540         int i;
541         for (i = 0; i < FS_OPT_MAX; ++i)
542                 cnt_clr(&status->num_opts[i]);
543 }
544
545 /**
546  * Returns the ir_op for an IR node,
547  * handles special cases and returns pseudo opcodes.
548  *
549  * @param node  an IR node
550  */
551 static ir_op *stat_get_irn_op(ir_node *node)
552 {
553         ir_op *op = get_irn_op(node);
554         unsigned opc = op->code;
555
556         switch (opc) {
557         case iro_Phi:
558                 if (get_irn_arity(node) == 0) {
559                         /* special case, a Phi0 node, count on extra counter */
560                         op = status->op_Phi0 ? status->op_Phi0 : op;
561                 } else if (get_irn_mode(node) == mode_M) {
562                         /* special case, a Memory Phi node, count on extra counter */
563                         op = status->op_PhiM ? status->op_PhiM : op;
564                 }  /* if */
565                 break;
566         case iro_Proj:
567                 if (get_irn_mode(node) == mode_M) {
568                         /* special case, a Memory Proj node, count on extra counter */
569                         op = status->op_ProjM ? status->op_ProjM : op;
570                 }  /* if */
571                 break;
572         case iro_Mul:
573                 if (is_Const(get_Mul_left(node)) || is_Const(get_Mul_right(node))) {
574                         /* special case, a Multiply by a const, count on extra counter */
575                         op = status->op_MulC ? status->op_MulC : op;
576                 }  /* if */
577                 break;
578         case iro_Div:
579                 if (is_Const(get_Div_right(node))) {
580                         /* special case, a division by a const, count on extra counter */
581                         op = status->op_DivC ? status->op_DivC : op;
582                 }  /* if */
583                 break;
584         case iro_Mod:
585                 if (is_Const(get_Mod_right(node))) {
586                         /* special case, a modulo by a const, count on extra counter */
587                         op = status->op_ModC ? status->op_ModC : op;
588                 }  /* if */
589                 break;
590         case iro_Sel:
591                 if (is_Sel(get_Sel_ptr(node))) {
592                         /* special case, a Sel of a Sel, count on extra counter */
593                         op = status->op_SelSel ? status->op_SelSel : op;
594                         if (is_Sel(get_Sel_ptr(get_Sel_ptr(node)))) {
595                                 /* special case, a Sel of a Sel of a Sel, count on extra counter */
596                                 op = status->op_SelSelSel ? status->op_SelSelSel : op;
597                         }  /* if */
598                 }  /* if */
599                 break;
600         default:
601                 ;
602         }  /* switch */
603
604         return op;
605 }  /* stat_get_irn_op */
606
607 /**
608  * Update the block counter.
609  */
610 static void update_block_info(ir_node *node, graph_entry_t *graph)
611 {
612         ir_op *op = get_irn_op(node);
613         ir_node *block;
614         block_entry_t *b_entry;
615         int i, arity;
616
617         /* check for block */
618         if (op == op_Block) {
619                 arity = get_irn_arity(node);
620                 b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(node), graph->block_hash);
621                 /* mark start and end blocks to allow filtering them out */
622                 if (node == get_irg_start_block(graph->irg))
623                         b_entry->is_start = 1;
624                 else if (node == get_irg_end_block(graph->irg))
625                         b_entry->is_end = 1;
626
627                 /* count all incoming edges */
628                 for (i = 0; i < arity; ++i) {
629                         ir_node *pred = get_irn_n(node, i);
630                         ir_node *other_block = get_nodes_block(pred);
631                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
632
633                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
634                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
635                 }  /* for */
636                 return;
637         }  /* if */
638
639         block   = get_nodes_block(node);
640         b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(block), graph->block_hash);
641
642         if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
643                 /* count data Phi per block */
644                 cnt_inc(&b_entry->cnt[bcnt_phi_data]);
645         }  /* if */
646
647         /* we have a new node in our block */
648         cnt_inc(&b_entry->cnt[bcnt_nodes]);
649
650         /* don't count keep-alive edges */
651         if (is_End(node))
652                 return;
653
654         arity = get_irn_arity(node);
655
656         for (i = 0; i < arity; ++i) {
657                 ir_node *pred = get_irn_n(node, i);
658                 ir_node *other_block;
659
660                 other_block = get_nodes_block(pred);
661
662                 if (other_block == block)
663                         cnt_inc(&b_entry->cnt[bcnt_edges]); /* an edge inside the block */
664                 else {
665                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
666
667                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
668                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
669                 }  /* if */
670         }  /* for */
671 }  /* update_block_info */
672
673 /**
674  * Update the extended block counter.
675  */
676 static void update_extbb_info(ir_node *node, graph_entry_t *graph)
677 {
678         ir_op *op = get_irn_op(node);
679         ir_extblk *extbb;
680         extbb_entry_t *eb_entry;
681         int i, arity;
682
683         /* check for block */
684         if (op == op_Block) {
685                 extbb = get_nodes_extbb(node);
686                 arity = get_irn_arity(node);
687                 eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);
688
689                 /* count all incoming edges */
690                 for (i = 0; i < arity; ++i) {
691                         ir_node *pred = get_irn_n(node, i);
692                         ir_extblk *other_extbb = get_nodes_extbb(pred);
693
694                         if (extbb != other_extbb) {
695                                 extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);
696
697                                 cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
698                                 cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
699                         }  /* if */
700                 }  /* for */
701                 return;
702         }  /* if */
703
704         extbb    = get_nodes_extbb(node);
705         eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);
706
707         if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
708                 /* count data Phi per extbb */
709                 cnt_inc(&eb_entry->cnt[bcnt_phi_data]);
710         }  /* if */
711
712         /* we have a new node in our block */
713         cnt_inc(&eb_entry->cnt[bcnt_nodes]);
714
715         /* don't count keep-alive edges */
716         if (is_End(node))
717                 return;
718
719         arity = get_irn_arity(node);
720
721         for (i = 0; i < arity; ++i) {
722                 ir_node *pred = get_irn_n(node, i);
723                 ir_extblk *other_extbb = get_nodes_extbb(pred);
724
725                 if (other_extbb == extbb)
726                         cnt_inc(&eb_entry->cnt[bcnt_edges]);    /* an edge inside the extbb */
727                 else {
728                         extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);
729
730                         cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
731                         cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
732                 }  /* if */
733         }  /* for */
734 }  /* update_extbb_info */
735
736 /**
737  * Calculates how many arguments of the call are const, updates
738  * param distribution.
739  */
740 static void analyse_params_of_Call(graph_entry_t *graph, ir_node *call)
741 {
742         int i, num_const_args = 0, num_local_adr = 0;
743         int n = get_Call_n_params(call);
744
745         for (i = 0; i < n; ++i) {
746                 ir_node *param = get_Call_param(call, i);
747
748                 if (is_irn_constlike(param))
749                         ++num_const_args;
750                 else if (is_Sel(param)) {
751                         ir_node *base = param;
752
753                         do {
754                                 base = get_Sel_ptr(base);
755                         } while (is_Sel(base));
756
757                         if (base == get_irg_frame(current_ir_graph))
758                                 ++num_local_adr;
759                 }
760
761         }  /* for */
762
763         if (num_const_args > 0)
764                 cnt_inc(&graph->cnt[gcnt_call_with_cnst_arg]);
765         if (num_const_args == n)
766                 cnt_inc(&graph->cnt[gcnt_call_with_all_cnst_arg]);
767         if (num_local_adr > 0)
768                 cnt_inc(&graph->cnt[gcnt_call_with_local_adr]);
769
770         stat_inc_int_distrib_tbl(status->dist_param_cnt, n);
771 }  /* analyse_params_of_Call */
772
773 /**
774  * Update info on calls.
775  *
776  * @param call   The call
777  * @param graph  The graph entry containing the call
778  */
779 static void stat_update_call(ir_node *call, graph_entry_t *graph)
780 {
781         ir_node   *block = get_nodes_block(call);
782         ir_node   *ptr = get_Call_ptr(call);
783         ir_entity *ent = NULL;
784         ir_graph  *callee = NULL;
785
786         /*
787          * If the block is bad, the whole subgraph will collapse later
788          * so do not count this call.
789          * This happens in dead code.
790          */
791         if (is_Bad(block))
792                 return;
793
794         cnt_inc(&graph->cnt[gcnt_all_calls]);
795
796         /* found a call, this function is not a leaf */
797         graph->is_leaf = 0;
798
799         if (is_SymConst(ptr)) {
800                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
801                         /* ok, we seem to know the entity */
802                         ent = get_SymConst_entity(ptr);
803                         callee = get_entity_irg(ent);
804
805                         /* it is recursive if it calls itself at least once */
806                         if (callee == graph->irg)
807                                 graph->is_recursive = 1;
808                         if (callee == NULL)
809                                 cnt_inc(&graph->cnt[gcnt_external_calls]);
810                 }  /* if */
811         } else {
812                 /* indirect call, we cannot predict the callee */
813                 cnt_inc(&graph->cnt[gcnt_indirect_calls]);
814
815                 /* NOT a leaf call */
816                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
817         }  /* if */
818
819         /* check if it's a chain-call: then the call block must dominate
820          * the end block, i.e. the call lies on every path to the exit. */
821         {
822                 ir_node *curr = get_irg_end_block(graph->irg);
823                 int depth = get_Block_dom_depth(block);
824
825                 for (; curr != block && get_Block_dom_depth(curr) > depth;) {
826                         curr = get_Block_idom(curr);
827
828                         if (! curr || !is_Block(curr))
829                                 break;
830                 }  /* for */
831
832                 if (curr != block)
833                         graph->is_chain_call = 0;
834         }
835
836         /* check, if the callee is a leaf */
837         if (callee) {
838                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
839
840                 if (called->is_analyzed) {
841                         if (! called->is_leaf)
842                                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
843                 }  /* if */
844         }  /* if */
845
846         analyse_params_of_Call(graph, call);
847 }  /* stat_update_call */
848
849 /**
850  * Update info on calls for graphs on the wait queue.
851  */
852 static void stat_update_call_2(ir_node *call, graph_entry_t *graph)
853 {
854         ir_node   *block = get_nodes_block(call);
855         ir_node   *ptr = get_Call_ptr(call);
856         ir_entity *ent = NULL;
857         ir_graph  *callee = NULL;
858
859         /*
860          * If the block is bad, the whole subgraph will collapse later
861          * so do not count this call.
862          * This happens in dead code.
863          */
864         if (is_Bad(block))
865                 return;
866
867         if (is_SymConst(ptr)) {
868                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
869                         /* ok, we seem to know the entity */
870                         ent = get_SymConst_entity(ptr);
871                         callee = get_entity_irg(ent);
872                 }  /* if */
873         }  /* if */
874
875         /* check, if the callee is a leaf */
876         if (callee) {
877                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
878
879                 assert(called->is_analyzed);
880
881                 if (! called->is_leaf)
882                         graph->is_leaf_call = LCS_NON_LEAF_CALL;
883         } else
884                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
885 }  /* stat_update_call_2 */
886
887 /**
888  * Find the base address of a Sel node.
889  *
890  * @param sel  the node
891  *
892  * @return the base address.
893  */
894 static ir_node *find_base_adr(ir_node *sel)
895 {
896         ir_node *ptr = get_Sel_ptr(sel);
897
898         while (is_Sel(ptr)) {
899                 sel = ptr;
900                 ptr = get_Sel_ptr(sel);
901         }
902         return ptr;
903 }  /* find_base_adr */
904
905 /**
906  * Update info on Load/Store address statistics.
907  */
908 static void stat_update_address(ir_node *node, graph_entry_t *graph)
909 {
910         unsigned opc = get_irn_opcode(node);
911         ir_node *base;
912         ir_graph *irg;
913
914         switch (opc) {
915         case iro_SymConst:
916                 /* a global address */
917                 cnt_inc(&graph->cnt[gcnt_global_adr]);
918                 break;
919         case iro_Sel:
920                 base = find_base_adr(node);
921                 irg = current_ir_graph;
922                 if (base == get_irg_frame(irg)) {
923                         /* a local Variable. */
924                         cnt_inc(&graph->cnt[gcnt_local_adr]);
925                 } else {
926                         /* Pointer access */
927                         if (is_Proj(base) && skip_Proj(get_Proj_pred(base)) == get_irg_start(irg)) {
928                                 /* pointer access through parameter, check for THIS */
929                                 ir_entity *ent = get_irg_entity(irg);
930
931                                 if (ent != NULL) {
932                                         ir_type *ent_tp = get_entity_type(ent);
933
934                                         if (get_method_calling_convention(ent_tp) & cc_this_call) {
935                                                 if (get_Proj_proj(base) == 0) {
936                                                         /* THIS pointer */
937                                                         cnt_inc(&graph->cnt[gcnt_this_adr]);
938                                                         goto end_parameter;
939                                                 }  /* if */
940                                         }  /* if */
941                                 }  /* if */
942                                 /* other parameter */
943                                 cnt_inc(&graph->cnt[gcnt_param_adr]);
944 end_parameter: ;
945                         } else {
946                                 /* unknown Pointer access */
947                                 cnt_inc(&graph->cnt[gcnt_other_adr]);
948                         }  /* if */
949                 }  /* if */
950         default:
951                 ;
952         }  /* switch */
953 }  /* stat_update_address */
954
955 /**
956  * Walker for reachable nodes count.
957  */
958 static void update_node_stat(ir_node *node, void *env)
959 {
960         graph_entry_t *graph = (graph_entry_t*)env;
961         node_entry_t *entry;
962
963         ir_op *op = stat_get_irn_op(node);
964         int i, arity = get_irn_arity(node);
965
966         entry = opcode_get_entry(op, graph->opcode_hash);
967
968         cnt_inc(&entry->cnt_alive);
969         cnt_add_i(&graph->cnt[gcnt_edges], arity);
970
971         /* count block edges */
972         update_block_info(node, graph);
973
974         /* count extended block edges */
975         if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
976                 if (graph->irg != get_const_code_irg())
977                         update_extbb_info(node, graph);
978         }  /* if */
979
980         /* handle statistics for special node types */
981
982         switch (op->code) {
983         case iro_Call:
984                 /* check for properties that depend on calls, like recursion/leaf/indirect call */
985                 stat_update_call(node, graph);
986                 break;
987         case iro_Load:
988                 /* check address properties */
989                 stat_update_address(get_Load_ptr(node), graph);
990                 break;
991         case iro_Store:
992                 /* check address properties */
993                 stat_update_address(get_Store_ptr(node), graph);
994                 break;
995         case iro_Phi:
996                 /* check for non-strict Phi nodes */
997                 for (i = arity - 1; i >= 0; --i) {
998                         ir_node *pred = get_Phi_pred(node, i);
999                         if (is_Unknown(pred)) {
1000                                 /* found an Unknown predecessor, graph is not strict */
1001                                 graph->is_strict = 0;
1002                                 break;
1003                         }
1004                 }
1005         default:
1006                 ;
1007         }  /* switch */
1008
1009         /* we want to count the constant IN nodes, not the CSE'ed constants themselves */
1010         if (status->stat_options & FIRMSTAT_COUNT_CONSTS) {
1011                 int i;
1012
1013                 for (i = get_irn_arity(node) - 1; i >= 0; --i) {
1014                         ir_node *pred = get_irn_n(node, i);
1015
1016                         if (is_Const(pred)) {
1017                                 /* check properties of constants */
1018                                 stat_update_const(status, pred, graph);
1019                         }  /* if */
1020                 }  /* for */
1021         }  /* if */
1022 }  /* update_node_stat */
1023
1024 /**
1025  * Walker for reachable nodes count for graphs on the wait_q.
1026  */
1027 static void update_node_stat_2(ir_node *node, void *env)
1028 {
1029         graph_entry_t *graph = (graph_entry_t*)env;
1030
1031         /* check for properties that depend on calls, like recursion/leaf/indirect call */
1032         if (is_Call(node))
1033                 stat_update_call_2(node, graph);
1034 }  /* update_node_stat_2 */
1035
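/* The address_mark set maps each node to its address-mark bits; lookups in
 * it compare on the node pointer only (see address_mark_cmp above). */
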
1036 /**
1037  * Get the current address mark.
1038  */
1039 static unsigned get_adr_mark(graph_entry_t *graph, ir_node *node)
1040 {
1041         address_mark_entry_t *value = (address_mark_entry_t*)set_find(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1042
1043         return value ? value->mark : 0;
1044 }  /* get_adr_mark */
1045
1046 /**
1047  * Set the current address mark.
1048  */
1049 static void set_adr_mark(graph_entry_t *graph, ir_node *node, unsigned val)
1050 {
1051         address_mark_entry_t *value = (address_mark_entry_t*)set_insert(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1052
1053         value->mark = val;
1054 }  /* set_adr_mark */
1055
1056 #undef DUMP_ADR_MODE
1057
1058 #ifdef DUMP_ADR_MODE
1059 /**
1060  * a vcg attribute hook: Color a node with a different color if
1061  * it's identified as a part of an address expression or at least referenced
1062  * by an address expression.
1063  */
1064 static int stat_adr_mark_hook(FILE *F, ir_node *node, ir_node *local)
1065 {
1066         ir_node *n           = local ? local : node;
1067         ir_graph *irg        = get_irn_irg(n);
1068         graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1069         unsigned mark        = get_adr_mark(graph, n);
1070
1071         if (mark & MARK_ADDRESS_CALC)
1072                 fprintf(F, "color: purple");
1073         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1074                 fprintf(F, "color: pink");
1075         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1076                 fprintf(F, "color: lightblue");
1077         else
1078                 return 0;
1079
1080         /* I know the color! */
1081         return 1;
1082 }  /* stat_adr_mark_hook */
1083 #endif /* DUMP_ADR_MODE */
1084
1085 /**
1086  * Return the "operational" mode of a Firm node.
1087  */
1088 static ir_mode *get_irn_op_mode(ir_node *node)
1089 {
1090         switch (get_irn_opcode(node)) {
1091         case iro_Load:
1092                 return get_Load_mode(node);
1093         case iro_Store:
1094                 return get_irn_mode(get_Store_value(node));
1095         case iro_Div:
1096                 return get_irn_mode(get_Div_left(node));
1097         case iro_Mod:
1098                 return get_irn_mode(get_Mod_left(node));
1099         case iro_Cmp:
1100                 /* Cmp is no address calculation, or is it? */
1101         default:
1102                 return get_irn_mode(node);
1103         }  /* switch */
1104 }  /* get_irn_op_mode */
1105
1106 /**
1107  * Post-walker that marks every node that is an address calculation.
1108  *
1109  * Users of a node must be visited first. We ensure this by
1110  * calling it in the post of an outs walk. This should work even in cycles,
1111  * while the normal pre-walk will not.
1112  */
1113 static void mark_address_calc(ir_node *node, void *env)
1114 {
1115         graph_entry_t *graph = (graph_entry_t*)env;
1116         ir_mode *mode = get_irn_op_mode(node);
1117         int i, n;
1118         unsigned mark_preds = MARK_REF_NON_ADR;
1119
1120         if (! mode_is_data(mode))
1121                 return;
1122
1123         if (mode_is_reference(mode)) {
1124                 /* a reference is calculated here, we are sure */
1125                 set_adr_mark(graph, node, MARK_ADDRESS_CALC);
1126
1127                 mark_preds = MARK_REF_ADR;
1128         } else {
1129                 unsigned mark = get_adr_mark(graph, node);
1130
1131                 if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR) {
1132                         /*
1133                          * this node has no reference mode, but is only
1134                          * referenced by address calculations
1135                          */
1136                         mark_preds = MARK_REF_ADR;
1137                 }  /* if */
1138         }  /* if */
1139
1140         /* mark all predecessors */
1141         for (i = 0, n = get_irn_arity(node); i < n; ++i) {
1142                 ir_node *pred = get_irn_n(node, i);
1143
1144                 mode = get_irn_op_mode(pred);
1145                 if (! mode_is_data(mode))
1146                         continue;
1147
1148                 set_adr_mark(graph, pred, get_adr_mark(graph, pred) | mark_preds);
1149         }  /* for */
1150 }  /* mark_address_calc */
1151
1152 /**
1153  * Post-walker that counts the address calculation operations
1154  * that were marked by mark_address_calc().
1155  *
1156  * It runs in a normal graph walk after the marking outs walk, so every
1157  * node's mark is already final when it is counted.
1158  */
1159 static void count_adr_ops(ir_node *node, void *env)
1160 {
1161         graph_entry_t *graph = (graph_entry_t*)env;
1162         unsigned mark        = get_adr_mark(graph, node);
1163
1164         if (mark & MARK_ADDRESS_CALC)
1165                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1166         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1167                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1168         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1169                 cnt_inc(&graph->cnt[gcnt_all_adr_ops]);
1170 }  /* count_adr_ops */
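
/* mark_address_calc() and count_adr_ops() form a two-pass scheme: the first
 * pass marks address computations during an outs walk, the second pass counts
 * the marked nodes during a normal graph walk; both passes are driven by
 * update_graph_stat() below. */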
1171
1172 /**
1173  * Called for every graph when the graph is either deleted or stat_dump_snapshot()
1174  * is called; it must recalculate all statistic info.
1175  *
1176  * @param global    The global entry
1177  * @param graph     The current entry
1178  */
1179 static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph)
1180 {
1181         node_entry_t *entry;
1182         int i;
1183
1184         /* first clear the alive counters in the graph */
1185         foreach_pset(graph->opcode_hash, node_entry_t*, entry) {
1186                 cnt_clr(&entry->cnt_alive);
1187         }  /* foreach_pset */
1188
1189         /* set pessimistic values */
1190         graph->is_leaf       = 1;
1191         graph->is_leaf_call  = LCS_UNKNOWN;
1192         graph->is_recursive  = 0;
1193         graph->is_chain_call = 1;
1194         graph->is_strict     = 1;
1195
1196         /* create new block counter */
1197         graph->block_hash = new_pset(block_cmp, 5);
1198
1199         /* we need dominator info */
1200         if (graph->irg != get_const_code_irg()) {
1201                 assure_doms(graph->irg);
1202
1203                 if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
1204                         /* we need extended basic blocks */
1205                         compute_extbb(graph->irg);
1206
1207                         /* create new extbb counter */
1208                         graph->extbb_hash = new_pset(block_cmp, 5);
1209                 }  /* if */
1210         }  /* if */
1211
1212         /* count the nodes in the graph */
1213         irg_walk_graph(graph->irg, update_node_stat, NULL, graph);
1214
1215 #if 0
1216         /* Uncomment this code if chain-call means exactly one call. */
1217         entry = opcode_get_entry(op_Call, graph->opcode_hash);
1218
1219         /* check if we have more than 1 call */
1220         if (cnt_gt(entry->cnt_alive, 1))
1221                 graph->is_chain_call = 0;
1222 #endif
1223
1224         /* recursive functions are never chain calls, leaf functions have no calls */
1225         if (graph->is_recursive || graph->is_leaf)
1226                 graph->is_chain_call = 0;
1227
1228         /* assume we walk every graph only ONCE, so we can sum up the global count here */
1229         foreach_pset(graph->opcode_hash, node_entry_t*, entry) {
1230                 node_entry_t *g_entry = opcode_get_entry(entry->op, global->opcode_hash);
1231
1232                 /* update the node counter */
1233                 cnt_add(&g_entry->cnt_alive, &entry->cnt_alive);
1234         }  /* foreach_pset */
1235
1236         /* count the number of address calculations */
1237         if (graph->irg != get_const_code_irg()) {
1238                 ir_graph *rem = current_ir_graph;
1239
1240                 assure_irg_outs(graph->irg);
1241
1242                 /* Must be done on the outs graph */
1243                 current_ir_graph = graph->irg;
1244                 irg_out_walk(get_irg_start(graph->irg), NULL, mark_address_calc, graph);
1245                 current_ir_graph = rem;
1246
1247 #ifdef DUMP_ADR_MODE
1248                 /* register the vcg hook and dump the graph for test */
1249                 set_dump_node_vcgattr_hook(stat_adr_mark_hook);
1250                 dump_ir_block_graph(graph->irg, "-adr");
1251                 set_dump_node_vcgattr_hook(NULL);
1252 #endif /* DUMP_ADR_MODE */
1253
1254                 irg_walk_graph(graph->irg, NULL, count_adr_ops, graph);
1255         }  /* if */
1256
1257         /* count the DAG's */
1258         if (status->stat_options & FIRMSTAT_COUNT_DAG)
1259                 count_dags_in_graph(global, graph);
1260
1261         /* calculate the patterns of this graph */
1262         stat_calc_pattern_history(graph->irg);
1263
1264         /* a leaf function does not call others */
1265         if (graph->is_leaf)
1266                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
1267         else if (graph->is_leaf_call == LCS_UNKNOWN) {
1268                 /* we still don't know if this graph calls leaf-functions, so enqueue */
1269                 pdeq_putl(status->wait_q, graph);
1270         }  /* if */
1271
1272         /* we have analyzed this graph */
1273         graph->is_analyzed = 1;
1274
1275         /* accumulate all counters */
1276         for (i = 0; i < _gcnt_last; ++i)
1277                 cnt_add(&global->cnt[i], &graph->cnt[i]);
1278 }  /* update_graph_stat */
1279
1280 /**
1281  * Called for every graph that was on the wait_q in stat_dump_snapshot();
1282  * it must finish all statistic info calculations.
1283  *
1284  * @param global    The global entry
1285  * @param graph     The current entry
1286  */
1287 static void update_graph_stat_2(graph_entry_t *global, graph_entry_t *graph)
1288 {
1289         (void) global;
1290         if (graph->is_deleted) {
1291                 /* deleted, ignore */
1292                 return;
1293         }
1294
1295         if (graph->irg) {
1296                 /* finish the call information for the nodes in the graph */
1297                 irg_walk_graph(graph->irg, update_node_stat_2, NULL, graph);
1298
1299                 if (graph->is_leaf_call == LCS_UNKNOWN)
1300                         graph->is_leaf_call = LCS_LEAF_CALL;
1301         }  /* if */
1302 }  /* update_graph_stat_2 */
1303
1304 /**
1305  * Register a dumper.
1306  */
1307 static void stat_register_dumper(const dumper_t *dumper)
1308 {
1309         dumper_t *p = XMALLOC(dumper_t);
1310
1311         memcpy(p, dumper, sizeof(*p));
1312
1313         p->next        = status->dumper;
1314         p->status      = status;
1315         status->dumper = p;
1316
1317         /* FIXME: memory leak */
1318 }  /* stat_register_dumper */
1319
1320 /**
1321  * Dumps the statistics of an IR graph.
1322  */
1323 static void stat_dump_graph(graph_entry_t *entry)
1324 {
1325         dumper_t *dumper;
1326
1327         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1328                 if (dumper->dump_graph)
1329                         dumper->dump_graph(dumper, entry);
1330         }  /* for */
1331 }  /* stat_dump_graph */
1332
1333 /**
1334  * Calls all registered dumper functions.
1335  */
1336 static void stat_dump_registered(graph_entry_t *entry)
1337 {
1338         dumper_t *dumper;
1339
1340         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1341                 if (dumper->func_map) {
1342                         dump_graph_FUNC func;
1343
1344                         foreach_pset(dumper->func_map, dump_graph_FUNC, func)
1345                                 func(dumper, entry);
1346                 }  /* if */
1347         }  /* for */
1348 }  /* stat_dump_registered */
1349
1350 /**
1351  * Dumps a constant table.
1352  */
1353 static void stat_dump_consts(const constant_info_t *tbl)
1354 {
1355         dumper_t *dumper;
1356
1357         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1358                 if (dumper->dump_const_tbl)
1359                         dumper->dump_const_tbl(dumper, tbl);
1360         }  /* for */
1361 }  /* stat_dump_consts */
1362
1363 /**
1364  * Dumps the parameter distribution
1365  */
1366 static void stat_dump_param_tbl(const distrib_tbl_t *tbl, graph_entry_t *global)
1367 {
1368         dumper_t *dumper;
1369
1370         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1371                 if (dumper->dump_param_tbl)
1372                         dumper->dump_param_tbl(dumper, tbl, global);
1373         }  /* for */
1374 }  /* stat_dump_param_tbl */
1375
1376 /**
1377  * Dumps the optimization counter
1378  */
1379 static void stat_dump_opt_cnt(const counter_t *tbl, unsigned len)
1380 {
1381         dumper_t *dumper;
1382
1383         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1384                 if (dumper->dump_opt_cnt)
1385                         dumper->dump_opt_cnt(dumper, tbl, len);
1386         }  /* for */
1387 }  /* stat_dump_opt_cnt */
1388
1389 /**
1390  * Initialize the dumper.
1391  */
1392 static void stat_dump_init(const char *name)
1393 {
1394         dumper_t *dumper;
1395
1396         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1397                 if (dumper->init)
1398                         dumper->init(dumper, name);
1399         }  /* for */
1400 }  /* stat_dump_init */
1401
1402 /**
1403  * Finish the dumper.
1404  */
1405 static void stat_dump_finish(void)
1406 {
1407         dumper_t *dumper;
1408
1409         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1410                 if (dumper->finish)
1411                         dumper->finish(dumper);
1412         }  /* for */
1413 }  /* stat_dump_finish */
1414
1415 /**
1416  * Register an additional function for all dumpers.
1417  */
1418 void stat_register_dumper_func(dump_graph_FUNC func)
1419 {
1420         dumper_t *dumper;
1421
1422         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1423                 if (! dumper->func_map)
1424                         dumper->func_map = pset_new_ptr(3);
1425                 pset_insert_ptr(dumper->func_map, (void*)func);
1426         }  /* for */
1427 }  /* stat_register_dumper_func */
1428
1429 /* ---------------------------------------------------------------------- */
1430
1431 /*
1432  * Helper: get an ir_op from an opcode.
1433  */
1434 ir_op *stat_get_op_from_opcode(ir_opcode code)
1435 {
1436         return opcode_find_entry(code, status->ir_op_hash);
1437 }  /* stat_get_op_from_opcode */
1438
1439 /**
1440  * Hook: A new IR op is registered.
1441  *
1442  * @param ctx  the hook context
1443  * @param op   the new IR opcode that was created.
1444  */
1445 static void stat_new_ir_op(void *ctx, ir_op *op)
1446 {
1447         (void) ctx;
1448         if (! status->stat_options)
1449                 return;
1450
1451         STAT_ENTER;
1452         {
1453                 graph_entry_t *graph = graph_get_entry(NULL, status->irg_hash);
1454
1455                 /* execute for side effect :-) */
1456                 (void)opcode_get_entry(op, graph->opcode_hash);
1457
1458                 pset_insert(status->ir_op_hash, op, op->code);
1459         }
1460         STAT_LEAVE;
1461 }  /* stat_new_ir_op */
1462
1463 /**
1464  * Hook: An IR op is freed.
1465  *
1466  * @param ctx  the hook context
1467  * @param op   the IR opcode that is freed
1468  */
1469 static void stat_free_ir_op(void *ctx, ir_op *op)
1470 {
1471         (void) ctx;
1472         (void) op;
1473         if (! status->stat_options)
1474                 return;
1475
1476         STAT_ENTER;
1477         {
1478         }
1479         STAT_LEAVE;
1480 }  /* stat_free_ir_op */
1481
1482 /**
1483  * Hook: A new node is created.
1484  *
1485  * @param ctx   the hook context
1486  * @param irg   the IR graph on which the node is created
1487  * @param node  the new IR node that was created
1488  */
1489 static void stat_new_node(void *ctx, ir_graph *irg, ir_node *node)
1490 {
1491         (void) ctx;
1492         (void) irg;
1493         if (! status->stat_options)
1494                 return;
1495
1496         /* do NOT count during dead node elimination */
1497         if (status->in_dead_node_elim)
1498                 return;
1499
1500         STAT_ENTER;
1501         {
1502                 node_entry_t *entry;
1503                 graph_entry_t *graph;
1504                 ir_op *op = stat_get_irn_op(node);
1505
1506                 /* increase global value */
1507                 graph = graph_get_entry(NULL, status->irg_hash);
1508                 entry = opcode_get_entry(op, graph->opcode_hash);
1509                 cnt_inc(&entry->new_node);
1510
1511                 /* increase local value */
1512                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1513                 entry = opcode_get_entry(op, graph->opcode_hash);
1514                 cnt_inc(&entry->new_node);
1515         }
1516         STAT_LEAVE;
1517 }  /* stat_new_node */
1518
1519 /**
1520  * Hook: A node is changed into an Id node
1521  *
1522  * @param ctx   the hook context
1523  * @param node  the IR node that will be turned into an ID
1524  */
1525 static void stat_turn_into_id(void *ctx, ir_node *node)
1526 {
1527         (void) ctx;
1528         if (! status->stat_options)
1529                 return;
1530
1531         STAT_ENTER;
1532         {
1533                 node_entry_t *entry;
1534                 graph_entry_t *graph;
1535                 ir_op *op = stat_get_irn_op(node);
1536
1537                 /* increase global value */
1538                 graph = graph_get_entry(NULL, status->irg_hash);
1539                 entry = opcode_get_entry(op, graph->opcode_hash);
1540                 cnt_inc(&entry->into_Id);
1541
1542                 /* increase local value */
1543                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1544                 entry = opcode_get_entry(op, graph->opcode_hash);
1545                 cnt_inc(&entry->into_Id);
1546         }
1547         STAT_LEAVE;
1548 }  /* stat_turn_into_id */
1549
1550 /**
1551  * Hook: A node is normalized
1552  *
1553  * @param ctx   the hook context
1554  * @param node  the IR node that was normalized
1555  */
1556 static void stat_normalize(void *ctx, ir_node *node)
1557 {
1558         (void) ctx;
1559         if (! status->stat_options)
1560                 return;
1561
1562         STAT_ENTER;
1563         {
1564                 node_entry_t *entry;
1565                 graph_entry_t *graph;
1566                 ir_op *op = stat_get_irn_op(node);
1567
1568                 /* increase global value */
1569                 graph = graph_get_entry(NULL, status->irg_hash);
1570                 entry = opcode_get_entry(op, graph->opcode_hash);
1571                 cnt_inc(&entry->normalized);
1572
1573                 /* increase local value */
1574                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1575                 entry = opcode_get_entry(op, graph->opcode_hash);
1576                 cnt_inc(&entry->normalized);
1577         }
1578         STAT_LEAVE;
1579 }  /* stat_normalize */
1580
1581 /**
1582  * Hook: A new graph was created
1583  *
1584  * @param ctx  the hook context
1585  * @param irg  the new IR graph that was created
1586  * @param ent  the entity of this graph
1587  */
1588 static void stat_new_graph(void *ctx, ir_graph *irg, ir_entity *ent)
1589 {
1590         (void) ctx;
1591         if (! status->stat_options)
1592                 return;
1593
1594         STAT_ENTER;
1595         {
1596                 /* execute for side effect :-) */
1597                 graph_entry_t * graph = graph_get_entry(irg, status->irg_hash);
1598
1599                 graph->ent           = ent;
1600                 graph->is_deleted    = 0;
1601                 graph->is_leaf       = 0;
1602                 graph->is_leaf_call  = 0;
1603                 graph->is_recursive  = 0;
1604                 graph->is_chain_call = 0;
1605                 graph->is_strict     = 1;
1606                 graph->is_analyzed   = 0;
1607         }
1608         STAT_LEAVE;
1609 }  /* stat_new_graph */
1610
1611 /**
1612  * Hook: A graph will be deleted
1613  *
1614  * @param ctx  the hook context
1615  * @param irg  the IR graph that will be deleted
1616  *
1617  * Note that we still hold the information for this graph
1618  * in our hash maps; only a flag is set which prevents this
1619  * information from being changed, i.e. it is "frozen" from now on.
1620  */
1621 static void stat_free_graph(void *ctx, ir_graph *irg)
1622 {
1623         (void) ctx;
1624         if (! status->stat_options)
1625                 return;
1626
1627         STAT_ENTER;
1628         {
1629                 graph_entry_t *graph  = graph_get_entry(irg, status->irg_hash);
1630                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
1631
1632                 graph->is_deleted = 1;
1633
1634                 if (status->stat_options & FIRMSTAT_COUNT_DELETED) {
1635                         /* count the nodes of the graph now; it will be destroyed later */
1636                         update_graph_stat(global, graph);
1637                 }  /* if */
1638         }
1639         STAT_LEAVE;
1640 }  /* stat_free_graph */
1641
1642 /**
1643  * Hook: A walk over a graph is initiated. Do not count walks from statistic code.
1644  *
1645  * @param ctx  the hook context
1646  * @param irg  the IR graph that will be walked
1647  * @param pre  the pre walker
1648  * @param post the post walker
1649  */
1650 static void stat_irg_walk(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1651 {
1652         (void) ctx;
1653         (void) pre;
1654         (void) post;
1655         if (! status->stat_options)
1656                 return;
1657
1658         STAT_ENTER_SINGLE;
1659         {
1660                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1661
1662                 cnt_inc(&graph->cnt[gcnt_acc_walked]);
1663         }
1664         STAT_LEAVE;
1665 }  /* stat_irg_walk */
1666
1667 /**
1668  * Hook: A walk over a graph in block-wise order is initiated. Do not count walks from statistic code.
1669  *
1670  * @param ctx  the hook context
1671  * @param irg  the IR graph that will be walked
1672  * @param pre  the pre walker
1673  * @param post the post walker
1674  */
1675 static void stat_irg_walk_blkwise(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1676 {
1677         /* for now, do NOT differentiate between blockwise and normal */
1678         stat_irg_walk(ctx, irg, pre, post);
1679 }  /* stat_irg_walk_blkwise */
1680
1681 /**
1682  * Hook: A walk over the graph's blocks is initiated. Do not count walks from statistic code.
1683  *
1684  * @param ctx  the hook context
1685  * @param irg  the IR graph that will be walked
1686  * @param node the IR node
1687  * @param pre  the pre walker
1688  * @param post the post walker
1689  */
1690 static void stat_irg_block_walk(void *ctx, ir_graph *irg, ir_node *node, generic_func *pre, generic_func *post)
1691 {
1692         (void) ctx;
1693         (void) node;
1694         (void) pre;
1695         (void) post;
1696         if (! status->stat_options)
1697                 return;
1698
1699         STAT_ENTER_SINGLE;
1700         {
1701                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1702
1703                 cnt_inc(&graph->cnt[gcnt_acc_walked_blocks]);
1704         }
1705         STAT_LEAVE;
1706 }  /* stat_irg_block_walk */
1707
1708 /**
1709  * Called for every node that is removed due to an optimization.
1710  *
1711  * @param n     the IR node that will be removed
1712  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
1713  * @param kind  the optimization kind
1714  */
1715 static void removed_due_opt(ir_node *n, hmap_opt_entry_t *hmap, hook_opt_kind kind)
1716 {
1717         opt_entry_t *entry;
1718         ir_op *op = stat_get_irn_op(n);
1719
1720         /* ignore CSE for Constants */
1721         if (kind == HOOK_OPT_CSE && (is_Const(n) || is_SymConst(n)))
1722                 return;
1723
1724         /* increase global value */
1725         entry = opt_get_entry(op, hmap);
1726         cnt_inc(&entry->count);
1727 }  /* removed_due_opt */
1728
1729 /**
1730  * Hook: Some nodes were optimized into some others due to an optimization.
1731  *
1732  * @param ctx  the hook context
1733  */
1734 static void stat_merge_nodes(
1735     void *ctx,
1736     ir_node **new_node_array, int new_num_entries,
1737     ir_node **old_node_array, int old_num_entries,
1738     hook_opt_kind opt)
1739 {
1740         (void) ctx;
1741         if (! status->stat_options)
1742                 return;
1743
1744         STAT_ENTER;
1745         {
1746                 int i, j;
1747                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1748
1749                 cnt_inc(&status->num_opts[opt]);
1750                 if (status->reassoc_run)
1751                         opt = HOOK_OPT_REASSOC;
1752
1753                 for (i = 0; i < old_num_entries; ++i) {
1754                         /* nodes might be in both the new and the old array; if we
1755                            find a node in both sets, it is NOT removed */
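                             /* e.g. old = {a, b}, new = {b, c}: only a counts as removed */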
1756                         for (j = 0; j < new_num_entries; ++j) {
1757                                 if (old_node_array[i] == new_node_array[j])
1758                                         break;
1759                         }  /* for */
1760                         if (j >= new_num_entries) {
1761                                 int xopt = opt;
1762
1763                                 /* sometimes we did not detect that it was replaced by a Const */
1764                                 if (opt == HOOK_OPT_CONFIRM && new_num_entries == 1) {
1765                                         ir_op *op = get_irn_op(new_node_array[0]);
1766
1767                                         if (op == op_Const || op == op_SymConst)
1768                                                 xopt = HOOK_OPT_CONFIRM_C;
1769                                 }  /* if */
1770
1771                                 removed_due_opt(old_node_array[i], graph->opt_hash[xopt], (hook_opt_kind)xopt);
1772                         }  /* if */
1773                 }  /* for */
1774         }
1775         STAT_LEAVE;
1776 }  /* stat_merge_nodes */
1777
1778 /**
1779  * Hook: Reassociation is started/stopped.
1780  *
1781  * @param ctx   the hook context
1782  * @param flag  if non-zero, reassociation is started, otherwise it is stopped
1783  */
1784 static void stat_reassociate(void *ctx, int flag)
1785 {
1786         (void) ctx;
1787         if (! status->stat_options)
1788                 return;
1789
1790         STAT_ENTER;
1791         {
1792                 status->reassoc_run = flag;
1793         }
1794         STAT_LEAVE;
1795 }  /* stat_reassociate */
1796
1797 /**
1798  * Hook: A node was lowered into other nodes
1799  *
1800  * @param ctx  the hook context
1801  * @param node the IR node that will be lowered
1802  */
1803 static void stat_lower(void *ctx, ir_node *node)
1804 {
1805         (void) ctx;
1806         if (! status->stat_options)
1807                 return;
1808
1809         STAT_ENTER;
1810         {
1811                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1812
1813                 removed_due_opt(node, graph->opt_hash[HOOK_LOWERED], HOOK_LOWERED);
1814         }
1815         STAT_LEAVE;
1816 }  /* stat_lower */
1817
1818 /**
1819  * Hook: A graph was inlined.
1820  *
1821  * @param ctx  the hook context
1822  * @param call the IR call that will be changed into the body of
1823  *             the called IR graph
1824  * @param called_irg  the IR graph representing the called routine
1825  */
1826 static void stat_inline(void *ctx, ir_node *call, ir_graph *called_irg)
1827 {
1828         (void) ctx;
1829         if (! status->stat_options)
1830                 return;
1831
1832         STAT_ENTER;
1833         {
1834                 ir_graph *irg = get_irn_irg(call);
1835                 graph_entry_t *i_graph = graph_get_entry(called_irg, status->irg_hash);
1836                 graph_entry_t *graph   = graph_get_entry(irg, status->irg_hash);
1837
1838                 cnt_inc(&graph->cnt[gcnt_acc_got_inlined]);
1839                 cnt_inc(&i_graph->cnt[gcnt_acc_was_inlined]);
1840         }
1841         STAT_LEAVE;
1842 }  /* stat_inline */
1843
1844 /**
1845  * Hook: A graph with tail-recursions was optimized.
1846  *
1847  * @param ctx  the hook context
1848  */
1849 static void stat_tail_rec(void *ctx, ir_graph *irg, int n_calls)
1850 {
1851         (void) ctx;
1852         if (! status->stat_options)
1853                 return;
1854
1855         STAT_ENTER;
1856         {
1857                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1858
1859                 graph->num_tail_recursion += n_calls;
1860         }
1861         STAT_LEAVE;
1862 }  /* stat_tail_rec */
1863
1864 /**
1865  * Strength reduction was performed on an iteration variable.
1866  *
1867  * @param ctx  the hook context
1868  */
1869 static void stat_strength_red(void *ctx, ir_graph *irg, ir_node *strong)
1870 {
1871         (void) ctx;
1872         if (! status->stat_options)
1873                 return;
1874
1875         STAT_ENTER;
1876         {
1877                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1878                 cnt_inc(&graph->cnt[gcnt_acc_strength_red]);
1879
1880                 removed_due_opt(strong, graph->opt_hash[HOOK_OPT_STRENGTH_RED], HOOK_OPT_STRENGTH_RED);
1881         }
1882         STAT_LEAVE;
1883 }  /* stat_strength_red */
1884
1885 /**
1886  * Hook: Start/Stop the dead node elimination.
1887  *
1888  * @param ctx  the hook context
1889  */
1890 static void stat_dead_node_elim(void *ctx, ir_graph *irg, int start)
1891 {
1892         (void) ctx;
1893         (void) irg;
1894         if (! status->stat_options)
1895                 return;
1896
1897         status->in_dead_node_elim = (start != 0);
1898 }  /* stat_dead_node_elim */
1899
1900 /**
1901  * Hook: if-conversion was tried.
1902  */
1903 static void stat_if_conversion(void *context, ir_graph *irg, ir_node *phi,
1904                                int pos, ir_node *mux, if_result_t reason)
1905 {
1906         (void) context;
1907         (void) phi;
1908         (void) pos;
1909         (void) mux;
1910         if (! status->stat_options)
1911                 return;
1912
1913         STAT_ENTER;
1914         {
1915                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1916
1917                 cnt_inc(&graph->cnt[gcnt_if_conv + reason]);
1918         }
1919         STAT_LEAVE;
1920 }  /* stat_if_conversion */
1921
1922 /**
1923  * Hook: real function call was optimized.
1924  */
1925 static void stat_func_call(void *context, ir_graph *irg, ir_node *call)
1926 {
1927         (void) context;
1928         (void) call;
1929         if (! status->stat_options)
1930                 return;
1931
1932         STAT_ENTER;
1933         {
1934                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1935
1936                 cnt_inc(&graph->cnt[gcnt_acc_real_func_call]);
1937         }
1938         STAT_LEAVE;
1939 }  /* stat_func_call */
1940
1941 /**
1942  * Hook: A multiply was replaced by a series of Shifts/Adds/Subs.
1943  *
1944  * @param ctx  the hook context
1945  */
1946 static void stat_arch_dep_replace_mul_with_shifts(void *ctx, ir_node *mul)
1947 {
1948         (void) ctx;
1949         if (! status->stat_options)
1950                 return;
1951
1952         STAT_ENTER;
1953         {
1954                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1955                 removed_due_opt(mul, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1956         }
1957         STAT_LEAVE;
1958 }  /* stat_arch_dep_replace_mul_with_shifts */
1959
1960 /**
1961  * Hook: A division by const was replaced.
1962  *
1963  * @param ctx   the hook context
1964  * @param node  the division node that will be optimized
1965  */
1966 static void stat_arch_dep_replace_division_by_const(void *ctx, ir_node *node)
1967 {
1968         (void) ctx;
1969         if (! status->stat_options)
1970                 return;
1971
1972         STAT_ENTER;
1973         {
1974                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1975                 removed_due_opt(node, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1976         }
1977         STAT_LEAVE;
1978 }  /* stat_arch_dep_replace_division_by_const */
1979
1980 /*
1981  * Update the register pressure of a block.
1982  *
1983  * @param irg        the irg containing the block
1984  * @param block      the block for which the reg pressure should be set
1985  * @param pressure   the pressure
1986  * @param class_name the name of the register class
1987  */
1988 void stat_be_block_regpressure(ir_graph *irg, ir_node *block, int pressure, const char *class_name)
1989 {
1990         if (! status->stat_options)
1991                 return;
1992
1993         STAT_ENTER;
1994         {
1995                 graph_entry_t        *graph = graph_get_entry(irg, status->irg_hash);
1996                 be_block_entry_t     *block_ent;
1997                 reg_pressure_entry_t *rp_ent;
1998
1999                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2000                 rp_ent    = OALLOCZ(&status->be_data, reg_pressure_entry_t);
2001
2002                 rp_ent->class_name = class_name;
2003                 rp_ent->pressure   = pressure;
2004
2005                 pset_insert(block_ent->reg_pressure, rp_ent, HASH_PTR(class_name));
2006         }
2007         STAT_LEAVE;
2008 }  /* stat_be_block_regpressure */
2009
2010 /**
2011  * Update the distribution of ready nodes of a block
2012  *
2013  * @param irg        the irg containing the block
2014  * @param block      the block for which the number of ready nodes is recorded
2015  * @param num_ready  the number of ready nodes
2016  */
2017 void stat_be_block_sched_ready(ir_graph *irg, ir_node *block, int num_ready)
2018 {
2019         if (! status->stat_options)
2020                 return;
2021
2022         STAT_ENTER;
2023         {
2024                 graph_entry_t    *graph = graph_get_entry(irg, status->irg_hash);
2025                 be_block_entry_t *block_ent;
2026
2027                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2028
2029                 /* increase the counter of corresponding number of ready nodes */
2030                 stat_inc_int_distrib_tbl(block_ent->sched_ready, num_ready);
2031         }
2032         STAT_LEAVE;
2033 }  /* stat_be_block_sched_ready */
2034
2035 /**
2036  * Update the permutation statistic of a block.
2037  *
2038  * @param class_name the name of the register class
2039  * @param n_regs     number of registers in the register class
2040  * @param perm       the perm node
2041  * @param block      the block containing the perm
2042  * @param size       the size of the perm
2043  * @param real_size  number of pairs with different registers
2044  */
2045 void stat_be_block_stat_perm(const char *class_name, int n_regs, ir_node *perm, ir_node *block,
2046                              int size, int real_size)
2047 {
2048         if (! status->stat_options)
2049                 return;
2050
2051         STAT_ENTER;
2052         {
2053                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2054                 be_block_entry_t   *block_ent;
2055                 perm_class_entry_t *pc_ent;
2056                 perm_stat_entry_t  *ps_ent;
2057
2058                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2059                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2060                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2061
2062                 pc_ent->n_regs = n_regs;
2063
2064                 /* update information */
2065                 ps_ent->size      = size;
2066                 ps_ent->real_size = real_size;
2067         }
2068         STAT_LEAVE;
2069 }  /* stat_be_block_stat_perm */
2070
2071 /**
2072  * Update the permutation statistic of a single perm.
2073  *
2074  * @param class_name the name of the register class
2075  * @param perm       the perm node
2076  * @param block      the block containing the perm
2077  * @param is_chain   1 if chain, 0 if cycle
2078  * @param size       length of the cycle/chain
2079  * @param n_ops      the number of ops representing this cycle/chain after lowering
2080  */
2081 void stat_be_block_stat_permcycle(const char *class_name, ir_node *perm, ir_node *block,
2082                                   int is_chain, int size, int n_ops)
2083 {
2084         if (! status->stat_options)
2085                 return;
2086
2087         STAT_ENTER;
2088         {
2089                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2090                 be_block_entry_t   *block_ent;
2091                 perm_class_entry_t *pc_ent;
2092                 perm_stat_entry_t  *ps_ent;
2093
2094                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2095                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2096                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2097
2098                 if (is_chain) {
2099                         ps_ent->n_copies += n_ops;
2100                         stat_inc_int_distrib_tbl(ps_ent->chains, size);
2101                 } else {
2102                         ps_ent->n_exchg += n_ops;
2103                         stat_inc_int_distrib_tbl(ps_ent->cycles, size);
2104                 }  /* if */
2105         }
2106         STAT_LEAVE;
2107 }  /* stat_be_block_stat_permcycle */
2108
2109 /* Dumps a statistics snapshot. */
2110 void stat_dump_snapshot(const char *name, const char *phase)
2111 {
2112         char fname[2048];
2113         const char *p;
2114         size_t l;
2115
2116         if (! status->stat_options)
2117                 return;
2118
2119         STAT_ENTER;
2120         {
2121                 graph_entry_t *entry;
2122                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
2123
2124                 /*
2125                  * The constant counter is only global, so we clear it here.
2126                  * Note that it does NOT contain the constants in DELETED
2127                  * graphs due to this.
2128                  */
2129                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2130                         stat_const_clear(status);
2131
2132                 /* build the name */
2133                 p = strrchr(name, '/');
2134 #ifdef _WIN32
2135                 {
2136                         const char *q;
2137
2138                         q = strrchr(name, '\\');
2139
2140                         /* NULL might not be the smallest pointer */
2141                         if (q && (!p || q > p))
2142                                 p = q;
2143                 }
2144 #endif /* _WIN32 */
2145                 if (p) {
2146                         ++p;
2147                         l = p - name;
2148
2149                         if (l > sizeof(fname) - 1)
2150                                 l = sizeof(fname) - 1;
2151
2152                         memcpy(fname, name, l);
2153                         fname[l] = '\0';
2154                 } else {
2155                         fname[0] = '\0';
2156                         p = name;
2157                 }  /* if */
2158                 strncat(fname, "firmstat-", sizeof(fname) - strlen(fname) - 1);
2159                 strncat(fname, phase, sizeof(fname) - strlen(fname) - 1);
2160                 strncat(fname, "-", sizeof(fname) - strlen(fname) - 1);
2161                 strncat(fname, p, sizeof(fname) - strlen(fname) - 1);
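                 /*
                  * fname is now "<dir of name>firmstat-<phase>-<basename of name>",
                  * e.g. "src/firmstat-lower-foo.c" for name "src/foo.c" and phase
                  * "lower" (the concrete values here are just an example).
                  */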
2162
2163                 stat_dump_init(fname);
2164
2165                 /* calculate the graph statistics */
2166                 for (entry = (graph_entry_t*)pset_first(status->irg_hash);
2167                       entry != NULL; entry = (graph_entry_t*)pset_next(status->irg_hash)) {
2168                         if (entry->irg == NULL) {
2169                                 /* special entry for the global count */
2170                                 continue;
2171                         }  /* if */
2172                         if (! entry->is_deleted) {
2173                                 /* the graph is still alive, count the nodes on it */
2174                                 update_graph_stat(global, entry);
2175                         }  /* if */
2176                 }  /* for */
2177
2178                 /* some calculations are dependent on others, so they were pushed onto the wait_q */
2179                 while (! pdeq_empty(status->wait_q)) {
2180                         entry = (graph_entry_t*)pdeq_getr(status->wait_q);
2181
2182                         update_graph_stat_2(global, entry);
2183                 }  /* while */
2184
2185                 /* dump per graph */
2186                 for (entry = (graph_entry_t*)pset_first(status->irg_hash);
2187                      entry != NULL; entry = (graph_entry_t*)pset_next(status->irg_hash)) {
2188                         if (entry->irg == NULL) {
2189                                 /* special entry for the global count */
2190                                 continue;
2191                         }  /* if */
2192
2193                         if (! entry->is_deleted || status->stat_options & FIRMSTAT_COUNT_DELETED) {
2194                                 stat_dump_graph(entry);
2195                                 stat_dump_registered(entry);
2196                         }  /* if */
2197
2198                         if (! entry->is_deleted) {
2199                                 /* clear the counters that are not accumulated */
2200                                 graph_clear_entry(entry, 0);
2201                         }  /* if */
2202                 }  /* for */
2203
2204                 /* dump global */
2205                 stat_dump_graph(global);
2206
2207                 /* dump the const info */
2208                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2209                         stat_dump_consts(&status->const_info);
2210
2211                 /* dump the parameter distribution */
2212                 stat_dump_param_tbl(status->dist_param_cnt, global);
2213
2214                 /* dump the optimization counter and clear them */
2215                 stat_dump_opt_cnt(status->num_opts, ARRAY_SIZE(status->num_opts));
2216                 clear_optimization_counter();
2217
2218                 stat_dump_finish();
2219
2220                 stat_finish_pattern_history(fname);
2221
2222                 /* clear the global counters here */
2223                 {
2224                         node_entry_t *entry;
2225
2226                         for (entry = (node_entry_t*)pset_first(global->opcode_hash);
2227                              entry != NULL; entry = (node_entry_t*)pset_next(global->opcode_hash)) {
2228                                 opcode_clear_entry(entry);
2229                         }  /* for */
2230                         /* clear all global counters */
2231                         graph_clear_entry(global, /*all=*/1);
2232                 }
2233         }
2234         STAT_LEAVE;
2235 }  /* stat_dump_snapshot */
2236
2237 typedef struct pass_t {
2238         ir_prog_pass_t pass;
2239         const char     *fname;
2240         const char     *phase;
2241 } pass_t;
2242
2243 /**
2244  * Wrapper to run stat_dump_snapshot() as an ir_prog pass.
2245  */
2246 static int stat_dump_snapshot_wrapper(ir_prog *irp, void *context)
2247 {
2248         pass_t *pass = (pass_t*)context;
2249
2250         (void)irp;
2251         stat_dump_snapshot(pass->fname, pass->phase);
2252         return 0;
2253 }  /* stat_dump_snapshot_wrapper */
2254
2255 /**
2256  * Ensure that no verifier is run from the wrapper.
2257  */
2258 static int no_verify(ir_prog *prog, void *ctx)
2259 {
2260         (void)prog;
2261         (void)ctx;
2262         return 0;
2263 }
2264
2265 /**
2266  * Ensure that no dumper is run from the wrapper.
2267  */
2268 static void no_dump(ir_prog *prog, void *ctx, unsigned idx)
2269 {
2270         (void)prog;
2271         (void)ctx;
2272         (void)idx;
2273 }
2274
2275 /* create an ir_prog pass */
2276 ir_prog_pass_t *stat_dump_snapshot_pass(
2277         const char *name, const char *fname, const char *phase)
2278 {
2279         pass_t *pass = XMALLOCZ(pass_t);
2280
2281         def_prog_pass_constructor(
2282                 &pass->pass, name ? name : "stat_snapshot", stat_dump_snapshot_wrapper);
2283         pass->fname = fname;
2284         pass->phase = phase;
2285
2286         /* no dump/verify */
2287         pass->pass.dump_irprog   = no_dump;
2288         pass->pass.verify_irprog = no_verify;
2289
2290         return &pass->pass;
2291 }  /* stat_dump_snapshot_pass */
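     /*
      * A minimal usage sketch (the concrete names are only illustrative):
      *
      *   ir_prog_pass_t *snap = stat_dump_snapshot_pass(NULL, "foo.c", "after-opt");
      *
      * When a pass manager runs the returned pass, stat_dump_snapshot("foo.c",
      * "after-opt") is executed; dumping and verification are disabled for it.
      */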
2292
2293 /** the hook entries for the Firm statistics module */
2294 static hook_entry_t stat_hooks[hook_last];
2295
2296 /* initialize the statistics module. */
2297 void firm_init_stat(unsigned enable_options)
2298 {
2299 #define X(a)  a, sizeof(a)-1
2300 #define HOOK(h, fkt) \
2301         stat_hooks[h].hook._##h = fkt; register_hook(h, &stat_hooks[h])
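     /*
      * HOOK(hook_new_node, stat_new_node), for example, expands to
      *   stat_hooks[hook_new_node].hook._hook_new_node = stat_new_node;
      *   register_hook(hook_new_node, &stat_hooks[hook_new_node]);
      * i.e. each statistics callback is stored in its slot of stat_hooks[]
      * and then registered with the libFirm hook mechanism.
      */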
2302         unsigned num = 0;
2303
2304         if (! (enable_options & FIRMSTAT_ENABLED))
2305                 return;
2306
2307         status = XMALLOCZ(stat_info_t);
2308
2309         /* enable statistics */
2310         status->stat_options = enable_options & FIRMSTAT_ENABLED ? enable_options : 0;
2311
2312         /* register all hooks */
2313         HOOK(hook_new_ir_op,                          stat_new_ir_op);
2314         HOOK(hook_free_ir_op,                         stat_free_ir_op);
2315         HOOK(hook_new_node,                           stat_new_node);
2316         HOOK(hook_turn_into_id,                       stat_turn_into_id);
2317         HOOK(hook_normalize,                          stat_normalize);
2318         HOOK(hook_new_graph,                          stat_new_graph);
2319         HOOK(hook_free_graph,                         stat_free_graph);
2320         HOOK(hook_irg_walk,                           stat_irg_walk);
2321         HOOK(hook_irg_walk_blkwise,                   stat_irg_walk_blkwise);
2322         HOOK(hook_irg_block_walk,                     stat_irg_block_walk);
2323         HOOK(hook_merge_nodes,                        stat_merge_nodes);
2324         HOOK(hook_reassociate,                        stat_reassociate);
2325         HOOK(hook_lower,                              stat_lower);
2326         HOOK(hook_inline,                             stat_inline);
2327         HOOK(hook_tail_rec,                           stat_tail_rec);
2328         HOOK(hook_strength_red,                       stat_strength_red);
2329         HOOK(hook_dead_node_elim,                     stat_dead_node_elim);
2330         HOOK(hook_if_conversion,                      stat_if_conversion);
2331         HOOK(hook_func_call,                          stat_func_call);
2332         HOOK(hook_arch_dep_replace_mul_with_shifts,   stat_arch_dep_replace_mul_with_shifts);
2333         HOOK(hook_arch_dep_replace_division_by_const, stat_arch_dep_replace_division_by_const);
2334
2335         obstack_init(&status->cnts);
2336         obstack_init(&status->be_data);
2337
2338         /* create the hash-tables */
2339         status->irg_hash   = new_pset(graph_cmp, 8);
2340         status->ir_op_hash = new_pset(opcode_cmp_2, 1);
2341
2342         /* create the wait queue */
2343         status->wait_q     = new_pdeq();
2344
2345         if (enable_options & FIRMSTAT_COUNT_STRONG_OP) {
2346                 /* build the pseudo-ops */
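                 /*
                  * num is unsigned and starts at 0, so every --num yields the codes
                  * UINT_MAX, UINT_MAX-1, ...  The pseudo opcodes therefore sit at
                  * the top of the code range and cannot collide with the codes of
                  * real Firm opcodes.
                  */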
2347
2348                 _op_Phi0.code    = --num;
2349                 _op_Phi0.name    = new_id_from_chars(X("Phi0"));
2350
2351                 _op_PhiM.code    = --num;
2352                 _op_PhiM.name    = new_id_from_chars(X("PhiM"));
2353
2354                 _op_ProjM.code   = --num;
2355                 _op_ProjM.name   = new_id_from_chars(X("ProjM"));
2356
2357                 _op_MulC.code    = --num;
2358                 _op_MulC.name    = new_id_from_chars(X("MulC"));
2359
2360                 _op_DivC.code    = --num;
2361                 _op_DivC.name    = new_id_from_chars(X("DivC"));
2362
2363                 _op_ModC.code    = --num;
2364                 _op_ModC.name    = new_id_from_chars(X("ModC"));
2365
2366                 status->op_Phi0    = &_op_Phi0;
2367                 status->op_PhiM    = &_op_PhiM;
2368                 status->op_ProjM   = &_op_ProjM;
2369                 status->op_MulC    = &_op_MulC;
2370                 status->op_DivC    = &_op_DivC;
2371                 status->op_ModC    = &_op_ModC;
2372         } else {
2373                 status->op_Phi0    = NULL;
2374                 status->op_PhiM    = NULL;
2375                 status->op_ProjM   = NULL;
2376                 status->op_MulC    = NULL;
2377                 status->op_DivC    = NULL;
2378                 status->op_ModC    = NULL;
2379         }  /* if */
2380
2381         /* for Florian: count the Sel depth */
2382         if (enable_options & FIRMSTAT_COUNT_SELS) {
2383                 _op_SelSel.code    = --num;
2384                 _op_SelSel.name    = new_id_from_chars(X("Sel(Sel)"));
2385
2386                 _op_SelSelSel.code = --num;
2387                 _op_SelSelSel.name = new_id_from_chars(X("Sel(Sel(Sel))"));
2388
2389                 status->op_SelSel    = &_op_SelSel;
2390                 status->op_SelSelSel = &_op_SelSelSel;
2391         } else {
2392                 status->op_SelSel    = NULL;
2393                 status->op_SelSelSel = NULL;
2394         }  /* if */
2395
2396         /* register the dumper */
2397         stat_register_dumper(&simple_dumper);
2398
2399         if (enable_options & FIRMSTAT_CSV_OUTPUT)
2400                 stat_register_dumper(&csv_dumper);
2401
2402         /* initialize the pattern hash */
2403         stat_init_pattern_history(enable_options & FIRMSTAT_PATTERN_ENABLED);
2404
2405         /* initialize the Const options */
2406         if (enable_options & FIRMSTAT_COUNT_CONSTS)
2407                 stat_init_const_cnt(status);
2408
2409         /* distribution table for parameter counts */
2410         status->dist_param_cnt = stat_new_int_distrib_tbl();
2411
2412         clear_optimization_counter();
2413
2414 #undef HOOK
2415 #undef X
2416 }  /* firm_init_stat */
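     /*
      * Example initialization (the flag combination is only an illustration):
      *
      *   firm_init_stat(FIRMSTAT_ENABLED | FIRMSTAT_COUNT_CONSTS | FIRMSTAT_CSV_OUTPUT);
      *
      * Without FIRMSTAT_ENABLED the call returns immediately and no hooks are
      * registered.
      */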
2417
2418 /**
2419  * Frees all dumper structures.
2420  */
2421 static void stat_term_dumper(void)
2422 {
2423         dumper_t *dumper, *next_dumper;
2424
2425         for (dumper = status->dumper; dumper; /* iteration done in loop body */ ) {
2426                 if (dumper->func_map)
2427                         del_pset(dumper->func_map);
2428
2429                 next_dumper = dumper->next;
2430                 free(dumper);
2431                 dumper = next_dumper;
2432         }  /* for */
2433 }  /* stat_term_dumper */
2434
2435
2436 /* Terminates the statistics module, frees all memory. */
2437 void stat_term(void)
2438 {
2439         if (status != (stat_info_t *)&status_disable) {
2440                 obstack_free(&status->be_data, NULL);
2441                 obstack_free(&status->cnts, NULL);
2442
2443                 stat_term_dumper();
2444
2445                 xfree(status);
2446                 status = (stat_info_t *)&status_disable;
2447         }
2448 }  /* stat_term */
2449
2450 /* returns 1 if statistics were initialized, 0 otherwise */
2451 int stat_is_active(void)
2452 {
2453         return status != (stat_info_t *)&status_disable;
2454 }  /* stat_is_active */
2455
2456 #else
2457
2458 /* initialize the statistics module. */
2459 void firm_init_stat(unsigned enable_options) {}
2460
2461 /* Dumps a statistics snapshot */
2462 void stat_dump_snapshot(const char *name, const char *phase) {}
2463
2464 /* terminates the statistics module, frees all memory */
2465 void stat_term(void) {}
2466
2467 #endif /* FIRM_STATISTICS */