Put opening curly brace of functions on a separate line. Last time I missed some.
[libfirm] / ir / stat / firmstat.c
1 /*
2  * Copyright (C) 1995-2008 University of Karlsruhe.  All rights reserved.
3  *
4  * This file is part of libFirm.
5  *
6  * This file may be distributed and/or modified under the terms of the
7  * GNU General Public License version 2 as published by the Free Software
8  * Foundation and appearing in the file LICENSE.GPL included in the
9  * packaging of this file.
10  *
11  * Licensees holding valid libFirm Professional Edition licenses may use
12  * this file in accordance with the libFirm Commercial License.
13  * Agreement provided with the Software.
14  *
15  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17  * PURPOSE.
18  */
19
20 /**
21  * @file
22  * @brief   Statistics for Firm.
23  * @author  Michael Beck
24  * @version $Id$
25  */
26 #include "config.h"
27
28 #ifdef FIRM_STATISTICS
29
30 #include <stdio.h>
31 #include <stdlib.h>
32 #include <string.h>
33
34 #include "irouts.h"
35 #include "irdump.h"
36 #include "hashptr.h"
37 #include "firmstat_t.h"
38 #include "irpass_t.h"
39 #include "pattern.h"
40 #include "dags.h"
41 #include "stat_dmp.h"
42 #include "xmalloc.h"
43 #include "irhooks.h"
44
45 /*
46  * These need to be static:
47  * special pseudo opcodes that we need for counting some interesting cases.
48  */
49
50 /**
51  * The Phi0, a node that is created during SSA construction
52  */
53 static ir_op _op_Phi0;
54
55 /** The PhiM, just to count memory Phis. */
56 static ir_op _op_PhiM;
57
58 /** The Mul by Const node. */
59 static ir_op _op_MulC;
60
61 /** The Div by Const node. */
62 static ir_op _op_DivC;
63
64 /** The Mod by Const node. */
65 static ir_op _op_ModC;
66
67 /** The DivMod by Const node. */
68 static ir_op _op_DivModC;
69
70 /** The Quot by Const node. */
71 static ir_op _op_QuotC;
72
73 /** The memory Proj node. */
74 static ir_op _op_ProjM;
75
76 /** A Sel of a Sel */
77 static ir_op _op_SelSel;
78
79 /** A Sel of a Sel of a Sel */
80 static ir_op _op_SelSelSel;
81
82 /* ---------------------------------------------------------------------------------- */
83
84 /** Marks the beginning of a statistic (hook) function. */
85 #define STAT_ENTER              ++status->recursive
86
87 /** Marks the end of a statistic (hook) function. */
88 #define STAT_LEAVE              --status->recursive
89
90 /** Allows entering a statistic function only when we are not already in a hook. */
91 #define STAT_ENTER_SINGLE       do { if (status->recursive > 0) return; ++status->recursive; } while (0)
92
93 /**
94  * Global status. Points at a dummy "disabled" value until the statistics module is initialized.
95  */
96 static const unsigned status_disable = 0;
97 static stat_info_t *status = (stat_info_t *)&status_disable;
98
99 /**
100  * Compare two elements of the opcode hash.
101  */
102 static int opcode_cmp(const void *elt, const void *key)
103 {
104         const node_entry_t *e1 = elt;
105         const node_entry_t *e2 = key;
106
107         return e1->op->code - e2->op->code;
108 }  /* opcode_cmp */
109
110 /**
111  * Compare two elements of the graph hash.
112  */
113 static int graph_cmp(const void *elt, const void *key)
114 {
115         const graph_entry_t *e1 = elt;
116         const graph_entry_t *e2 = key;
117
118         return e1->irg != e2->irg;
119 }  /* graph_cmp */
120
121 /**
122  * Compare two elements of the optimization hash.
123  */
124 static int opt_cmp(const void *elt, const void *key)
125 {
126         const opt_entry_t *e1 = elt;
127         const opt_entry_t *e2 = key;
128
129         return e1->op->code != e2->op->code;
130 }  /* opt_cmp */
131
132 /**
133  * Compare two elements of the block/extbb hash.
134  */
135 static int block_cmp(const void *elt, const void *key)
136 {
137         const block_entry_t *e1 = elt;
138         const block_entry_t *e2 = key;
139
140         /* it's enough to compare the block number */
141         return e1->block_nr != e2->block_nr;
142 }  /* block_cmp */
143
144 /**
145  * Compare two elements of the be_block hash.
146  */
147 static int be_block_cmp(const void *elt, const void *key)
148 {
149         const be_block_entry_t *e1 = elt;
150         const be_block_entry_t *e2 = key;
151
152         return e1->block_nr != e2->block_nr;
153 }  /* be_block_cmp */
154
155 /**
156  * Compare two elements of reg pressure hash.
157  */
158 static int reg_pressure_cmp(const void *elt, const void *key)
159 {
160         const reg_pressure_entry_t *e1 = elt;
161         const reg_pressure_entry_t *e2 = key;
162
163         return e1->class_name != e2->class_name;
164 }  /* reg_pressure_cmp */
165
166 /**
167  * Compare two elements of the perm_stat hash.
168  */
169 static int perm_stat_cmp(const void *elt, const void *key)
170 {
171         const perm_stat_entry_t *e1 = elt;
172         const perm_stat_entry_t *e2 = key;
173
174         return e1->perm != e2->perm;
175 }  /* perm_stat_cmp */
176
177 /**
178  * Compare two elements of the perm_class hash.
179  */
180 static int perm_class_cmp(const void *elt, const void *key)
181 {
182         const perm_class_entry_t *e1 = elt;
183         const perm_class_entry_t *e2 = key;
184
185         return e1->class_name != e2->class_name;
186 }  /* perm_class_cmp */
187
188 /**
189  * Compare two elements of the ir_op hash.
190  */
191 static int opcode_cmp_2(const void *elt, const void *key)
192 {
193         const ir_op *e1 = elt;
194         const ir_op *e2 = key;
195
196         return e1->code != e2->code;
197 }  /* opcode_cmp_2 */
198
199 /**
200  * Compare two elements of the address_mark set.
201  */
202 static int address_mark_cmp(const void *elt, const void *key, size_t size)
203 {
204         const address_mark_entry_t *e1 = elt;
205         const address_mark_entry_t *e2 = key;
206         (void) size;
207
208         /* compare only the nodes, the rest is used as data container */
209         return e1->node != e2->node;
210 }  /* address_mark_cmp */
211
212 /**
213  * Clears all counters in a node_entry_t.
214  */
215 static void opcode_clear_entry(node_entry_t *elem)
216 {
217         cnt_clr(&elem->cnt_alive);
218         cnt_clr(&elem->new_node);
219         cnt_clr(&elem->into_Id);
220         cnt_clr(&elem->normalized);
221 }  /* opcode_clear_entry */
222
223 /**
224  * Returns the associated node_entry_t for an ir_op (and allocates
225  * one if not yet available).
226  *
227  * @param op    the IR operation
228  * @param hmap  a hash map containing ir_op* -> node_entry_t*
229  */
230 static node_entry_t *opcode_get_entry(const ir_op *op, hmap_node_entry_t *hmap)
231 {
232         node_entry_t key;
233         node_entry_t *elem;
234
235         key.op = op;
236
237         elem = pset_find(hmap, &key, op->code);
238         if (elem)
239                 return elem;
240
241         elem = OALLOCZ(&status->cnts, node_entry_t);
242
243         /* clear counter */
244         opcode_clear_entry(elem);
245
246         elem->op = op;
247
248         return pset_insert(hmap, elem, op->code);
249 }  /* opcode_get_entry */
250
251 /**
252  * Returns the associated ir_op for an opcode
253  *
254  * @param code  the IR opcode
255  * @param hmap  the hash map containing opcode -> ir_op*
256  */
257 static ir_op *opcode_find_entry(ir_opcode code, hmap_ir_op *hmap)
258 {
259         ir_op key;
260
261         key.code = code;
262         return pset_find(hmap, &key, code);
263 }  /* opcode_find_entry */
264
265 /**
266  * Clears all counters in a graph_entry_t.
267  *
268  * @param elem  the graph entry
269  * @param all   if non-zero, clears all counters, else leave accumulated ones
270  */
271 static void graph_clear_entry(graph_entry_t *elem, int all)
272 {
273         int i;
274
275         /* clear accumulated / non-accumulated counter */
276         for (i = all ? 0 : _gcnt_non_acc; i < _gcnt_last; ++i) {
277                 cnt_clr(&elem->cnt[i]);
278         }  /* for */
279
280         if (elem->block_hash) {
281                 del_pset(elem->block_hash);
282                 elem->block_hash = NULL;
283         }  /* if */
284
285         if (elem->extbb_hash) {
286                 del_pset(elem->extbb_hash);
287                 elem->extbb_hash = NULL;
288         }  /* if */
289
290         obstack_free(&elem->recalc_cnts, NULL);
291         obstack_init(&elem->recalc_cnts);
292 }  /* graph_clear_entry */
293
294 /**
295  * Returns the associated graph_entry_t for an IR graph.
296  *
297  * @param irg   the IR graph, NULL for the global counter
298  * @param hmap  the hash map containing ir_graph* -> graph_entry_t*
299  */
300 static graph_entry_t *graph_get_entry(ir_graph *irg, hmap_graph_entry_t *hmap)
301 {
302         graph_entry_t key;
303         graph_entry_t *elem;
304         size_t i;
305
306         key.irg = irg;
307
308         elem = pset_find(hmap, &key, HASH_PTR(irg));
309
310         if (elem) {
311                 /* create the hash map for backend block information */
312                 if (! elem->be_block_hash)
313                         elem->be_block_hash = new_pset(be_block_cmp, 5);
314
315                 return elem;
316         }  /* if */
317
318         /* allocate a new one */
319         elem = OALLOCZ(&status->cnts, graph_entry_t);
320         obstack_init(&elem->recalc_cnts);
321
322         /* clear counter */
323         graph_clear_entry(elem, 1);
324
325         /* new hash table for opcodes here  */
326         elem->opcode_hash   = new_pset(opcode_cmp, 5);
327         elem->address_mark  = new_set(address_mark_cmp, 5);
328         elem->irg           = irg;
329
330         /* these hash tables are created on demand */
331         elem->block_hash = NULL;
332         elem->extbb_hash = NULL;
333
334         for (i = 0; i < sizeof(elem->opt_hash)/sizeof(elem->opt_hash[0]); ++i)
335                 elem->opt_hash[i] = new_pset(opt_cmp, 4);
336
337         return pset_insert(hmap, elem, HASH_PTR(irg));
338 }  /* graph_get_entry */
339
340 /**
341  * Clears all counters in an opt_entry_t.
342  */
343 static void opt_clear_entry(opt_entry_t *elem)
344 {
345         cnt_clr(&elem->count);
346 }  /* opt_clear_entry */
347
348 /**
349  * Returns the associated opt_entry_t for an IR operation.
350  *
351  * @param op    the IR operation
352  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
353  */
354 static opt_entry_t *opt_get_entry(const ir_op *op, hmap_opt_entry_t *hmap)
355 {
356         opt_entry_t key;
357         opt_entry_t *elem;
358
359         key.op = op;
360
361         elem = pset_find(hmap, &key, op->code);
362         if (elem)
363                 return elem;
364
365         elem = OALLOCZ(&status->cnts, opt_entry_t);
366
367         /* clear new counter */
368         opt_clear_entry(elem);
369
370         elem->op = op;
371
372         return pset_insert(hmap, elem, op->code);
373 }  /* opt_get_entry */
374
375 /**
376  * Clears all counters in a block_entry_t.
377  */
378 static void block_clear_entry(block_entry_t *elem)
379 {
380         int i;
381
382         for (i = 0; i < _bcnt_last; ++i)
383                 cnt_clr(&elem->cnt[i]);
384 }  /* block_clear_entry */
385
386 /**
387  * Returns the associated block_entry_t for a block.
388  *
389  * @param block_nr  an IR block number
390  * @param hmap      a hash map containing long -> block_entry_t
391  */
392 static block_entry_t *block_get_entry(struct obstack *obst, long block_nr, hmap_block_entry_t *hmap)
393 {
394         block_entry_t key;
395         block_entry_t *elem;
396
397         key.block_nr = block_nr;
398
399         elem = pset_find(hmap, &key, block_nr);
400         if (elem)
401                 return elem;
402
403         elem = OALLOCZ(obst, block_entry_t);
404
405         /* clear new counter */
406         block_clear_entry(elem);
407
408         elem->block_nr = block_nr;
409
410         return pset_insert(hmap, elem, block_nr);
411 }  /* block_get_entry */
412
413 /**
414  * Clear all sets in be_block_entry_t.
415  */
416 static void be_block_clear_entry(be_block_entry_t *elem)
417 {
418         if (elem->reg_pressure)
419                 del_pset(elem->reg_pressure);
420
421         if (elem->sched_ready)
422                 stat_delete_distrib_tbl(elem->sched_ready);
423
424         if (elem->perm_class_stat)
425                 del_pset(elem->perm_class_stat);
426
427         elem->reg_pressure    = new_pset(reg_pressure_cmp, 5);
428         elem->sched_ready     = stat_new_int_distrib_tbl();
429         elem->perm_class_stat = new_pset(perm_class_cmp, 5);
430 }  /* be_block_clear_entry */
431
432 /**
433  * Returns the associated be_block_entry_t for a block.
434  *
435  * @param block_nr  an IR block number
436  * @param hmap      a hash map containing long -> be_block_entry_t
437  */
438 static be_block_entry_t *be_block_get_entry(struct obstack *obst, long block_nr, hmap_be_block_entry_t *hmap)
439 {
440         be_block_entry_t key;
441         be_block_entry_t *elem;
442
443         key.block_nr = block_nr;
444
445         elem = pset_find(hmap, &key, block_nr);
446         if (elem)
447                 return elem;
448
449         elem = OALLOCZ(obst, be_block_entry_t);
450
451         /* clear new counter */
452         be_block_clear_entry(elem);
453
454         elem->block_nr = block_nr;
455
456         return pset_insert(hmap, elem, block_nr);
457 }  /* be_block_get_entry */
458
459 /**
460  * clears all sets in perm_class_entry_t
461  */
462 static void perm_class_clear_entry(perm_class_entry_t *elem)
463 {
464         if (elem->perm_stat)
465                 del_pset(elem->perm_stat);
466
467         elem->perm_stat = new_pset(perm_stat_cmp, 5);
468 }  /* perm_class_clear_entry */
469
470 /**
471  * Returns the associated perm_class entry for a register class.
472  *
473  * @param class_name  the register class name
474  * @param hmap        a hash map containing class_name -> perm_class_entry_t
475  */
476 static perm_class_entry_t *perm_class_get_entry(struct obstack *obst, const char *class_name,
477                                                 hmap_perm_class_entry_t *hmap)
478 {
479         perm_class_entry_t key;
480         perm_class_entry_t *elem;
481
482         key.class_name = class_name;
483
484         elem = pset_find(hmap, &key, HASH_PTR(class_name));
485         if (elem)
486                 return elem;
487
488         elem = OALLOCZ(obst, perm_class_entry_t);
489
490         /* clear new counter */
491         perm_class_clear_entry(elem);
492
493         elem->class_name = class_name;
494
495         return pset_insert(hmap, elem, HASH_PTR(class_name));
496 }  /* perm_class_get_entry */
497
498 /**
499  * clears all sets in perm_stat_entry_t
500  */
501 static void perm_stat_clear_entry(perm_stat_entry_t *elem)
502 {
503         if (elem->chains)
504                 stat_delete_distrib_tbl(elem->chains);
505
506         if (elem->cycles)
507                 stat_delete_distrib_tbl(elem->cycles);
508
509         elem->chains = stat_new_int_distrib_tbl();
510         elem->cycles = stat_new_int_distrib_tbl();
511 }  /* perm_stat_clear_entry */
512
513 /**
514  * Returns the associated perm_stat entry for a perm.
515  *
516  * @param perm      the perm node
517  * @param hmap      a hash map containing perm -> perm_stat_entry_t
518  */
519 static perm_stat_entry_t *perm_stat_get_entry(struct obstack *obst, ir_node *perm, hmap_perm_stat_entry_t *hmap)
520 {
521         perm_stat_entry_t key;
522         perm_stat_entry_t *elem;
523
524         key.perm = perm;
525
526         elem = pset_find(hmap, &key, HASH_PTR(perm));
527         if (elem)
528                 return elem;
529
530         elem = OALLOCZ(obst, perm_stat_entry_t);
531
532         /* clear new counter */
533         perm_stat_clear_entry(elem);
534
535         elem->perm = perm;
536
537         return pset_insert(hmap, elem, HASH_PTR(perm));
538 }  /* perm_stat_get_entry */
539
540 /**
541  * Clears the optimization counters.
542  */
543 static void clear_optimization_counter(void)
544 {
545         int i;
546         for (i = 0; i < FS_OPT_MAX; ++i)
547                 cnt_clr(&status->num_opts[i]);
548 }
549
550 /**
551  * Returns the ir_op for an IR node,
552  * handles special cases and returns pseudo opcodes.
553  *
554  * @param node  an IR node
555  */
556 static ir_op *stat_get_irn_op(ir_node *node)
557 {
558         ir_op *op = get_irn_op(node);
559         ir_opcode opc = op->code;
560
561         switch (opc) {
562         case iro_Phi:
563                 if (get_irn_arity(node) == 0) {
564                         /* special case, a Phi0 node, count on extra counter */
565                         op = status->op_Phi0 ? status->op_Phi0 : op;
566                 } else if (get_irn_mode(node) == mode_M) {
567                         /* special case, a Memory Phi node, count on extra counter */
568                         op = status->op_PhiM ? status->op_PhiM : op;
569                 }  /* if */
570                 break;
571         case iro_Proj:
572                 if (get_irn_mode(node) == mode_M) {
573                         /* special case, a Memory Proj node, count on extra counter */
574                         op = status->op_ProjM ? status->op_ProjM : op;
575                 }  /* if */
576                 break;
577         case iro_Mul:
578                 if (is_Const(get_Mul_left(node)) || is_Const(get_Mul_right(node))) {
579                         /* special case, a Multiply by a const, count on extra counter */
580                         op = status->op_MulC ? status->op_MulC : op;
581                 }  /* if */
582                 break;
583         case iro_Div:
584                 if (is_Const(get_Div_right(node))) {
585                         /* special case, a division by a const, count on extra counter */
586                         op = status->op_DivC ? status->op_DivC : op;
587                 }  /* if */
588                 break;
589         case iro_Mod:
590                 if (is_Const(get_Mod_right(node))) {
591                         /* special case, a modulo by a const, count on extra counter */
592                         op = status->op_ModC ? status->op_ModC : op;
593                 }  /* if */
594                 break;
595         case iro_DivMod:
596                 if (is_Const(get_DivMod_right(node))) {
597                         /* special case, a division/modulo by a const, count on extra counter */
598                         op = status->op_DivModC ? status->op_DivModC : op;
599                 }  /* if */
600                 break;
601         case iro_Quot:
602                 if (is_Const(get_Quot_right(node))) {
603                         /* special case, a floating point division by a const, count on extra counter */
604                         op = status->op_QuotC ? status->op_QuotC : op;
605                 }  /* if */
606                 break;
607         case iro_Sel:
608                 if (is_Sel(get_Sel_ptr(node))) {
609                         /* special case, a Sel of a Sel, count on extra counter */
610                         op = status->op_SelSel ? status->op_SelSel : op;
611                         if (is_Sel(get_Sel_ptr(get_Sel_ptr(node)))) {
612                                 /* special case, a Sel of a Sel of a Sel, count on extra counter */
613                                 op = status->op_SelSelSel ? status->op_SelSelSel : op;
614                         }  /* if */
615                 }  /* if */
616                 break;
617         default:
618                 ;
619         }  /* switch */
620
621         return op;
622 }  /* stat_get_irn_op */
623
624 /**
625  * Updates the block counters.
626  */
627 static void undate_block_info(ir_node *node, graph_entry_t *graph)
628 {
629         ir_op *op = get_irn_op(node);
630         ir_node *block;
631         block_entry_t *b_entry;
632         int i, arity;
633
634         /* check for block */
635         if (op == op_Block) {
636                 arity = get_irn_arity(node);
637                 b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(node), graph->block_hash);
638                 /* mark start and end block to allow filtering them out */
639                 if (node == get_irg_start_block(graph->irg))
640                         b_entry->is_start = 1;
641                 else if (node == get_irg_end_block(graph->irg))
642                         b_entry->is_end = 1;
643
644                 /* count all incoming edges */
645                 for (i = 0; i < arity; ++i) {
646                         ir_node *pred = get_irn_n(node, i);
647                         ir_node *other_block = get_nodes_block(pred);
648                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
649
650                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
651                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
652                 }  /* for */
653                 return;
654         }  /* if */
655
656         block   = get_nodes_block(node);
657         b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(block), graph->block_hash);
658
659         if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
660                 /* count data Phi per block */
661                 cnt_inc(&b_entry->cnt[bcnt_phi_data]);
662         }  /* if */
663
664         /* we have a new node in our block */
665         cnt_inc(&b_entry->cnt[bcnt_nodes]);
666
667         /* don't count keep-alive edges */
668         if (is_End(node))
669                 return;
670
671         arity = get_irn_arity(node);
672
673         for (i = 0; i < arity; ++i) {
674                 ir_node *pred = get_irn_n(node, i);
675                 ir_node *other_block;
676
677                 other_block = get_nodes_block(pred);
678
679                 if (other_block == block)
680                         cnt_inc(&b_entry->cnt[bcnt_edges]);     /* an in-block edge */
681                 else {
682                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
683
684                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
685                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
686                 }  /* if */
687         }  /* for */
688 }  /* undate_block_info */
689
690 /**
691  * Update the extended block counter.
692  */
693 static void update_extbb_info(ir_node *node, graph_entry_t *graph)
694 {
695         ir_op *op = get_irn_op(node);
696         ir_extblk *extbb;
697         extbb_entry_t *eb_entry;
698         int i, arity;
699
700         /* check for block */
701         if (op == op_Block) {
702                 extbb = get_nodes_extbb(node);
703                 arity = get_irn_arity(node);
704                 eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);
705
706                 /* count all incoming edges */
707                 for (i = 0; i < arity; ++i) {
708                         ir_node *pred = get_irn_n(node, i);
709                         ir_extblk *other_extbb = get_nodes_extbb(pred);
710
711                         if (extbb != other_extbb) {
712                                 extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);
713
714                                 cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
715                                 cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
716                         }  /* if */
717                 }  /* for */
718                 return;
719         }  /* if */
720
721         extbb    = get_nodes_extbb(node);
722         eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);
723
724         if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
725                 /* count data Phi per extbb */
726                 cnt_inc(&eb_entry->cnt[bcnt_phi_data]);
727         }  /* if */
728
729         /* we have a new node in our block */
730         cnt_inc(&eb_entry->cnt[bcnt_nodes]);
731
732         /* don't count keep-alive edges */
733         if (is_End(node))
734                 return;
735
736         arity = get_irn_arity(node);
737
738         for (i = 0; i < arity; ++i) {
739                 ir_node *pred = get_irn_n(node, i);
740                 ir_extblk *other_extbb = get_nodes_extbb(pred);
741
742                 if (other_extbb == extbb)
743                         cnt_inc(&eb_entry->cnt[bcnt_edges]);    /* an in-extbb edge */
744                 else {
745                         extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);
746
747                         cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
748                         cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
749                 }  /* if */
750         }  /* for */
751 }  /* update_extbb_info */
752
753 /**
754  * Calculates how many arguments of the call are const, updates
755  * param distribution.
756  */
757 static void analyse_params_of_Call(graph_entry_t *graph, ir_node *call)
758 {
759         int i, num_const_args = 0, num_local_adr = 0;
760         int n = get_Call_n_params(call);
761
762         for (i = 0; i < n; ++i) {
763                 ir_node *param = get_Call_param(call, i);
764
765                 if (is_irn_constlike(param))
766                         ++num_const_args;
767                 else if (is_Sel(param)) {
768                         ir_node *base = param;
769
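                        /* strip the Sel chain to get at the base address of the access */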
770                         do {
771                                 base = get_Sel_ptr(base);
772                         } while (is_Sel(base));
773
774                         if (base == get_irg_frame(current_ir_graph))
775                                 ++num_local_adr;
776                 }
777
778         }  /* for */
779
780         if (num_const_args > 0)
781                 cnt_inc(&graph->cnt[gcnt_call_with_cnst_arg]);
782         if (num_const_args == n)
783                 cnt_inc(&graph->cnt[gcnt_call_with_all_cnst_arg]);
784         if (num_local_adr > 0)
785                 cnt_inc(&graph->cnt[gcnt_call_with_local_adr]);
786
787         stat_inc_int_distrib_tbl(status->dist_param_cnt, n);
788 }  /* analyse_params_of_Call */
789
790 /**
791  * Update info on calls.
792  *
793  * @param call   The call
794  * @param graph  The graph entry containing the call
795  */
796 static void stat_update_call(ir_node *call, graph_entry_t *graph)
797 {
798         ir_node   *block = get_nodes_block(call);
799         ir_node   *ptr = get_Call_ptr(call);
800         ir_entity *ent = NULL;
801         ir_graph  *callee = NULL;
802
803         /*
804          * If the block is bad, the whole subgraph will collapse later
805          * so do not count this call.
806          * This happens in dead code.
807          */
808         if (is_Bad(block))
809                 return;
810
811         cnt_inc(&graph->cnt[gcnt_all_calls]);
812
813         /* found a call, this function is not a leaf */
814         graph->is_leaf = 0;
815
816         if (is_SymConst(ptr)) {
817                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
818                         /* ok, we seem to know the entity */
819                         ent = get_SymConst_entity(ptr);
820                         callee = get_entity_irg(ent);
821
822                         /* it is recursive if it calls itself at least once */
823                         if (callee == graph->irg)
824                                 graph->is_recursive = 1;
825                         if (callee == NULL)
826                                 cnt_inc(&graph->cnt[gcnt_external_calls]);
827                 }  /* if */
828         } else {
829                 /* indirect call, we cannot predict the callee */
830                 cnt_inc(&graph->cnt[gcnt_indirect_calls]);
831
832                 /* NOT a leaf call */
833                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
834         }  /* if */
835
836         /* check, if it's a chain-call: Then, the call-block
837          * must dominate the end block. */
838         {
839                 ir_node *curr = get_irg_end_block(graph->irg);
840                 int depth = get_Block_dom_depth(block);
841
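                /* walk up the immediate dominators of the end block; if we reach
                 * the call block, the call block dominates the end block */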
842                 for (; curr != block && get_Block_dom_depth(curr) > depth;) {
843                         curr = get_Block_idom(curr);
844
845                         if (! curr || is_no_Block(curr))
846                                 break;
847                 }  /* for */
848
849                 if (curr != block)
850                         graph->is_chain_call = 0;
851         }
852
853         /* check, if the callee is a leaf */
854         if (callee) {
855                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
856
857                 if (called->is_analyzed) {
858                         if (! called->is_leaf)
859                                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
860                 }  /* if */
861         }  /* if */
862
863         analyse_params_of_Call(graph, call);
864 }  /* stat_update_call */
865
866 /**
867  * Update info on calls for graphs on the wait queue.
868  */
869 static void stat_update_call_2(ir_node *call, graph_entry_t *graph)
870 {
871         ir_node   *block = get_nodes_block(call);
872         ir_node   *ptr = get_Call_ptr(call);
873         ir_entity *ent = NULL;
874         ir_graph  *callee = NULL;
875
876         /*
877          * If the block is bad, the whole subgraph will collapse later
878          * so do not count this call.
879          * This happens in dead code.
880          */
881         if (is_Bad(block))
882                 return;
883
884         if (is_SymConst(ptr)) {
885                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
886                         /* ok, we seem to know the entity */
887                         ent = get_SymConst_entity(ptr);
888                         callee = get_entity_irg(ent);
889                 }  /* if */
890         }  /* if */
891
892         /* check, if the callee is a leaf */
893         if (callee) {
894                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
895
896                 assert(called->is_analyzed);
897
898                 if (! called->is_leaf)
899                         graph->is_leaf_call = LCS_NON_LEAF_CALL;
900         } else
901                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
902 }  /* stat_update_call_2 */
903
904 /**
905  * Finds the base address of a Sel node.
906  *
907  * @param sel  the node
908  *
909  * @return the base address.
910  */
911 static ir_node *find_base_adr(ir_node *sel)
912 {
913         ir_node *ptr = get_Sel_ptr(sel);
914
915         while (is_Sel(ptr)) {
916                 sel = ptr;
917                 ptr = get_Sel_ptr(sel);
918         }
919         return ptr;
920 }  /* find_base_adr */
921
922 /**
923  * Update info on Load/Store address statistics.
924  */
925 static void stat_update_address(ir_node *node, graph_entry_t *graph)
926 {
927         ir_opcode opc = get_irn_opcode(node);
928         ir_node *base;
929         ir_graph *irg;
930
931         switch (opc) {
932         case iro_SymConst:
933                 /* a global address */
934                 cnt_inc(&graph->cnt[gcnt_global_adr]);
935                 break;
936         case iro_Sel:
937                 base = find_base_adr(node);
938                 irg = current_ir_graph;
939                 if (base == get_irg_tls(irg)) {
940                         /* a TLS variable, like a global. */
941                         cnt_inc(&graph->cnt[gcnt_global_adr]);
942                 } else if (base == get_irg_frame(irg)) {
943                         /* a local Variable. */
944                         cnt_inc(&graph->cnt[gcnt_local_adr]);
945                 } else {
946                         /* Pointer access */
947                         if (is_Proj(base) && skip_Proj(get_Proj_pred(base)) == get_irg_start(irg)) {
948                                 /* pointer access through parameter, check for THIS */
949                                 ir_entity *ent = get_irg_entity(irg);
950
951                                 if (ent != NULL) {
952                                         ir_type *ent_tp = get_entity_type(ent);
953
954                                         if (get_method_calling_convention(ent_tp) & cc_this_call) {
955                                                 if (get_Proj_proj(base) == 0) {
956                                                         /* THIS pointer */
957                                                         cnt_inc(&graph->cnt[gcnt_this_adr]);
958                                                         goto end_parameter;
959                                                 }  /* if */
960                                         }  /* if */
961                                 }  /* if */
962                                 /* other parameter */
963                                 cnt_inc(&graph->cnt[gcnt_param_adr]);
964 end_parameter:  ;
965                         } else {
966                                 /* unknown Pointer access */
967                                 cnt_inc(&graph->cnt[gcnt_other_adr]);
968                         }  /* if */
969                 }  /* if */
970         default:
971                 ;
972         }  /* switch */
973 }  /* stat_update_address */
974
975 /**
976  * Walker for reachable nodes count.
977  */
978 static void update_node_stat(ir_node *node, void *env)
979 {
980         graph_entry_t *graph = env;
981         node_entry_t *entry;
982
983         ir_op *op = stat_get_irn_op(node);
984         int i, arity = get_irn_arity(node);
985
986         entry = opcode_get_entry(op, graph->opcode_hash);
987
988         cnt_inc(&entry->cnt_alive);
989         cnt_add_i(&graph->cnt[gcnt_edges], arity);
990
991         /* count block edges */
992         undate_block_info(node, graph);
993
994         /* count extended block edges */
995         if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
996                 if (graph->irg != get_const_code_irg())
997                         update_extbb_info(node, graph);
998         }  /* if */
999
1000         /* handle statistics for special node types */
1001
1002         switch (op->code) {
1003         case iro_Call:
1004                 /* check for properties that depend on calls like recursion/leaf/indirect call */
1005                 stat_update_call(node, graph);
1006                 break;
1007         case iro_Load:
1008                 /* check address properties */
1009                 stat_update_address(get_Load_ptr(node), graph);
1010                 break;
1011         case iro_Store:
1012                 /* check address properties */
1013                 stat_update_address(get_Store_ptr(node), graph);
1014                 break;
1015         case iro_Phi:
1016                 /* check for non-strict Phi nodes */
1017                 for (i = arity - 1; i >= 0; --i) {
1018                         ir_node *pred = get_Phi_pred(node, i);
1019                         if (is_Unknown(pred)) {
1020                                 /* found an Unknown predecessor, graph is not strict */
1021                                 graph->is_strict = 0;
1022                                 break;
1023                         }
1024                 }
1025         default:
1026                 ;
1027         }  /* switch */
1028
1029         /* we want to count the constant IN nodes, not the CSE'ed constants themselves */
1030         if (status->stat_options & FIRMSTAT_COUNT_CONSTS) {
1031                 int i;
1032
1033                 for (i = get_irn_arity(node) - 1; i >= 0; --i) {
1034                         ir_node *pred = get_irn_n(node, i);
1035
1036                         if (is_Const(pred)) {
1037                                 /* check properties of constants */
1038                                 stat_update_const(status, pred, graph);
1039                         }  /* if */
1040                 }  /* for */
1041         }  /* if */
1042 }  /* update_node_stat */
1043
1044 /**
1045  * Walker for reachable nodes count for graphs on the wait_q.
1046  */
1047 static void update_node_stat_2(ir_node *node, void *env)
1048 {
1049         graph_entry_t *graph = env;
1050
1051         /* check for properties that depend on calls like recursion/leaf/indirect call */
1052         if (is_Call(node))
1053                 stat_update_call_2(node, graph);
1054 }  /* update_node_stat_2 */
1055
1056 /**
1057  * Get the current address mark.
1058  */
1059 static unsigned get_adr_mark(graph_entry_t *graph, ir_node *node)
1060 {
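        /* the entry is keyed by the node pointer alone (see address_mark_cmp);
         * the mark is stored as payload in the found entry */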
1061         address_mark_entry_t *value = set_find(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1062
1063         return value ? value->mark : 0;
1064 }  /* get_adr_mark */
1065
1066 /**
1067  * Set the current address mark.
1068  */
1069 static void set_adr_mark(graph_entry_t *graph, ir_node *node, unsigned val)
1070 {
1071         address_mark_entry_t *value = set_insert(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1072
1073         value->mark = val;
1074 }  /* set_adr_mark */
1075
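/* Change the following #undef into a #define to colorize address-marked nodes
 * in a vcg dump of each analyzed graph (debugging aid). */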
1076 #undef DUMP_ADR_MODE
1077
1078 #ifdef DUMP_ADR_MODE
1079 /**
1080  * a vcg attribute hook: Color a node with a different color if
1081  * it's identified as a part of an address expression or at least referenced
1082  * by an address expression.
1083  */
1084 static int stat_adr_mark_hook(FILE *F, ir_node *node, ir_node *local)
1085 {
1086         ir_node *n           = local ? local : node;
1087         ir_graph *irg        = get_irn_irg(n);
1088         graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1089         unsigned mark        = get_adr_mark(graph, n);
1090
1091         if (mark & MARK_ADDRESS_CALC)
1092                 fprintf(F, "color: purple");
1093         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1094                 fprintf(F, "color: pink");
1095         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1096                 fprintf(F, "color: lightblue");
1097         else
1098                 return 0;
1099
1100         /* I know the color! */
1101         return 1;
1102 }  /* stat_adr_mark_hook */
1103 #endif /* DUMP_ADR_MODE */
1104
1105 /**
1106  * Return the "operational" mode of a Firm node.
1107  */
1108 static ir_mode *get_irn_op_mode(ir_node *node)
1109 {
1110         switch (get_irn_opcode(node)) {
1111         case iro_Load:
1112                 return get_Load_mode(node);
1113         case iro_Store:
1114                 return get_irn_mode(get_Store_value(node));
1115         case iro_DivMod:
1116                 return get_irn_mode(get_DivMod_left(node));
1117         case iro_Div:
1118                 return get_irn_mode(get_Div_left(node));
1119         case iro_Mod:
1120                 return get_irn_mode(get_Mod_left(node));
1121         case iro_Cmp:
1122                 /* Cmp is no address calculation, or is it? */
1123         default:
1124                 return get_irn_mode(node);
1125         }  /* switch */
1126 }  /* get_irn_op_mode */
1127
1128 /**
1129  * Post-walker that marks every node that is an address calculation.
1130  *
1131  * Users of a node must be visited first. We ensure this by
1132  * calling it in the post of an outs walk. This should work even in cycles,
1133  * while the normal pre-walk will not.
1134  */
1135 static void mark_address_calc(ir_node *node, void *env)
1136 {
1137         graph_entry_t *graph = env;
1138         ir_mode *mode = get_irn_op_mode(node);
1139         int i, n;
1140         unsigned mark_preds = MARK_REF_NON_ADR;
1141
1142         if (! mode_is_data(mode))
1143                 return;
1144
1145         if (mode_is_reference(mode)) {
1146                 /* a reference is calculated here, we are sure */
1147                 set_adr_mark(graph, node, MARK_ADDRESS_CALC);
1148
1149                 mark_preds = MARK_REF_ADR;
1150         } else {
1151                 unsigned mark = get_adr_mark(graph, node);
1152
1153                 if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR) {
1154                         /*
1155                          * this node has no reference mode, but is only
1156                          * referenced by address calculations
1157                          */
1158                         mark_preds = MARK_REF_ADR;
1159                 }  /* if */
1160         }  /* if */
1161
1162         /* mark all predecessors */
1163         for (i = 0, n = get_irn_arity(node); i < n; ++i) {
1164                 ir_node *pred = get_irn_n(node, i);
1165
1166                 mode = get_irn_op_mode(pred);
1167                 if (! mode_is_data(mode))
1168                         continue;
1169
1170                 set_adr_mark(graph, pred, get_adr_mark(graph, pred) | mark_preds);
1171         }  /* for */
1172 }  /* mark_address_calc */
1173
1174 /**
1175  * Post-walker that counts address calculation operations.
1176  *
1177  * Uses the address marks computed by mark_address_calc(), so it must run
1178  * after the marking walk; it is called as a post-walker over the whole
1179  * graph in update_graph_stat().
1180  */
1181 static void count_adr_ops(ir_node *node, void *env)
1182 {
1183         graph_entry_t *graph = env;
1184         unsigned mark        = get_adr_mark(graph, node);
1185
1186         if (mark & MARK_ADDRESS_CALC)
1187                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1188         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1189                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1190         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1191                 cnt_inc(&graph->cnt[gcnt_all_adr_ops]);
1192 }  /* count_adr_ops */
1193
1194 /**
1195  * Called for every graph when the graph is either deleted or stat_dump_snapshot()
1196  * is called; recalculates all statistic info.
1197  *
1198  * @param global    The global entry
1199  * @param graph     The current entry
1200  */
1201 static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph)
1202 {
1203         node_entry_t *entry;
1204         int i;
1205
1206         /* clear first the alive counter in the graph */
1207         foreach_pset(graph->opcode_hash, entry) {
1208                 cnt_clr(&entry->cnt_alive);
1209         }  /* foreach_pset */
1210
1211         /* set pessimistic values */
1212         graph->is_leaf       = 1;
1213         graph->is_leaf_call  = LCS_UNKNOWN;
1214         graph->is_recursive  = 0;
1215         graph->is_chain_call = 1;
1216         graph->is_strict     = 1;
1217
1218         /* create new block counter */
1219         graph->block_hash = new_pset(block_cmp, 5);
1220
1221         /* we need dominator info */
1222         if (graph->irg != get_const_code_irg()) {
1223                 assure_doms(graph->irg);
1224
1225                 if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
1226                         /* we need extended basic blocks */
1227                         compute_extbb(graph->irg);
1228
1229                         /* create new extbb counter */
1230                         graph->extbb_hash = new_pset(block_cmp, 5);
1231                 }  /* if */
1232         }  /* if */
1233
1234         /* count the nodes in the graph */
1235         irg_walk_graph(graph->irg, update_node_stat, NULL, graph);
1236
1237 #if 0
1238         /* Uncomment this code if chain-call means call exact one. */
1239         entry = opcode_get_entry(op_Call, graph->opcode_hash);
1240
1241         /* check if we have more than 1 call */
1242         if (cnt_gt(entry->cnt_alive, 1))
1243                 graph->is_chain_call = 0;
1244 #endif
1245
1246         /* recursive functions are never chain calls, leaf functions have no calls */
1247         if (graph->is_recursive || graph->is_leaf)
1248                 graph->is_chain_call = 0;
1249
1250         /* assume we walk every graph only ONCE, we could sum the global count here */
1251         foreach_pset(graph->opcode_hash, entry) {
1252                 node_entry_t *g_entry = opcode_get_entry(entry->op, global->opcode_hash);
1253
1254                 /* update the node counter */
1255                 cnt_add(&g_entry->cnt_alive, &entry->cnt_alive);
1256         }  /* foreach_pset */
1257
1258         /* count the number of address calculations */
1259         if (graph->irg != get_const_code_irg()) {
1260                 ir_graph *rem = current_ir_graph;
1261
1262                 assure_irg_outs(graph->irg);
1263
1264                 /* Must be done on the outs graph */
1265                 current_ir_graph = graph->irg;
1266                 irg_out_walk(get_irg_start(graph->irg), NULL, mark_address_calc, graph);
1267                 current_ir_graph = rem;
1268
1269 #ifdef DUMP_ADR_MODE
1270                 /* register the vcg hook and dump the graph for test */
1271                 set_dump_node_vcgattr_hook(stat_adr_mark_hook);
1272                 dump_ir_block_graph(graph->irg, "-adr");
1273                 set_dump_node_vcgattr_hook(NULL);
1274 #endif /* DUMP_ADR_MODE */
1275
1276                 irg_walk_graph(graph->irg, NULL, count_adr_ops, graph);
1277         }  /* if */
1278
1279         /* count the DAG's */
1280         if (status->stat_options & FIRMSTAT_COUNT_DAG)
1281                 count_dags_in_graph(global, graph);
1282
1283         /* calculate the patterns of this graph */
1284         stat_calc_pattern_history(graph->irg);
1285
1286         /* a leaf function does not call others */
1287         if (graph->is_leaf)
1288                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
1289         else if (graph->is_leaf_call == LCS_UNKNOWN) {
1290                 /* we still don't know if this graph calls leaf-functions, so enqueue */
1291                 pdeq_putl(status->wait_q, graph);
1292         }  /* if */
1293
1294         /* we have analyzed this graph */
1295         graph->is_analyzed = 1;
1296
1297         /* accumulate all counters */
1298         for (i = 0; i < _gcnt_last; ++i)
1299                 cnt_add(&global->cnt[i], &graph->cnt[i]);
1300 }  /* update_graph_stat */
1301
1302 /**
1303  * Called for every graph that was on the wait_q in stat_dump_snapshot()
1304  * in order to finish all statistic info calculations.
1305  *
1306  * @param global    The global entry
1307  * @param graph     The current entry
1308  */
1309 static void update_graph_stat_2(graph_entry_t *global, graph_entry_t *graph)
1310 {
1311         (void) global;
1312         if (graph->is_deleted) {
1313                 /* deleted, ignore */
1314                 return;
1315         }
1316
1317         if (graph->irg) {
1318                 /* count the nodes in the graph */
1319                 irg_walk_graph(graph->irg, update_node_stat_2, NULL, graph);
1320
1321                 if (graph->is_leaf_call == LCS_UNKNOWN)
1322                         graph->is_leaf_call = LCS_LEAF_CALL;
1323         }  /* if */
1324 }  /* update_graph_stat_2 */
1325
1326 /**
1327  * Register a dumper.
1328  */
1329 static void stat_register_dumper(const dumper_t *dumper)
1330 {
1331         dumper_t *p = XMALLOC(dumper_t);
1332
1333         memcpy(p, dumper, sizeof(*p));
1334
1335         p->next        = status->dumper;
1336         p->status      = status;
1337         status->dumper = p;
1338
1339         /* FIXME: memory leak */
1340 }  /* stat_register_dumper */
1341
1342 /**
1343  * Dumps the statistics of an IR graph.
1344  */
1345 static void stat_dump_graph(graph_entry_t *entry)
1346 {
1347         dumper_t *dumper;
1348
1349         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1350                 if (dumper->dump_graph)
1351                         dumper->dump_graph(dumper, entry);
1352         }  /* for */
1353 }  /* stat_dump_graph */
1354
1355 /**
1356  * Calls all registered dumper functions.
1357  */
1358 static void stat_dump_registered(graph_entry_t *entry)
1359 {
1360         dumper_t *dumper;
1361
1362         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1363                 if (dumper->func_map) {
1364                         dump_graph_FUNC func;
1365
1366                         foreach_pset(dumper->func_map, func)
1367                                 func(dumper, entry);
1368                 }  /* if */
1369         }  /* for */
1370 }  /* stat_dump_registered */
1371
1372 /**
1373  * Dumps a constant table.
1374  */
1375 static void stat_dump_consts(const constant_info_t *tbl)
1376 {
1377         dumper_t *dumper;
1378
1379         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1380                 if (dumper->dump_const_tbl)
1381                         dumper->dump_const_tbl(dumper, tbl);
1382         }  /* for */
1383 }  /* stat_dump_consts */
1384
1385 /**
1386  * Dumps the parameter distribution
1387  */
1388 static void stat_dump_param_tbl(const distrib_tbl_t *tbl, graph_entry_t *global)
1389 {
1390         dumper_t *dumper;
1391
1392         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1393                 if (dumper->dump_param_tbl)
1394                         dumper->dump_param_tbl(dumper, tbl, global);
1395         }  /* for */
1396 }  /* stat_dump_param_tbl */
1397
1398 /**
1399  * Dumps the optimization counter
1400  */
1401 static void stat_dump_opt_cnt(const counter_t *tbl, unsigned len)
1402 {
1403         dumper_t *dumper;
1404
1405         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1406                 if (dumper->dump_opt_cnt)
1407                         dumper->dump_opt_cnt(dumper, tbl, len);
1408         }  /* for */
1409 }  /* stat_dump_opt_cnt */
1410
1411 /**
1412  * Initialize the dumper.
1413  */
1414 static void stat_dump_init(const char *name)
1415 {
1416         dumper_t *dumper;
1417
1418         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1419                 if (dumper->init)
1420                         dumper->init(dumper, name);
1421         }  /* for */
1422 }  /* stat_dump_init */
1423
1424 /**
1425  * Finish the dumper.
1426  */
1427 static void stat_dump_finish(void)
1428 {
1429         dumper_t *dumper;
1430
1431         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1432                 if (dumper->finish)
1433                         dumper->finish(dumper);
1434         }  /* for */
1435 }  /* stat_dump_finish */
1436
1437 /**
1438  * Register an additional function for all dumpers.
1439  */
1440 void stat_register_dumper_func(dump_graph_FUNC func)
1441 {
1442         dumper_t *dumper;
1443
1444         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1445                 if (! dumper->func_map)
1446                         dumper->func_map = pset_new_ptr(3);
1447                 pset_insert_ptr(dumper->func_map, func);
1448         }  /* for */
1449 }  /* stat_register_dumper_func */
1450
1451 /* ---------------------------------------------------------------------- */
1452
1453 /*
1454  * Helper: get an ir_op from an opcode.
1455  */
1456 ir_op *stat_get_op_from_opcode(ir_opcode code)
1457 {
1458         return opcode_find_entry(code, status->ir_op_hash);
1459 }  /* stat_get_op_from_opcode */
1460
1461 /**
1462  * Hook: A new IR op is registered.
1463  *
1464  * @param ctx  the hook context
1465  * @param op   the new IR opcode that was created.
1466  */
1467 static void stat_new_ir_op(void *ctx, ir_op *op)
1468 {
1469         (void) ctx;
1470         if (! status->stat_options)
1471                 return;
1472
1473         STAT_ENTER;
1474         {
1475                 graph_entry_t *graph = graph_get_entry(NULL, status->irg_hash);
1476
1477                 /* execute for side effect :-) */
1478                 (void)opcode_get_entry(op, graph->opcode_hash);
1479
1480                 pset_insert(status->ir_op_hash, op, op->code);
1481         }
1482         STAT_LEAVE;
1483 }  /* stat_new_ir_op */
1484
1485 /**
1486  * Hook: An IR op is freed.
1487  *
1488  * @param ctx  the hook context
1489  * @param op   the IR opcode that is freed
1490  */
1491 static void stat_free_ir_op(void *ctx, ir_op *op)
1492 {
1493         (void) ctx;
1494         (void) op;
1495         if (! status->stat_options)
1496                 return;
1497
1498         STAT_ENTER;
1499         {
1500         }
1501         STAT_LEAVE;
1502 }  /* stat_free_ir_op */
1503
1504 /**
1505  * Hook: A new node is created.
1506  *
1507  * @param ctx   the hook context
1508  * @param irg   the IR graph on which the node is created
1509  * @param node  the new IR node that was created
1510  */
1511 static void stat_new_node(void *ctx, ir_graph *irg, ir_node *node)
1512 {
1513         (void) ctx;
1514         (void) irg;
1515         if (! status->stat_options)
1516                 return;
1517
1518         /* do NOT count during dead node elimination */
1519         if (status->in_dead_node_elim)
1520                 return;
1521
1522         STAT_ENTER;
1523         {
1524                 node_entry_t *entry;
1525                 graph_entry_t *graph;
1526                 ir_op *op = stat_get_irn_op(node);
1527
1528                 /* increase global value */
1529                 graph = graph_get_entry(NULL, status->irg_hash);
1530                 entry = opcode_get_entry(op, graph->opcode_hash);
1531                 cnt_inc(&entry->new_node);
1532
1533                 /* increase local value */
1534                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1535                 entry = opcode_get_entry(op, graph->opcode_hash);
1536                 cnt_inc(&entry->new_node);
1537         }
1538         STAT_LEAVE;
1539 }  /* stat_new_node */
1540
1541 /**
1542  * Hook: A node is changed into an Id node
1543  *
1544  * @param ctx   the hook context
1545  * @param node  the IR node that will be turned into an ID
1546  */
1547 static void stat_turn_into_id(void *ctx, ir_node *node)
1548 {
1549         (void) ctx;
1550         if (! status->stat_options)
1551                 return;
1552
1553         STAT_ENTER;
1554         {
1555                 node_entry_t *entry;
1556                 graph_entry_t *graph;
1557                 ir_op *op = stat_get_irn_op(node);
1558
1559                 /* increase global value */
1560                 graph = graph_get_entry(NULL, status->irg_hash);
1561                 entry = opcode_get_entry(op, graph->opcode_hash);
1562                 cnt_inc(&entry->into_Id);
1563
1564                 /* increase local value */
1565                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1566                 entry = opcode_get_entry(op, graph->opcode_hash);
1567                 cnt_inc(&entry->into_Id);
1568         }
1569         STAT_LEAVE;
1570 }  /* stat_turn_into_id */
1571
1572 /**
1573  * Hook: A node is normalized
1574  *
1575  * @param ctx   the hook context
1576  * @param node  the IR node that was normalized
1577  */
1578 static void stat_normalize(void *ctx, ir_node *node)
1579 {
1580         (void) ctx;
1581         if (! status->stat_options)
1582                 return;
1583
1584         STAT_ENTER;
1585         {
1586                 node_entry_t *entry;
1587                 graph_entry_t *graph;
1588                 ir_op *op = stat_get_irn_op(node);
1589
1590                 /* increase global value */
1591                 graph = graph_get_entry(NULL, status->irg_hash);
1592                 entry = opcode_get_entry(op, graph->opcode_hash);
1593                 cnt_inc(&entry->normalized);
1594
1595                 /* increase local value */
1596                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1597                 entry = opcode_get_entry(op, graph->opcode_hash);
1598                 cnt_inc(&entry->normalized);
1599         }
1600         STAT_LEAVE;
1601 }  /* stat_normalize */
1602
1603 /**
1604  * Hook: A new graph was created
1605  *
1606  * @param ctx  the hook context
1607  * @param irg  the new IR graph that was created
1608  * @param ent  the entity of this graph
1609  */
1610 static void stat_new_graph(void *ctx, ir_graph *irg, ir_entity *ent)
1611 {
1612         (void) ctx;
1613         if (! status->stat_options)
1614                 return;
1615
1616         STAT_ENTER;
1617         {
1618                 /* execute for side effect :-) */
1619                 graph_entry_t * graph = graph_get_entry(irg, status->irg_hash);
1620
1621                 graph->ent           = ent;
1622                 graph->is_deleted    = 0;
1623                 graph->is_leaf       = 0;
1624                 graph->is_leaf_call  = 0;
1625                 graph->is_recursive  = 0;
1626                 graph->is_chain_call = 0;
1627                 graph->is_strict     = 1;
1628                 graph->is_analyzed   = 0;
1629         }
1630         STAT_LEAVE;
1631 }  /* stat_new_graph */
1632
1633 /**
1634  * Hook: A graph will be deleted
1635  *
1636  * @param ctx  the hook context
1637  * @param irg  the IR graph that will be deleted
1638  *
1639  * Note that we still hold the information for this graph
1640  * in our hash maps; only a flag is set, which prevents this
1641  * information from being changed. It is "frozen" from now on.
1642  */
1643 static void stat_free_graph(void *ctx, ir_graph *irg)
1644 {
1645         (void) ctx;
1646         if (! status->stat_options)
1647                 return;
1648
1649         STAT_ENTER;
1650         {
1651                 graph_entry_t *graph  = graph_get_entry(irg, status->irg_hash);
1652                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
1653
1654                 graph->is_deleted = 1;
1655
1656                 if (status->stat_options & FIRMSTAT_COUNT_DELETED) {
1657                         /* count the nodes of the graph now; it will be destroyed later */
1658                         update_graph_stat(global, graph);
1659                 }  /* if */
1660         }
1661         STAT_LEAVE;
1662 }  /* stat_free_graph */
1663
1664 /**
1665  * Hook: A walk over a graph is initiated. Do not count walks from statistic code.
1666  *
1667  * @param ctx  the hook context
1668  * @param irg  the IR graph that will be walked
1669  * @param pre  the pre walker
1670  * @param post the post walker
1671  */
1672 static void stat_irg_walk(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1673 {
1674         (void) ctx;
1675         (void) pre;
1676         (void) post;
1677         if (! status->stat_options)
1678                 return;
1679
1680         STAT_ENTER_SINGLE;
1681         {
1682                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1683
1684                 cnt_inc(&graph->cnt[gcnt_acc_walked]);
1685         }
1686         STAT_LEAVE;
1687 }  /* stat_irg_walk */
1688
1689 /**
1690  * Hook: A walk over a graph in block-wise order is initiated. Do not count walks from statistic code.
1691  *
1692  * @param ctx  the hook context
1693  * @param irg  the IR graph that will be walked
1694  * @param pre  the pre walker
1695  * @param post the post walker
1696  */
1697 static void stat_irg_walk_blkwise(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1698 {
1699         /* for now, do NOT differentiate between blockwise and normal */
1700         stat_irg_walk(ctx, irg, pre, post);
1701 }  /* stat_irg_walk_blkwise */
1702
1703 /**
1704  * Hook: A walk over the graph's blocks is initiated. Do not count walks from statistic code.
1705  *
1706  * @param ctx  the hook context
1707  * @param irg  the IR graph that will be walked
1708  * @param node the IR node
1709  * @param pre  the pre walker
1710  * @param post the post walker
1711  */
1712 static void stat_irg_block_walk(void *ctx, ir_graph *irg, ir_node *node, generic_func *pre, generic_func *post)
1713 {
1714         (void) ctx;
1715         (void) node;
1716         (void) pre;
1717         (void) post;
1718         if (! status->stat_options)
1719                 return;
1720
1721         STAT_ENTER_SINGLE;
1722         {
1723                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1724
1725                 cnt_inc(&graph->cnt[gcnt_acc_walked_blocks]);
1726         }
1727         STAT_LEAVE;
1728 }  /* stat_irg_block_walk */
1729
1730 /**
1731  * Called for every node that is removed due to an optimization.
1732  *
1733  * @param n     the IR node that will be removed
1734  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
1735  * @param kind  the optimization kind
1736  */
1737 static void removed_due_opt(ir_node *n, hmap_opt_entry_t *hmap, hook_opt_kind kind)
1738 {
1739         opt_entry_t *entry;
1740         ir_op *op = stat_get_irn_op(n);
1741
1742         /* ignore CSE for Constants */
1743         if (kind == HOOK_OPT_CSE && (is_Const(n) || is_SymConst(n)))
1744                 return;
1745
1746         /* increase global value */
1747         entry = opt_get_entry(op, hmap);
1748         cnt_inc(&entry->count);
1749 }  /* removed_due_opt */
1750
1751 /**
1752  * Hook: Some nodes were optimized into some others due to an optimization.
1753  *
1754  * @param ctx  the hook context
1755  */
1756 static void stat_merge_nodes(
1757     void *ctx,
1758     ir_node **new_node_array, int new_num_entries,
1759     ir_node **old_node_array, int old_num_entries,
1760     hook_opt_kind opt)
1761 {
1762         (void) ctx;
1763         if (! status->stat_options)
1764                 return;
1765
1766         STAT_ENTER;
1767         {
1768                 int i, j;
1769                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1770
1771                 cnt_inc(&status->num_opts[opt]);
1772                 if (status->reassoc_run)
1773                         opt = HOOK_OPT_REASSOC;
1774
1775                 for (i = 0; i < old_num_entries; ++i) {
1776                         /* nodes might be in both the new and the old array; if we find
1777                            a node in both sets, it is NOT removed */
1778                         for (j = 0; j < new_num_entries; ++j) {
1779                                 if (old_node_array[i] == new_node_array[j])
1780                                         break;
1781                         }  /* for */
1782                         if (j >= new_num_entries) {
1783                                 int xopt = opt;
1784
1785                                 /* sometimes we did not detect that it was replaced by a Const */
1786                                 if (opt == HOOK_OPT_CONFIRM && new_num_entries == 1) {
1787                                         ir_op *op = get_irn_op(new_node_array[0]);
1788
1789                                         if (op == op_Const || op == op_SymConst)
1790                                                 xopt = HOOK_OPT_CONFIRM_C;
1791                                 }  /* if */
1792
1793                                 removed_due_opt(old_node_array[i], graph->opt_hash[xopt], xopt);
1794                         }  /* if */
1795                 }  /* for */
1796         }
1797         STAT_LEAVE;
1798 }  /* stat_merge_nodes */
1799
1800 /**
1801  * Hook: Reassociation is started/stopped.
1802  *
1803  * @param ctx   the hook context
1804  * @param flag  if non-zero, reassociation is started, otherwise it is stopped
1805  */
1806 static void stat_reassociate(void *ctx, int flag)
1807 {
1808         (void) ctx;
1809         if (! status->stat_options)
1810                 return;
1811
1812         STAT_ENTER;
1813         {
1814                 status->reassoc_run = flag;
1815         }
1816         STAT_LEAVE;
1817 }  /* stat_reassociate */
1818
1819 /**
1820  * Hook: A node was lowered into other nodes
1821  *
1822  * @param ctx  the hook context
1823  * @param node the IR node that will be lowered
1824  */
1825 static void stat_lower(void *ctx, ir_node *node)
1826 {
1827         (void) ctx;
1828         if (! status->stat_options)
1829                 return;
1830
1831         STAT_ENTER;
1832         {
1833                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1834
1835                 removed_due_opt(node, graph->opt_hash[HOOK_LOWERED], HOOK_LOWERED);
1836         }
1837         STAT_LEAVE;
1838 }  /* stat_lower */
1839
1840 /**
1841  * Hook: A graph was inlined.
1842  *
1843  * @param ctx  the hook context
1844  * @param call the IR Call node that will be changed into the body of
1845  *             the called IR graph
1846  * @param called_irg  the IR graph representing the called routine
1847  */
1848 static void stat_inline(void *ctx, ir_node *call, ir_graph *called_irg)
1849 {
1850         (void) ctx;
1851         if (! status->stat_options)
1852                 return;
1853
1854         STAT_ENTER;
1855         {
1856                 ir_graph *irg = get_irn_irg(call);
1857                 graph_entry_t *i_graph = graph_get_entry(called_irg, status->irg_hash);
1858                 graph_entry_t *graph   = graph_get_entry(irg, status->irg_hash);
1859
1860                 cnt_inc(&graph->cnt[gcnt_acc_got_inlined]);
1861                 cnt_inc(&i_graph->cnt[gcnt_acc_was_inlined]);
1862         }
1863         STAT_LEAVE;
1864 }  /* stat_inline */
1865
1866 /**
1867  * Hook: A graph with tail-recursions was optimized.
1868  *
1869  * @param ctx  the hook context
1870  */
1871 static void stat_tail_rec(void *ctx, ir_graph *irg, int n_calls)
1872 {
1873         (void) ctx;
1874         if (! status->stat_options)
1875                 return;
1876
1877         STAT_ENTER;
1878         {
1879                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1880
1881                 graph->num_tail_recursion += n_calls;
1882         }
1883         STAT_LEAVE;
1884 }  /* stat_tail_rec */
1885
1886 /**
1887  * Hook: Strength reduction was performed on an iteration variable.
1888  *
1889  * @param ctx  the hook context
1890  */
1891 static void stat_strength_red(void *ctx, ir_graph *irg, ir_node *strong)
1892 {
1893         (void) ctx;
1894         if (! status->stat_options)
1895                 return;
1896
1897         STAT_ENTER;
1898         {
1899                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1900                 cnt_inc(&graph->cnt[gcnt_acc_strength_red]);
1901
1902                 removed_due_opt(strong, graph->opt_hash[HOOK_OPT_STRENGTH_RED], HOOK_OPT_STRENGTH_RED);
1903         }
1904         STAT_LEAVE;
1905 }  /* stat_strength_red */
1906
1907 /**
1908  * Hook: Start/Stop the dead node elimination.
1909  *
1910  * @param ctx  the hook context
1911  */
1912 static void stat_dead_node_elim(void *ctx, ir_graph *irg, int start)
1913 {
1914         (void) ctx;
1915         (void) irg;
1916         if (! status->stat_options)
1917                 return;
1918
1919         status->in_dead_node_elim = (start != 0);
1920 }  /* stat_dead_node_elim */
1921
1922 /**
1923  * Hook: if-conversion was tried.
1924  */
1925 static void stat_if_conversion(void *context, ir_graph *irg, ir_node *phi,
1926                                int pos, ir_node *mux, if_result_t reason)
1927 {
1928         (void) context;
1929         (void) phi;
1930         (void) pos;
1931         (void) mux;
1932         if (! status->stat_options)
1933                 return;
1934
1935         STAT_ENTER;
1936         {
1937                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1938
1939                 cnt_inc(&graph->cnt[gcnt_if_conv + reason]);
1940         }
1941         STAT_LEAVE;
1942 }  /* stat_if_conversion */
1943
1944 /**
1945  * Hook: real function call was optimized.
1946  */
1947 static void stat_func_call(void *context, ir_graph *irg, ir_node *call)
1948 {
1949         (void) context;
1950         (void) call;
1951         if (! status->stat_options)
1952                 return;
1953
1954         STAT_ENTER;
1955         {
1956                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1957
1958                 cnt_inc(&graph->cnt[gcnt_acc_real_func_call]);
1959         }
1960         STAT_LEAVE;
1961 }  /* stat_func_call */
1962
1963 /**
1964  * Hook: A multiply was replaced by a series of Shifts/Adds/Subs.
1965  *
1966  * @param ctx  the hook context
1967  */
1968 static void stat_arch_dep_replace_mul_with_shifts(void *ctx, ir_node *mul)
1969 {
1970         (void) ctx;
1971         if (! status->stat_options)
1972                 return;
1973
1974         STAT_ENTER;
1975         {
1976                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1977                 removed_due_opt(mul, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1978         }
1979         STAT_LEAVE;
1980 }  /* stat_arch_dep_replace_mul_with_shifts */
1981
1982 /**
1983  * Hook: A division by const was replaced.
1984  *
1985  * @param ctx   the hook context
1986  * @param node  the division node that will be optimized
1987  */
1988 static void stat_arch_dep_replace_division_by_const(void *ctx, ir_node *node)
1989 {
1990         (void) ctx;
1991         if (! status->stat_options)
1992                 return;
1993
1994         STAT_ENTER;
1995         {
1996                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1997                 removed_due_opt(node, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1998         }
1999         STAT_LEAVE;
2000 }  /* stat_arch_dep_replace_division_by_const */
2001
2002 /**
2003  * Update the register pressure of a block.
2004  *
2005  * @param irg        the irg containing the block
2006  * @param block      the block for which the reg pressure should be set
2007  * @param pressure   the pressure
2008  * @param class_name the name of the register class
2009  */
2010 void stat_be_block_regpressure(ir_graph *irg, ir_node *block, int pressure, const char *class_name)
2011 {
2012         if (! status->stat_options)
2013                 return;
2014
2015         STAT_ENTER;
2016         {
2017                 graph_entry_t        *graph = graph_get_entry(irg, status->irg_hash);
2018                 be_block_entry_t     *block_ent;
2019                 reg_pressure_entry_t *rp_ent;
2020
2021                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2022                 rp_ent    = OALLOCZ(&status->be_data, reg_pressure_entry_t);
2023
2024                 rp_ent->class_name = class_name;
2025                 rp_ent->pressure   = pressure;
2026
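                /* the entry is hashed by the class name pointer (HASH_PTR); this assumes
                   the backend always passes the same string object for a given class */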
2027                 pset_insert(block_ent->reg_pressure, rp_ent, HASH_PTR(class_name));
2028         }
2029         STAT_LEAVE;
2030 }  /* stat_be_block_regpressure */
2031
2032 /**
2033  * Update the distribution of ready nodes of a block
2034  *
2035  * @param irg        the irg containing the block
2036  * @param block      the block for which the ready-node distribution should be updated
2037  * @param num_ready  the number of ready nodes
2038  */
2039 void stat_be_block_sched_ready(ir_graph *irg, ir_node *block, int num_ready)
2040 {
2041         if (! status->stat_options)
2042                 return;
2043
2044         STAT_ENTER;
2045         {
2046                 graph_entry_t    *graph = graph_get_entry(irg, status->irg_hash);
2047                 be_block_entry_t *block_ent;
2048
2049                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2050
2051                 /* increase the counter of corresponding number of ready nodes */
2052                 stat_inc_int_distrib_tbl(block_ent->sched_ready, num_ready);
2053         }
2054         STAT_LEAVE;
2055 }  /* stat_be_block_sched_ready */
2056
2057 /**
2058  * Update the permutation statistic of a block.
2059  *
2060  * @param class_name the name of the register class
2061  * @param n_regs     number of registers in the register class
2062  * @param perm       the perm node
2063  * @param block      the block containing the perm
2064  * @param size       the size of the perm
2065  * @param real_size  number of pairs with different registers
2066  */
2067 void stat_be_block_stat_perm(const char *class_name, int n_regs, ir_node *perm, ir_node *block,
2068                              int size, int real_size)
2069 {
2070         if (! status->stat_options)
2071                 return;
2072
2073         STAT_ENTER;
2074         {
2075                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2076                 be_block_entry_t   *block_ent;
2077                 perm_class_entry_t *pc_ent;
2078                 perm_stat_entry_t  *ps_ent;
2079
2080                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2081                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2082                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2083
2084                 pc_ent->n_regs = n_regs;
2085
2086                 /* update information */
2087                 ps_ent->size      = size;
2088                 ps_ent->real_size = real_size;
2089         }
2090         STAT_LEAVE;
2091 }  /* stat_be_block_stat_perm */
2092
2093 /**
2094  * Update the permutation statistic of a single perm.
2095  *
2096  * @param class_name the name of the register class
2097  * @param perm       the perm node
2098  * @param block      the block containing the perm
2099  * @param is_chain   1 if chain, 0 if cycle
2100  * @param size       length of the cycle/chain
2101  * @param n_ops      the number of ops representing this cycle/chain after lowering
2102  */
2103 void stat_be_block_stat_permcycle(const char *class_name, ir_node *perm, ir_node *block,
2104                                   int is_chain, int size, int n_ops)
2105 {
2106         if (! status->stat_options)
2107                 return;
2108
2109         STAT_ENTER;
2110         {
2111                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2112                 be_block_entry_t   *block_ent;
2113                 perm_class_entry_t *pc_ent;
2114                 perm_stat_entry_t  *ps_ent;
2115
2116                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2117                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2118                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2119
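                /* chains are counted as copy operations, cycles as exchange operations,
                   hence the separate n_copies/n_exchg counters below */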
2120                 if (is_chain) {
2121                         ps_ent->n_copies += n_ops;
2122                         stat_inc_int_distrib_tbl(ps_ent->chains, size);
2123                 } else {
2124                         ps_ent->n_exchg += n_ops;
2125                         stat_inc_int_distrib_tbl(ps_ent->cycles, size);
2126                 }  /* if */
2127         }
2128         STAT_LEAVE;
2129 }  /* stat_be_block_stat_permcycle */
2130
2131 /* Dumps a statistics snapshot. */
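/*
 * Usage sketch (hypothetical arguments; the phase string is chosen freely by
 * the caller):
 *   stat_dump_snapshot("main.c", "after-lowering");
 * builds the base file name "firmstat-after-lowering-main.c" next to the given
 * file and hands it to all registered dumpers.
 */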
2132 void stat_dump_snapshot(const char *name, const char *phase)
2133 {
2134         char fname[2048];
2135         const char *p;
2136         int l;
2137
2138         if (! status->stat_options)
2139                 return;
2140
2141         STAT_ENTER;
2142         {
2143                 graph_entry_t *entry;
2144                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
2145
2146                 /*
2147                  * The constant counter is only kept globally, so we clear it here.
2148                  * Because of this, it does NOT contain the constants of DELETED
2149                  * graphs.
2150                  */
2151                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2152                         stat_const_clear(status);
2153
2154                 /* build the name */
2155                 p = strrchr(name, '/');
2156 #ifdef _WIN32
2157                 {
2158                         const char *q;
2159
2160                         q = strrchr(name, '\\');
2161
2162                         /* NULL might not be the smallest pointer */
2163                         if (q && (!p || q > p))
2164                                 p = q;
2165                 }
2166 #endif /* _WIN32 */
2167                 if (p) {
2168                         ++p;
2169                         l = p - name;
2170
2171                         if (l > (int) (sizeof(fname) - 1))
2172                                 l = sizeof(fname) - 1;
2173
2174                         memcpy(fname, name, l);
2175                         fname[l] = '\0';
2176                 } else {
2177                         fname[0] = '\0';
2178                         p = name;
2179                 }  /* if */
2180                 strncat(fname, "firmstat-", sizeof(fname) - strlen(fname) - 1);
2181                 strncat(fname, phase,       sizeof(fname) - strlen(fname) - 1);
2182                 strncat(fname, "-",         sizeof(fname) - strlen(fname) - 1);
2183                 strncat(fname, p,           sizeof(fname) - strlen(fname) - 1);
2184
2185                 stat_dump_init(fname);
2186
2187                 /* calculate the graph statistics */
2188                 for (entry = pset_first(status->irg_hash); entry; entry = pset_next(status->irg_hash)) {
2189                         if (entry->irg == NULL) {
2190                                 /* special entry for the global count */
2191                                 continue;
2192                         }  /* if */
2193                         if (! entry->is_deleted) {
2194                                 /* the graph is still alive, count the nodes on it */
2195                                 update_graph_stat(global, entry);
2196                         }  /* if */
2197                 }  /* for */
2198
2199                 /* some calculations are dependent, we pushed them on the wait_q */
2200                 while (! pdeq_empty(status->wait_q)) {
2201                         entry = pdeq_getr(status->wait_q);
2202
2203                         update_graph_stat_2(global, entry);
2204                 }  /* while */
2205
2206                 /* dump per graph */
2207                 for (entry = pset_first(status->irg_hash); entry; entry = pset_next(status->irg_hash)) {
2208                         if (entry->irg == NULL) {
2209                                 /* special entry for the global count */
2210                                 continue;
2211                         }  /* if */
2212
2213                         if (! entry->is_deleted || status->stat_options & FIRMSTAT_COUNT_DELETED) {
2214                                 stat_dump_graph(entry);
2215                                 stat_dump_registered(entry);
2216                         }  /* if */
2217
2218                         if (! entry->is_deleted) {
2219                                 /* clear the counters that are not accumulated */
2220                                 graph_clear_entry(entry, 0);
2221                         }  /* if */
2222                 }  /* for */
2223
2224                 /* dump global */
2225                 stat_dump_graph(global);
2226
2227                 /* dump the const info */
2228                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2229                         stat_dump_consts(&status->const_info);
2230
2231                 /* dump the parameter distribution */
2232                 stat_dump_param_tbl(status->dist_param_cnt, global);
2233
2234                 /* dump the optimization counter and clear them */
2235                 stat_dump_opt_cnt(status->num_opts, ARR_SIZE(status->num_opts));
2236                 clear_optimization_counter();
2237
2238                 stat_dump_finish();
2239
2240                 stat_finish_pattern_history(fname);
2241
2242                 /* clear the global counters here */
2243                 {
2244                         node_entry_t *entry;
2245
2246                         for (entry = pset_first(global->opcode_hash); entry; entry = pset_next(global->opcode_hash)) {
2247                                 opcode_clear_entry(entry);
2248                         }  /* for */
2249                         /* clear all global counters */
2250                         graph_clear_entry(global, /*all=*/1);
2251                 }
2252         }
2253         STAT_LEAVE;
2254 }  /* stat_dump_snapshot */
2255
2256 struct pass_t {
2257         ir_prog_pass_t pass;
2258         const char     *fname;
2259         const char     *phase;
2260 };
2261
2262 /**
2263  * Wrapper to run stat_dump_snapshot() as a ir_prog wrapper.
2264  */
2265 static int stat_dump_snapshot_wrapper(ir_prog *irp, void *context)
2266 {
2267         struct pass_t *pass = context;
2268
2269         (void)irp;
2270         stat_dump_snapshot(pass->fname, pass->phase);
2271         return 0;
2272 }  /* stat_dump_snapshot_wrapper */
2273
2274 /**
2275  * Ensure that no verifier is run from the wrapper.
2276  */
2277 static int no_verify(ir_prog *prog, void *ctx)
2278 {
2279         (void)prog;
2280         (void)ctx;
2281         return 0;
2282 }
2283
2284 /**
2285  * Ensure that no dumper is run from the wrapper.
2286  */
2287 static void no_dump(ir_prog *prog, void *ctx, unsigned idx)
2288 {
2289         (void)prog;
2290         (void)ctx;
2291         (void)idx;
2292 }
2293
2294 /* create an ir_prog pass */
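/*
 * Usage sketch (hypothetical file/phase names; running the pass is left to the
 * caller's pass management):
 *   ir_prog_pass_t *snap = stat_dump_snapshot_pass(NULL, "main.c", "after-opt");
 * When executed, the pass simply calls stat_dump_snapshot("main.c", "after-opt").
 */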
2295 ir_prog_pass_t *stat_dump_snapshot_pass(
2296         const char *name, const char *fname, const char *phase)
2297 {
2298         struct pass_t *pass = XMALLOCZ(struct pass_t);
2299
2300         def_prog_pass_constructor(
2301                 &pass->pass, name ? name : "stat_snapshot", stat_dump_snapshot_wrapper);
2302         pass->fname = fname;
2303         pass->phase = phase;
2304
2305         /* no dump/verify */
2306         pass->pass.dump_irprog   = no_dump;
2307         pass->pass.verify_irprog = no_verify;
2308
2309         return &pass->pass;
2310 }  /* stat_dump_snapshot_pass */
2311
2312 /** the hook entries for the Firm statistics module */
2313 static hook_entry_t stat_hooks[hook_last];
2314
2315 /* initialize the statistics module. */
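/*
 * Usage sketch (hypothetical option set; the FIRMSTAT_* flags may be or'ed
 * together):
 *   firm_init_stat(FIRMSTAT_ENABLED | FIRMSTAT_COUNT_CONSTS | FIRMSTAT_CSV_OUTPUT);
 * Without FIRMSTAT_ENABLED the call is a no-op.
 */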
2316 void firm_init_stat(unsigned enable_options)
2317 {
2318 #define X(a)  a, sizeof(a)-1
2319 #define HOOK(h, fkt) \
2320         stat_hooks[h].hook._##h = fkt; register_hook(h, &stat_hooks[h])
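        /* HOOK(h, fkt) stores fkt in the hook entry for h and registers that entry,
           so the statistics callbacks below are driven by the generic hook mechanism */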
2321         unsigned num = 0;
2322
2323         if (! (enable_options & FIRMSTAT_ENABLED))
2324                 return;
2325
2326         status = XMALLOCZ(stat_info_t);
2327
2328         /* enable statistics */
2329         status->stat_options = enable_options & FIRMSTAT_ENABLED ? enable_options : 0;
2330
2331         /* register all hooks */
2332         HOOK(hook_new_ir_op,                          stat_new_ir_op);
2333         HOOK(hook_free_ir_op,                         stat_free_ir_op);
2334         HOOK(hook_new_node,                           stat_new_node);
2335         HOOK(hook_turn_into_id,                       stat_turn_into_id);
2336         HOOK(hook_normalize,                          stat_normalize);
2337         HOOK(hook_new_graph,                          stat_new_graph);
2338         HOOK(hook_free_graph,                         stat_free_graph);
2339         HOOK(hook_irg_walk,                           stat_irg_walk);
2340         HOOK(hook_irg_walk_blkwise,                   stat_irg_walk_blkwise);
2341         HOOK(hook_irg_block_walk,                     stat_irg_block_walk);
2342         HOOK(hook_merge_nodes,                        stat_merge_nodes);
2343         HOOK(hook_reassociate,                        stat_reassociate);
2344         HOOK(hook_lower,                              stat_lower);
2345         HOOK(hook_inline,                             stat_inline);
2346         HOOK(hook_tail_rec,                           stat_tail_rec);
2347         HOOK(hook_strength_red,                       stat_strength_red);
2348         HOOK(hook_dead_node_elim,                     stat_dead_node_elim);
2349         HOOK(hook_if_conversion,                      stat_if_conversion);
2350         HOOK(hook_func_call,                          stat_func_call);
2351         HOOK(hook_arch_dep_replace_mul_with_shifts,   stat_arch_dep_replace_mul_with_shifts);
2352         HOOK(hook_arch_dep_replace_division_by_const, stat_arch_dep_replace_division_by_const);
2353
2354         obstack_init(&status->cnts);
2355         obstack_init(&status->be_data);
2356
2357         /* create the hash-tables */
2358         status->irg_hash   = new_pset(graph_cmp, 8);
2359         status->ir_op_hash = new_pset(opcode_cmp_2, 1);
2360
2361         /* create the wait queue */
2362         status->wait_q     = new_pdeq();
2363
2364         if (enable_options & FIRMSTAT_COUNT_STRONG_OP) {
2365                 /* build the pseudo-ops */
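                /* num is unsigned and pre-decremented, so the pseudo-ops get codes
                   counting down from UINT_MAX and cannot clash with real opcodes */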
2366
2367                 _op_Phi0.code    = --num;
2368                 _op_Phi0.name    = new_id_from_chars(X("Phi0"));
2369
2370                 _op_PhiM.code    = --num;
2371                 _op_PhiM.name    = new_id_from_chars(X("PhiM"));
2372
2373                 _op_ProjM.code   = --num;
2374                 _op_ProjM.name   = new_id_from_chars(X("ProjM"));
2375
2376                 _op_MulC.code    = --num;
2377                 _op_MulC.name    = new_id_from_chars(X("MulC"));
2378
2379                 _op_DivC.code    = --num;
2380                 _op_DivC.name    = new_id_from_chars(X("DivC"));
2381
2382                 _op_ModC.code    = --num;
2383                 _op_ModC.name    = new_id_from_chars(X("ModC"));
2384
2385                 _op_DivModC.code = --num;
2386                 _op_DivModC.name = new_id_from_chars(X("DivModC"));
2387
2388                 _op_QuotC.code   = --num;
2389                 _op_QuotC.name   = new_id_from_chars(X("QuotC"));
2390
2391                 status->op_Phi0    = &_op_Phi0;
2392                 status->op_PhiM    = &_op_PhiM;
2393                 status->op_ProjM   = &_op_ProjM;
2394                 status->op_MulC    = &_op_MulC;
2395                 status->op_DivC    = &_op_DivC;
2396                 status->op_ModC    = &_op_ModC;
2397                 status->op_DivModC = &_op_DivModC;
2398                 status->op_QuotC   = &_op_QuotC;
2399         } else {
2400                 status->op_Phi0    = NULL;
2401                 status->op_PhiM    = NULL;
2402                 status->op_ProjM   = NULL;
2403                 status->op_MulC    = NULL;
2404                 status->op_DivC    = NULL;
2405                 status->op_ModC    = NULL;
2406                 status->op_DivModC = NULL;
2407                 status->op_QuotC   = NULL;
2408         }  /* if */
2409
2410         /* for Florian: count the Sel depth */
2411         if (enable_options & FIRMSTAT_COUNT_SELS) {
2412                 _op_SelSel.code    = --num;
2413                 _op_SelSel.name    = new_id_from_chars(X("Sel(Sel)"));
2414
2415                 _op_SelSelSel.code = --num;
2416                 _op_SelSelSel.name = new_id_from_chars(X("Sel(Sel(Sel))"));
2417
2418                 status->op_SelSel    = &_op_SelSel;
2419                 status->op_SelSelSel = &_op_SelSelSel;
2420         } else {
2421                 status->op_SelSel    = NULL;
2422                 status->op_SelSelSel = NULL;
2423         }  /* if */
2424
2425         /* register the dumper */
2426         stat_register_dumper(&simple_dumper);
2427
2428         if (enable_options & FIRMSTAT_CSV_OUTPUT)
2429                 stat_register_dumper(&csv_dumper);
2430
2431         /* initialize the pattern hash */
2432         stat_init_pattern_history(enable_options & FIRMSTAT_PATTERN_ENABLED);
2433
2434         /* initialize the Const options */
2435         if (enable_options & FIRMSTAT_COUNT_CONSTS)
2436                 stat_init_const_cnt(status);
2437
2438         /* distribution table for parameter counts */
2439         status->dist_param_cnt = stat_new_int_distrib_tbl();
2440
2441         clear_optimization_counter();
2442
2443 #undef HOOK
2444 #undef X
2445 }  /* firm_init_stat */
2446
2447 /**
2448  * Frees all dumper structures.
2449  */
2450 static void stat_term_dumper(void)
2451 {
2452         dumper_t *dumper, *next_dumper;
2453
2454         for (dumper = status->dumper; dumper; /* iteration done in loop body */ ) {
2455                 if (dumper->func_map)
2456                         del_pset(dumper->func_map);
2457
2458                 next_dumper = dumper->next;
2459                 free(dumper);
2460                 dumper = next_dumper;
2461         }  /* for */
2462 }  /* stat_term_dumper */
2463
2464
2465 /* Terminates the statistics module, frees all memory. */
2466 void stat_term(void)
2467 {
2468         if (status != (stat_info_t *)&status_disable) {
2469                 obstack_free(&status->be_data, NULL);
2470                 obstack_free(&status->cnts, NULL);
2471
2472                 stat_term_dumper();
2473
2474                 xfree(status);
2475                 status = (stat_info_t *)&status_disable;
2476         }
2477 }  /* stat_term */
2478
2479 /* returns 1 if statistics were initialized, 0 otherwise */
2480 int stat_is_active(void)
2481 {
2482         return status != (stat_info_t *)&status_disable;
2483 }  /* stat_is_active */
2484
2485 #else
2486
2487 /* initialize the statistics module. */
2488 void firm_init_stat(unsigned enable_options) {}
2489
2490 /* Dumps a statistics snapshot */
2491 void stat_dump_snapshot(const char *name, const char *phase) {}
2492
2493 /* terminates the statistics module, frees all memory */
2494 void stat_term(void) {}
2495
2496 #endif /* FIRM_STATISTICS */