5335d02e18016645f3652edcc15cdbbb10028b1f
[libfirm] / ir / stat / firmstat.c
1 /*
2  * Copyright (C) 1995-2010 University of Karlsruhe.  All right reserved.
3  *
4  * This file is part of libFirm.
5  *
6  * This file may be distributed and/or modified under the terms of the
7  * GNU General Public License version 2 as published by the Free Software
8  * Foundation and appearing in the file LICENSE.GPL included in the
9  * packaging of this file.
10  *
11  * Licensees holding valid libFirm Professional Edition licenses may use
12  * this file in accordance with the libFirm Commercial License.
13  * Agreement provided with the Software.
14  *
15  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17  * PURPOSE.
18  */
19
20 /**
21  * @file
22  * @brief   Statistics for Firm.
23  * @author  Michael Beck
24  * @version $Id$
25  */
26 #include "config.h"
27
28 #ifdef FIRM_STATISTICS
29
30 #include <stdio.h>
31 #include <stdlib.h>
32 #include <string.h>
33
34 #include "irouts.h"
35 #include "irdump.h"
36 #include "hashptr.h"
37 #include "firmstat_t.h"
38 #include "irpass_t.h"
39 #include "pattern.h"
40 #include "dags.h"
41 #include "stat_dmp.h"
42 #include "xmalloc.h"
43 #include "irhooks.h"
44 #include "util.h"
45
/*
 * need this to be static:
 * Special pseudo opcodes that we use to count some interesting cases.
 * NOTE(review): file-scope identifiers starting with '_' are reserved for
 * the implementation by the C standard; renaming them would touch the rest
 * of this file, so they are left as-is.
 */

/** The Phi0 pseudo op: a zero-arity Phi created during SSA construction. */
static ir_op _op_Phi0;

/** The PhiM pseudo op: used to count memory Phi's separately. */
static ir_op _op_PhiM;

/** The Mul-by-Const pseudo op. */
static ir_op _op_MulC;

/** The Div-by-Const pseudo op. */
static ir_op _op_DivC;

/** The Mod-by-Const pseudo op. */
static ir_op _op_ModC;

/** The memory Proj pseudo op (a Proj with mode_M). */
static ir_op _op_ProjM;

/** A Sel of a Sel. */
static ir_op _op_SelSel;

/** A Sel of a Sel of a Sel. */
static ir_op _op_SelSelSel;

/* ---------------------------------------------------------------------------------- */

/** Marks the begin of a statistic (hook) function. */
#define STAT_ENTER    ++status->recursive

/** Marks the end of a statistic (hook) function. */
#define STAT_LEAVE    --status->recursive

/** Allows to enter a statistic function only when we are not already in a hook. */
#define STAT_ENTER_SINGLE    do { if (status->recursive > 0) return; ++status->recursive; } while (0)

/**
 * Global status.
 * 'status' initially points at the read-only zero word below, so every
 * field reads as 0 while statistics are disabled.
 * NOTE(review): writing through 'status' (e.g. STAT_ENTER) while it still
 * points at const storage would be undefined behavior -- presumably the
 * hooks only run after 'status' is redirected to a real stat_info_t;
 * confirm against the initialization code.
 */
static const unsigned status_disable = 0;
static stat_info_t *status = (stat_info_t *)&status_disable;
93
94 /**
95  * Compare two elements of the opcode hash.
96  */
97 static int opcode_cmp(const void *elt, const void *key)
98 {
99         const node_entry_t *e1 = (const node_entry_t*)elt;
100         const node_entry_t *e2 = (const node_entry_t*)key;
101
102         return e1->op->code - e2->op->code;
103 }  /* opcode_cmp */
104
105 /**
106  * Compare two elements of the graph hash.
107  */
108 static int graph_cmp(const void *elt, const void *key)
109 {
110         const graph_entry_t *e1 = (const graph_entry_t*)elt;
111         const graph_entry_t *e2 = (const graph_entry_t*)key;
112
113         return e1->irg != e2->irg;
114 }  /* graph_cmp */
115
116 /**
117  * Compare two elements of the optimization hash.
118  */
119 static int opt_cmp(const void *elt, const void *key)
120 {
121         const opt_entry_t *e1 = (const opt_entry_t*)elt;
122         const opt_entry_t *e2 = (const opt_entry_t*)key;
123
124         return e1->op->code != e2->op->code;
125 }  /* opt_cmp */
126
127 /**
128  * Compare two elements of the block/extbb hash.
129  */
130 static int block_cmp(const void *elt, const void *key)
131 {
132         const block_entry_t *e1 = (const block_entry_t*)elt;
133         const block_entry_t *e2 = (const block_entry_t*)key;
134
135         /* it's enough to compare the block number */
136         return e1->block_nr != e2->block_nr;
137 }  /* block_cmp */
138
139 /**
140  * Compare two elements of the be_block hash.
141  */
142 static int be_block_cmp(const void *elt, const void *key)
143 {
144         const be_block_entry_t *e1 = (const be_block_entry_t*)elt;
145         const be_block_entry_t *e2 = (const be_block_entry_t*)key;
146
147         return e1->block_nr != e2->block_nr;
148 }  /* be_block_cmp */
149
150 /**
151  * Compare two elements of reg pressure hash.
152  */
153 static int reg_pressure_cmp(const void *elt, const void *key)
154 {
155         const reg_pressure_entry_t *e1 = (const reg_pressure_entry_t*)elt;
156         const reg_pressure_entry_t *e2 = (const reg_pressure_entry_t*)key;
157
158         return e1->class_name != e2->class_name;
159 }  /* reg_pressure_cmp */
160
161 /**
162  * Compare two elements of the perm_stat hash.
163  */
164 static int perm_stat_cmp(const void *elt, const void *key)
165 {
166         const perm_stat_entry_t *e1 = (const perm_stat_entry_t*)elt;
167         const perm_stat_entry_t *e2 = (const perm_stat_entry_t*)key;
168
169         return e1->perm != e2->perm;
170 }  /* perm_stat_cmp */
171
172 /**
173  * Compare two elements of the perm_class hash.
174  */
175 static int perm_class_cmp(const void *elt, const void *key)
176 {
177         const perm_class_entry_t *e1 = (const perm_class_entry_t*)elt;
178         const perm_class_entry_t *e2 = (const perm_class_entry_t*)key;
179
180         return e1->class_name != e2->class_name;
181 }  /* perm_class_cmp */
182
183 /**
184  * Compare two elements of the ir_op hash.
185  */
186 static int opcode_cmp_2(const void *elt, const void *key)
187 {
188         const ir_op *e1 = (const ir_op*)elt;
189         const ir_op *e2 = (const ir_op*)key;
190
191         return e1->code != e2->code;
192 }  /* opcode_cmp_2 */
193
194 /**
195  * Compare two elements of the address_mark set.
196  */
197 static int address_mark_cmp(const void *elt, const void *key, size_t size)
198 {
199         const address_mark_entry_t *e1 = (const address_mark_entry_t*)elt;
200         const address_mark_entry_t *e2 = (const address_mark_entry_t*)key;
201         (void) size;
202
203         /* compare only the nodes, the rest is used as data container */
204         return e1->node != e2->node;
205 }  /* address_mark_cmp */
206
207 /**
208  * Clear all counter in a node_entry_t.
209  */
210 static void opcode_clear_entry(node_entry_t *elem)
211 {
212         cnt_clr(&elem->cnt_alive);
213         cnt_clr(&elem->new_node);
214         cnt_clr(&elem->into_Id);
215         cnt_clr(&elem->normalized);
216 }  /* opcode_clear_entry */
217
218 /**
219  * Returns the associates node_entry_t for an ir_op (and allocates
220  * one if not yet available).
221  *
222  * @param op    the IR operation
223  * @param hmap  a hash map containing ir_op* -> node_entry_t*
224  */
225 static node_entry_t *opcode_get_entry(const ir_op *op, hmap_node_entry_t *hmap)
226 {
227         node_entry_t key;
228         node_entry_t *elem;
229
230         key.op = op;
231
232         elem = (node_entry_t*)pset_find(hmap, &key, op->code);
233         if (elem)
234                 return elem;
235
236         elem = OALLOCZ(&status->cnts, node_entry_t);
237
238         /* clear counter */
239         opcode_clear_entry(elem);
240
241         elem->op = op;
242
243         return (node_entry_t*)pset_insert(hmap, elem, op->code);
244 }  /* opcode_get_entry */
245
246 /**
247  * Returns the associates ir_op for an opcode
248  *
249  * @param code  the IR opcode
250  * @param hmap  the hash map containing opcode -> ir_op*
251  */
252 static ir_op *opcode_find_entry(ir_opcode code, hmap_ir_op *hmap)
253 {
254         ir_op key;
255
256         key.code = code;
257         return (ir_op*)pset_find(hmap, &key, code);
258 }  /* opcode_find_entry */
259
260 /**
261  * Clears all counter in a graph_entry_t.
262  *
263  * @param elem  the graph entry
264  * @param all   if non-zero, clears all counters, else leave accumulated ones
265  */
266 static void graph_clear_entry(graph_entry_t *elem, int all)
267 {
268         int i;
269
270         /* clear accumulated / non-accumulated counter */
271         for (i = all ? 0 : _gcnt_non_acc; i < _gcnt_last; ++i) {
272                 cnt_clr(&elem->cnt[i]);
273         }  /* for */
274
275         if (elem->block_hash) {
276                 del_pset(elem->block_hash);
277                 elem->block_hash = NULL;
278         }  /* if */
279
280         if (elem->extbb_hash) {
281                 del_pset(elem->extbb_hash);
282                 elem->extbb_hash = NULL;
283         }  /* if */
284
285         obstack_free(&elem->recalc_cnts, NULL);
286         obstack_init(&elem->recalc_cnts);
287 }  /* graph_clear_entry */
288
289 /**
290  * Returns the associated graph_entry_t for an IR graph.
291  *
292  * @param irg   the IR graph, NULL for the global counter
293  * @param hmap  the hash map containing ir_graph* -> graph_entry_t*
294  */
295 static graph_entry_t *graph_get_entry(ir_graph *irg, hmap_graph_entry_t *hmap)
296 {
297         graph_entry_t key;
298         graph_entry_t *elem;
299         size_t i;
300
301         key.irg = irg;
302
303         elem = (graph_entry_t*)pset_find(hmap, &key, HASH_PTR(irg));
304
305         if (elem) {
306                 /* create hash map backend block information */
307                 if (! elem->be_block_hash)
308                         elem->be_block_hash = new_pset(be_block_cmp, 5);
309
310                 return elem;
311         }  /* if */
312
313         /* allocate a new one */
314         elem = OALLOCZ(&status->cnts, graph_entry_t);
315         obstack_init(&elem->recalc_cnts);
316
317         /* clear counter */
318         graph_clear_entry(elem, 1);
319
320         /* new hash table for opcodes here  */
321         elem->opcode_hash   = new_pset(opcode_cmp, 5);
322         elem->address_mark  = new_set(address_mark_cmp, 5);
323         elem->irg           = irg;
324
325         /* these hash tables are created on demand */
326         elem->block_hash = NULL;
327         elem->extbb_hash = NULL;
328
329         for (i = 0; i < sizeof(elem->opt_hash)/sizeof(elem->opt_hash[0]); ++i)
330                 elem->opt_hash[i] = new_pset(opt_cmp, 4);
331
332         return (graph_entry_t*)pset_insert(hmap, elem, HASH_PTR(irg));
333 }  /* graph_get_entry */
334
335 /**
336  * Clear all counter in an opt_entry_t.
337  */
338 static void opt_clear_entry(opt_entry_t *elem)
339 {
340         cnt_clr(&elem->count);
341 }  /* opt_clear_entry */
342
343 /**
344  * Returns the associated opt_entry_t for an IR operation.
345  *
346  * @param op    the IR operation
347  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
348  */
349 static opt_entry_t *opt_get_entry(const ir_op *op, hmap_opt_entry_t *hmap)
350 {
351         opt_entry_t key;
352         opt_entry_t *elem;
353
354         key.op = op;
355
356         elem = (opt_entry_t*)pset_find(hmap, &key, op->code);
357         if (elem)
358                 return elem;
359
360         elem = OALLOCZ(&status->cnts, opt_entry_t);
361
362         /* clear new counter */
363         opt_clear_entry(elem);
364
365         elem->op = op;
366
367         return (opt_entry_t*)pset_insert(hmap, elem, op->code);
368 }  /* opt_get_entry */
369
370 /**
371  * clears all counter in a block_entry_t
372  */
373 static void block_clear_entry(block_entry_t *elem)
374 {
375         int i;
376
377         for (i = 0; i < _bcnt_last; ++i)
378                 cnt_clr(&elem->cnt[i]);
379 }  /* block_clear_entry */
380
381 /**
382  * Returns the associated block_entry_t for an block.
383  *
384  * @param block_nr  an IR  block number
385  * @param hmap      a hash map containing long -> block_entry_t
386  */
387 static block_entry_t *block_get_entry(struct obstack *obst, long block_nr, hmap_block_entry_t *hmap)
388 {
389         block_entry_t key;
390         block_entry_t *elem;
391
392         key.block_nr = block_nr;
393
394         elem = (block_entry_t*)pset_find(hmap, &key, block_nr);
395         if (elem)
396                 return elem;
397
398         elem = OALLOCZ(obst, block_entry_t);
399
400         /* clear new counter */
401         block_clear_entry(elem);
402
403         elem->block_nr = block_nr;
404
405         return (block_entry_t*)pset_insert(hmap, elem, block_nr);
406 }  /* block_get_entry */
407
408 /**
409  * Clear all sets in be_block_entry_t.
410  */
411 static void be_block_clear_entry(be_block_entry_t *elem)
412 {
413         if (elem->reg_pressure)
414                 del_pset(elem->reg_pressure);
415
416         if (elem->sched_ready)
417                 stat_delete_distrib_tbl(elem->sched_ready);
418
419         if (elem->perm_class_stat)
420                 del_pset(elem->perm_class_stat);
421
422         elem->reg_pressure    = new_pset(reg_pressure_cmp, 5);
423         elem->sched_ready     = stat_new_int_distrib_tbl();
424         elem->perm_class_stat = new_pset(perm_class_cmp, 5);
425 }  /* be_block_clear_entry */
426
427 /**
428  * Returns the associated be_block_entry_t for an block.
429  *
430  * @param block_nr  an IR  block number
431  * @param hmap      a hash map containing long -> be_block_entry_t
432  */
433 static be_block_entry_t *be_block_get_entry(struct obstack *obst, long block_nr, hmap_be_block_entry_t *hmap)
434 {
435         be_block_entry_t key;
436         be_block_entry_t *elem;
437
438         key.block_nr = block_nr;
439
440         elem = (be_block_entry_t*)pset_find(hmap, &key, block_nr);
441         if (elem)
442                 return elem;
443
444         elem = OALLOCZ(obst, be_block_entry_t);
445
446         /* clear new counter */
447         be_block_clear_entry(elem);
448
449         elem->block_nr = block_nr;
450
451         return (be_block_entry_t*)pset_insert(hmap, elem, block_nr);
452 }  /* be_block_get_entry */
453
454 /**
455  * clears all sets in perm_class_entry_t
456  */
457 static void perm_class_clear_entry(perm_class_entry_t *elem)
458 {
459         if (elem->perm_stat)
460                 del_pset(elem->perm_stat);
461
462         elem->perm_stat = new_pset(perm_stat_cmp, 5);
463 }  /* perm_class_clear_entry */
464
465 /**
466  * Returns the associated perm_class entry for a register class.
467  *
468  * @param class_name  the register class name
469  * @param hmap        a hash map containing class_name -> perm_class_entry_t
470  */
471 static perm_class_entry_t *perm_class_get_entry(struct obstack *obst, const char *class_name,
472                                                 hmap_perm_class_entry_t *hmap)
473 {
474         perm_class_entry_t key;
475         perm_class_entry_t *elem;
476
477         key.class_name = class_name;
478
479         elem = (perm_class_entry_t*)pset_find(hmap, &key, HASH_PTR(class_name));
480         if (elem)
481                 return elem;
482
483         elem = OALLOCZ(obst, perm_class_entry_t);
484
485         /* clear new counter */
486         perm_class_clear_entry(elem);
487
488         elem->class_name = class_name;
489
490         return (perm_class_entry_t*)pset_insert(hmap, elem, HASH_PTR(class_name));
491 }  /* perm_class_get_entry */
492
493 /**
494  * clears all sets in perm_stat_entry_t
495  */
496 static void perm_stat_clear_entry(perm_stat_entry_t *elem)
497 {
498         if (elem->chains)
499                 stat_delete_distrib_tbl(elem->chains);
500
501         if (elem->cycles)
502                 stat_delete_distrib_tbl(elem->cycles);
503
504         elem->chains = stat_new_int_distrib_tbl();
505         elem->cycles = stat_new_int_distrib_tbl();
506 }  /* perm_stat_clear_entry */
507
508 /**
509  * Returns the associated perm_stat entry for a perm.
510  *
511  * @param perm      the perm node
512  * @param hmap      a hash map containing perm -> perm_stat_entry_t
513  */
514 static perm_stat_entry_t *perm_stat_get_entry(struct obstack *obst, ir_node *perm, hmap_perm_stat_entry_t *hmap)
515 {
516         perm_stat_entry_t key;
517         perm_stat_entry_t *elem;
518
519         key.perm = perm;
520
521         elem = (perm_stat_entry_t*)pset_find(hmap, &key, HASH_PTR(perm));
522         if (elem)
523                 return elem;
524
525         elem = OALLOCZ(obst, perm_stat_entry_t);
526
527         /* clear new counter */
528         perm_stat_clear_entry(elem);
529
530         elem->perm = perm;
531
532         return (perm_stat_entry_t*)pset_insert(hmap, elem, HASH_PTR(perm));
533 }  /* perm_stat_get_entry */
534
535 /**
536  * Clear optimizations counter,
537  */
538 static void clear_optimization_counter(void)
539 {
540         int i;
541         for (i = 0; i < FS_OPT_MAX; ++i)
542                 cnt_clr(&status->num_opts[i]);
543 }
544
/**
 * Returns the ir_op for an IR-node,
 * handles special cases and returns pseudo op codes.
 *
 * Maps interesting shapes (Phi0, memory Phi/Proj, Mul/Div/Mod by
 * constant, nested Sel chains) onto the extra pseudo ops so they are
 * counted separately; for everything else the real op is returned.
 * Each pseudo op is only substituted when its status->op_* slot is set.
 *
 * @param node  an IR node
 */
static ir_op *stat_get_irn_op(ir_node *node)
{
	ir_op *op = get_irn_op(node);
	unsigned opc = op->code;

	switch (opc) {
	case iro_Phi:
		if (get_irn_arity(node) == 0) {
			/* special case, a Phi0 node, count on extra counter */
			op = status->op_Phi0 ? status->op_Phi0 : op;
		} else if (get_irn_mode(node) == mode_M) {
			/* special case, a memory Phi node, count on extra counter */
			op = status->op_PhiM ? status->op_PhiM : op;
		}  /* if */
		break;
	case iro_Proj:
		if (get_irn_mode(node) == mode_M) {
			/* special case, a memory Proj node, count on extra counter */
			op = status->op_ProjM ? status->op_ProjM : op;
		}  /* if */
		break;
	case iro_Mul:
		if (is_Const(get_Mul_left(node)) || is_Const(get_Mul_right(node))) {
			/* special case, a multiply by a const, count on extra counter */
			op = status->op_MulC ? status->op_MulC : op;
		}  /* if */
		break;
	case iro_Div:
		if (is_Const(get_Div_right(node))) {
			/* special case, a division by a const, count on extra counter */
			op = status->op_DivC ? status->op_DivC : op;
		}  /* if */
		break;
	case iro_Mod:
		if (is_Const(get_Mod_right(node))) {
			/* special case, a modulo by a const, count on extra counter */
			op = status->op_ModC ? status->op_ModC : op;
		}  /* if */
		break;
	case iro_Sel:
		if (is_Sel(get_Sel_ptr(node))) {
			/* special case, a Sel of a Sel, count on extra counter */
			op = status->op_SelSel ? status->op_SelSel : op;
			if (is_Sel(get_Sel_ptr(get_Sel_ptr(node)))) {
				/* special case, a Sel of a Sel of a Sel, count on extra counter */
				op = status->op_SelSelSel ? status->op_SelSelSel : op;
			}  /* if */
		}  /* if */
		break;
	default:
		;
	}  /* switch */

	return op;
}  /* stat_get_irn_op */
606
/**
 * Update the per-block counters for one node.
 *
 * For a Block node: marks start/end blocks and counts the control-flow
 * in/out edges.  For any other node: counts it in its block (data Phis
 * on an extra counter) and classifies each predecessor edge as
 * intra-block or inter-block.  Keep-alive edges from End are skipped.
 *
 * NOTE(review): the name is a long-standing typo for "update_block_info";
 * it is kept because callers elsewhere in this file use it.
 *
 * @param node   the node to account
 * @param graph  the graph entry the node belongs to
 */
static void undate_block_info(ir_node *node, graph_entry_t *graph)
{
	ir_op *op = get_irn_op(node);
	ir_node *block;
	block_entry_t *b_entry;
	int i, arity;

	/* check for block */
	if (op == op_Block) {
		arity = get_irn_arity(node);
		b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(node), graph->block_hash);
		/* mark start/end block to allow to filter them out */
		if (node == get_irg_start_block(graph->irg))
			b_entry->is_start = 1;
		else if (node == get_irg_end_block(graph->irg))
			b_entry->is_end = 1;

		/* count all incoming edges */
		for (i = 0; i < arity; ++i) {
			ir_node *pred = get_irn_n(node, i);
			ir_node *other_block = get_nodes_block(pred);
			block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);

			cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
			cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
		}  /* for */
		return;
	}  /* if */

	block   = get_nodes_block(node);
	b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(block), graph->block_hash);

	if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
		/* count data Phi per block */
		cnt_inc(&b_entry->cnt[bcnt_phi_data]);
	}  /* if */

	/* we have a new node in our block */
	cnt_inc(&b_entry->cnt[bcnt_nodes]);

	/* don't count keep-alive edges */
	if (is_End(node))
		return;

	arity = get_irn_arity(node);

	for (i = 0; i < arity; ++i) {
		ir_node *pred = get_irn_n(node, i);
		ir_node *other_block;

		other_block = get_nodes_block(pred);

		if (other_block == block)
			cnt_inc(&b_entry->cnt[bcnt_edges]); /* an intra-block edge */
		else {
			block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);

			cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
			cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
		}  /* if */
	}  /* for */
}  /* undate_block_info */
672
/**
 * Update the extended-basic-block counters for one node.
 *
 * Mirrors undate_block_info() at extended-block granularity: for a Block
 * node, inter-extbb control edges are counted (edges within the same
 * extbb are ignored); for any other node, the node and its predecessor
 * edges are accounted to its extbb.  Keep-alive edges from End are
 * skipped.
 *
 * @param node   the node to account
 * @param graph  the graph entry the node belongs to
 */
static void update_extbb_info(ir_node *node, graph_entry_t *graph)
{
	ir_op *op = get_irn_op(node);
	ir_extblk *extbb;
	extbb_entry_t *eb_entry;
	int i, arity;

	/* check for block */
	if (op == op_Block) {
		extbb = get_nodes_extbb(node);
		arity = get_irn_arity(node);
		eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);

		/* count all incoming edges */
		for (i = 0; i < arity; ++i) {
			ir_node *pred = get_irn_n(node, i);
			ir_extblk *other_extbb = get_nodes_extbb(pred);

			if (extbb != other_extbb) {
				extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);

				cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
				cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
			}  /* if */
		}  /* for */
		return;
	}  /* if */

	extbb    = get_nodes_extbb(node);
	eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);

	if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
		/* count data Phi per extbb */
		cnt_inc(&eb_entry->cnt[bcnt_phi_data]);
	}  /* if */

	/* we have a new node in our block */
	cnt_inc(&eb_entry->cnt[bcnt_nodes]);

	/* don't count keep-alive edges */
	if (is_End(node))
		return;

	arity = get_irn_arity(node);

	for (i = 0; i < arity; ++i) {
		ir_node *pred = get_irn_n(node, i);
		ir_extblk *other_extbb = get_nodes_extbb(pred);

		if (other_extbb == extbb)
			cnt_inc(&eb_entry->cnt[bcnt_edges]);    /* an intra-extbb edge */
		else {
			extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);

			cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
			cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
		}  /* if */
	}  /* for */
}  /* update_extbb_info */
735
/**
 * Calculates how many arguments of the call are constant or local
 * addresses, updates the corresponding graph counters and the parameter
 * count distribution.
 *
 * @param graph  the graph entry containing the call
 * @param call   the Call node
 */
static void analyse_params_of_Call(graph_entry_t *graph, ir_node *call)
{
	int i, num_const_args = 0, num_local_adr = 0;
	int n = get_Call_n_params(call);

	for (i = 0; i < n; ++i) {
		ir_node *param = get_Call_param(call, i);

		if (is_irn_constlike(param))
			++num_const_args;
		else if (is_Sel(param)) {
			/* follow the Sel chain down to its base address */
			ir_node *base = param;

			do {
				base = get_Sel_ptr(base);
			} while (is_Sel(base));

			/* NOTE(review): compares against the frame of current_ir_graph,
			 * not graph->irg -- presumably both are the same graph during
			 * the walk; confirm against the callers. */
			if (base == get_irg_frame(current_ir_graph))
				++num_local_adr;
		}

	}  /* for */

	if (num_const_args > 0)
		cnt_inc(&graph->cnt[gcnt_call_with_cnst_arg]);
	if (num_const_args == n)
		cnt_inc(&graph->cnt[gcnt_call_with_all_cnst_arg]);
	if (num_local_adr > 0)
		cnt_inc(&graph->cnt[gcnt_call_with_local_adr]);

	stat_inc_int_distrib_tbl(status->dist_param_cnt, n);
}  /* analyse_params_of_Call */
772
/**
 * Update info on calls.
 *
 * Counts the call, clears the leaf flag, classifies the callee
 * (known/external/indirect), detects direct recursion, checks the
 * chain-call property via a dominator walk towards the end block, and
 * finally accounts the call's parameters.
 *
 * @param call   The call
 * @param graph  The graph entry containing the call
 */
static void stat_update_call(ir_node *call, graph_entry_t *graph)
{
	ir_node   *block = get_nodes_block(call);
	ir_node   *ptr = get_Call_ptr(call);
	ir_entity *ent = NULL;
	ir_graph  *callee = NULL;

	/*
	 * If the block is bad, the whole subgraph will collapse later
	 * so do not count this call.
	 * This happens in dead code.
	 */
	if (is_Bad(block))
		return;

	cnt_inc(&graph->cnt[gcnt_all_calls]);

	/* found a call, this function is not a leaf */
	graph->is_leaf = 0;

	if (is_SymConst(ptr)) {
		if (get_SymConst_kind(ptr) == symconst_addr_ent) {
			/* ok, we seem to know the entity */
			ent = get_SymConst_entity(ptr);
			callee = get_entity_irg(ent);

			/* it is recursive, if it calls itself at least once */
			if (callee == graph->irg)
				graph->is_recursive = 1;
			/* no IR graph for the entity: external function */
			if (callee == NULL)
				cnt_inc(&graph->cnt[gcnt_external_calls]);
		}  /* if */
	} else {
		/* indirect call, we could not predict the callee */
		cnt_inc(&graph->cnt[gcnt_indirect_calls]);

		/* NOT a leaf call */
		graph->is_leaf_call = LCS_NON_LEAF_CALL;
	}  /* if */

	/* check, if it's a chain-call: Then, the call-block
	 * must dominate the end block. */
	{
		ir_node *curr = get_irg_end_block(graph->irg);
		int depth = get_Block_dom_depth(block);

		/* walk up the idom chain from the end block until we either reach
		 * the call block or leave its dominance depth */
		for (; curr != block && get_Block_dom_depth(curr) > depth;) {
			curr = get_Block_idom(curr);

			if (! curr || !is_Block(curr))
				break;
		}  /* for */

		if (curr != block)
			graph->is_chain_call = 0;
	}

	/* check, if the callee is a leaf */
	if (callee) {
		graph_entry_t *called = graph_get_entry(callee, status->irg_hash);

		/* only trust the flag if the callee was already analyzed */
		if (called->is_analyzed) {
			if (! called->is_leaf)
				graph->is_leaf_call = LCS_NON_LEAF_CALL;
		}  /* if */
	}  /* if */

	analyse_params_of_Call(graph, call);
}  /* stat_update_call */
848
849 /**
850  * Update info on calls for graphs on the wait queue.
851  */
852 static void stat_update_call_2(ir_node *call, graph_entry_t *graph)
853 {
854         ir_node   *block = get_nodes_block(call);
855         ir_node   *ptr = get_Call_ptr(call);
856         ir_entity *ent = NULL;
857         ir_graph  *callee = NULL;
858
859         /*
860          * If the block is bad, the whole subgraph will collapse later
861          * so do not count this call.
862          * This happens in dead code.
863          */
864         if (is_Bad(block))
865                 return;
866
867         if (is_SymConst(ptr)) {
868                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
869                         /* ok, we seems to know the entity */
870                         ent = get_SymConst_entity(ptr);
871                         callee = get_entity_irg(ent);
872                 }  /* if */
873         }  /* if */
874
875         /* check, if the callee is a leaf */
876         if (callee) {
877                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
878
879                 assert(called->is_analyzed);
880
881                 if (! called->is_leaf)
882                         graph->is_leaf_call = LCS_NON_LEAF_CALL;
883         } else
884                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
885 }  /* stat_update_call_2 */
886
887 /**
888  * Find the base address and entity of an Sel node.
889  *
890  * @param sel  the node
891  *
892  * @return the base address.
893  */
894 static ir_node *find_base_adr(ir_node *sel)
895 {
896         ir_node *ptr = get_Sel_ptr(sel);
897
898         while (is_Sel(ptr)) {
899                 sel = ptr;
900                 ptr = get_Sel_ptr(sel);
901         }
902         return ptr;
903 }  /* find_base_adr */
904
905 /**
906  * Update info on Load/Store address statistics.
907  */
908 static void stat_update_address(ir_node *node, graph_entry_t *graph)
909 {
910         unsigned opc = get_irn_opcode(node);
911         ir_node *base;
912         ir_graph *irg;
913
914         switch (opc) {
915         case iro_SymConst:
916                 /* a global address */
917                 cnt_inc(&graph->cnt[gcnt_global_adr]);
918                 break;
919         case iro_Sel:
920                 base = find_base_adr(node);
921                 irg = current_ir_graph;
922                 if (base == get_irg_tls(irg)) {
923                         /* a TLS variable, like a global. */
924                         cnt_inc(&graph->cnt[gcnt_global_adr]);
925                 } else if (base == get_irg_frame(irg)) {
926                         /* a local Variable. */
927                         cnt_inc(&graph->cnt[gcnt_local_adr]);
928                 } else {
929                         /* Pointer access */
930                         if (is_Proj(base) && skip_Proj(get_Proj_pred(base)) == get_irg_start(irg)) {
931                                 /* pointer access through parameter, check for THIS */
932                                 ir_entity *ent = get_irg_entity(irg);
933
934                                 if (ent != NULL) {
935                                         ir_type *ent_tp = get_entity_type(ent);
936
937                                         if (get_method_calling_convention(ent_tp) & cc_this_call) {
938                                                 if (get_Proj_proj(base) == 0) {
939                                                         /* THIS pointer */
940                                                         cnt_inc(&graph->cnt[gcnt_this_adr]);
941                                                         goto end_parameter;
942                                                 }  /* if */
943                                         }  /* if */
944                                 }  /* if */
945                                 /* other parameter */
946                                 cnt_inc(&graph->cnt[gcnt_param_adr]);
947 end_parameter: ;
948                         } else {
949                                 /* unknown Pointer access */
950                                 cnt_inc(&graph->cnt[gcnt_other_adr]);
951                         }  /* if */
952                 }  /* if */
953         default:
954                 ;
955         }  /* switch */
956 }  /* stat_update_address */
957
958 /**
959  * Walker for reachable nodes count.
960  */
961 static void update_node_stat(ir_node *node, void *env)
962 {
963         graph_entry_t *graph = (graph_entry_t*)env;
964         node_entry_t *entry;
965
966         ir_op *op = stat_get_irn_op(node);
967         int i, arity = get_irn_arity(node);
968
969         entry = opcode_get_entry(op, graph->opcode_hash);
970
971         cnt_inc(&entry->cnt_alive);
972         cnt_add_i(&graph->cnt[gcnt_edges], arity);
973
974         /* count block edges */
975         undate_block_info(node, graph);
976
977         /* count extended block edges */
978         if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
979                 if (graph->irg != get_const_code_irg())
980                         update_extbb_info(node, graph);
981         }  /* if */
982
983         /* handle statistics for special node types */
984
985         switch (op->code) {
986         case iro_Call:
987                 /* check for properties that depends on calls like recursion/leaf/indirect call */
988                 stat_update_call(node, graph);
989                 break;
990         case iro_Load:
991                 /* check address properties */
992                 stat_update_address(get_Load_ptr(node), graph);
993                 break;
994         case iro_Store:
995                 /* check address properties */
996                 stat_update_address(get_Store_ptr(node), graph);
997                 break;
998         case iro_Phi:
999                 /* check for non-strict Phi nodes */
1000                 for (i = arity - 1; i >= 0; --i) {
1001                         ir_node *pred = get_Phi_pred(node, i);
1002                         if (is_Unknown(pred)) {
1003                                 /* found an Unknown predecessor, graph is not strict */
1004                                 graph->is_strict = 0;
1005                                 break;
1006                         }
1007                 }
1008         default:
1009                 ;
1010         }  /* switch */
1011
1012         /* we want to count the constant IN nodes, not the CSE'ed constant's itself */
1013         if (status->stat_options & FIRMSTAT_COUNT_CONSTS) {
1014                 int i;
1015
1016                 for (i = get_irn_arity(node) - 1; i >= 0; --i) {
1017                         ir_node *pred = get_irn_n(node, i);
1018
1019                         if (is_Const(pred)) {
1020                                 /* check properties of constants */
1021                                 stat_update_const(status, pred, graph);
1022                         }  /* if */
1023                 }  /* for */
1024         }  /* if */
1025 }  /* update_node_stat */
1026
1027 /**
1028  * Walker for reachable nodes count for graphs on the wait_q.
1029  */
1030 static void update_node_stat_2(ir_node *node, void *env)
1031 {
1032         graph_entry_t *graph = (graph_entry_t*)env;
1033
1034         /* check for properties that depends on calls like recursion/leaf/indirect call */
1035         if (is_Call(node))
1036                 stat_update_call_2(node, graph);
1037 }  /* update_node_stat_2 */
1038
1039 /**
1040  * Get the current address mark.
1041  */
1042 static unsigned get_adr_mark(graph_entry_t *graph, ir_node *node)
1043 {
1044         address_mark_entry_t *value = (address_mark_entry_t*)set_find(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1045
1046         return value ? value->mark : 0;
1047 }  /* get_adr_mark */
1048
1049 /**
1050  * Set the current address mark.
1051  */
1052 static void set_adr_mark(graph_entry_t *graph, ir_node *node, unsigned val)
1053 {
1054         address_mark_entry_t *value = (address_mark_entry_t*)set_insert(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1055
1056         value->mark = val;
1057 }  /* set_adr_mark */
1058
1059 #undef DUMP_ADR_MODE
1060
1061 #ifdef DUMP_ADR_MODE
1062 /**
1063  * a vcg attribute hook: Color a node with a different color if
1064  * it's identified as a part of an address expression or at least referenced
1065  * by an address expression.
1066  */
/**
 * a vcg attribute hook: Color a node with a different color if
 * it's identified as a part of an address expression or at least referenced
 * by an address expression.
 *
 * @param F      the output file
 * @param node   the node to color
 * @param local  if non-NULL, the node to inspect instead of @p node
 *
 * @return 1 if a color was emitted, 0 to let the default coloring apply
 */
static int stat_adr_mark_hook(FILE *F, ir_node *node, ir_node *local)
{
	ir_node *n           = local ? local : node;
	ir_graph *irg        = get_irn_irg(n);
	graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
	unsigned mark        = get_adr_mark(graph, n);

	/* purple: address calculation itself */
	if (mark & MARK_ADDRESS_CALC)
		fprintf(F, "color: purple");
	/* pink: referenced only by address calculations */
	else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
		fprintf(F, "color: pink");
	/* lightblue: referenced by both address and non-address users */
	else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
		fprintf(F, "color: lightblue");
	else
		return 0;

	/* I know the color! */
	return 1;
}  /* stat_adr_mark_hook */
1086 #endif /* DUMP_ADR_MODE */
1087
1088 /**
1089  * Return the "operational" mode of a Firm node.
1090  */
1091 static ir_mode *get_irn_op_mode(ir_node *node)
1092 {
1093         switch (get_irn_opcode(node)) {
1094         case iro_Load:
1095                 return get_Load_mode(node);
1096         case iro_Store:
1097                 return get_irn_mode(get_Store_value(node));
1098         case iro_Div:
1099                 return get_irn_mode(get_Div_left(node));
1100         case iro_Mod:
1101                 return get_irn_mode(get_Mod_left(node));
1102         case iro_Cmp:
1103                 /* Cmp is no address calculation, or is it? */
1104         default:
1105                 return get_irn_mode(node);
1106         }  /* switch */
1107 }  /* get_irn_op_mode */
1108
1109 /**
1110  * Post-walker that marks every node that is an address calculation.
1111  *
1112  * Users of a node must be visited first. We ensure this by
1113  * calling it in the post of an outs walk. This should work even in cycles,
1114  * while the normal pre-walk will not.
1115  */
/**
 * Post-walker that marks every node that is an address calculation.
 *
 * Users of a node must be visited first. We ensure this by
 * calling it in the post of an outs walk. This should work even in cycles,
 * while the normal pre-walk will not.
 *
 * @param node  the visited node
 * @param env   the graph_entry_t whose address_mark set is updated
 */
static void mark_address_calc(ir_node *node, void *env)
{
	graph_entry_t *graph = (graph_entry_t*)env;
	ir_mode *mode = get_irn_op_mode(node);
	int i, n;
	/* by default, predecessors are marked as referenced by a non-address user */
	unsigned mark_preds = MARK_REF_NON_ADR;

	/* only data values take part in address calculations */
	if (! mode_is_data(mode))
		return;

	if (mode_is_reference(mode)) {
		/* a reference is calculated here, we are sure */
		set_adr_mark(graph, node, MARK_ADDRESS_CALC);

		mark_preds = MARK_REF_ADR;
	} else {
		unsigned mark = get_adr_mark(graph, node);

		if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR) {
			/*
			 * this node has no reference mode, but is only
			 * referenced by address calculations
			 */
			mark_preds = MARK_REF_ADR;
		}  /* if */
	}  /* if */

	/* mark all predecessors (marks accumulate via OR over all users) */
	for (i = 0, n = get_irn_arity(node); i < n; ++i) {
		ir_node *pred = get_irn_n(node, i);

		mode = get_irn_op_mode(pred);
		if (! mode_is_data(mode))
			continue;

		set_adr_mark(graph, pred, get_adr_mark(graph, pred) | mark_preds);
	}  /* for */
}  /* mark_address_calc */
1154
/**
 * Post-walker that counts address-calculation operations using the
 * marks previously computed by mark_address_calc(): nodes that are pure
 * address computations (or referenced only by them) count as "pure",
 * nodes referenced by both address and non-address users as "all".
 */
1162 static void count_adr_ops(ir_node *node, void *env)
1163 {
1164         graph_entry_t *graph = (graph_entry_t*)env;
1165         unsigned mark        = get_adr_mark(graph, node);
1166
1167         if (mark & MARK_ADDRESS_CALC)
1168                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1169         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1170                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1171         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1172                 cnt_inc(&graph->cnt[gcnt_all_adr_ops]);
1173 }  /* count_adr_ops */
1174
1175 /**
1176  * Called for every graph when the graph is either deleted or stat_dump_snapshot()
1177  * is called, must recalculate all statistic info.
1178  *
1179  * @param global    The global entry
1180  * @param graph     The current entry
1181  */
/**
 * Called for every graph when the graph is either deleted or stat_dump_snapshot()
 * is called, must recalculate all statistic info.
 *
 * Recomputes alive-node counters, leaf/recursive/chain-call/strict flags,
 * block and address statistics, DAG and pattern statistics, and finally
 * accumulates the graph counters into the global entry.
 *
 * @param global    The global entry
 * @param graph     The current entry
 */
static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph)
{
	node_entry_t *entry;
	int i;

	/* clear first the alive counter in the graph */
	foreach_pset(graph->opcode_hash, node_entry_t*, entry) {
		cnt_clr(&entry->cnt_alive);
	}  /* foreach_pset */

	/* set pessimistic values (the node walk below downgrades them) */
	graph->is_leaf       = 1;
	graph->is_leaf_call  = LCS_UNKNOWN;
	graph->is_recursive  = 0;
	graph->is_chain_call = 1;
	graph->is_strict     = 1;

	/* create new block counter */
	graph->block_hash = new_pset(block_cmp, 5);

	/* we need dominator info */
	if (graph->irg != get_const_code_irg()) {
		assure_doms(graph->irg);

		if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
			/* we need extended basic blocks */
			compute_extbb(graph->irg);

			/* create new extbb counter */
			graph->extbb_hash = new_pset(block_cmp, 5);
		}  /* if */
	}  /* if */

	/* count the nodes in the graph */
	irg_walk_graph(graph->irg, update_node_stat, NULL, graph);

#if 0
	/* Uncomment this code if chain-call means call exact one. */
	entry = opcode_get_entry(op_Call, graph->opcode_hash);

	/* check if we have more than 1 call */
	if (cnt_gt(entry->cnt_alive, 1))
		graph->is_chain_call = 0;
#endif

	/* recursive functions are never chain calls, leafs don't have calls */
	if (graph->is_recursive || graph->is_leaf)
		graph->is_chain_call = 0;

	/* assume we walk every graph only ONCE, we could sum here the global count */
	foreach_pset(graph->opcode_hash, node_entry_t*, entry) {
		node_entry_t *g_entry = opcode_get_entry(entry->op, global->opcode_hash);

		/* update the node counter */
		cnt_add(&g_entry->cnt_alive, &entry->cnt_alive);
	}  /* foreach_pset */

	/* count the number of address calculation */
	if (graph->irg != get_const_code_irg()) {
		ir_graph *rem = current_ir_graph;

		assure_irg_outs(graph->irg);

		/* Must be done on the outs graph */
		current_ir_graph = graph->irg;
		irg_out_walk(get_irg_start(graph->irg), NULL, mark_address_calc, graph);
		current_ir_graph = rem;

#ifdef DUMP_ADR_MODE
		/* register the vcg hook and dump the graph for test */
		set_dump_node_vcgattr_hook(stat_adr_mark_hook);
		dump_ir_block_graph(graph->irg, "-adr");
		set_dump_node_vcgattr_hook(NULL);
#endif /* DUMP_ADR_MODE */

		irg_walk_graph(graph->irg, NULL, count_adr_ops, graph);
	}  /* if */

	/* count the DAG's */
	if (status->stat_options & FIRMSTAT_COUNT_DAG)
		count_dags_in_graph(global, graph);

	/* calculate the patterns of this graph */
	stat_calc_pattern_history(graph->irg);

	/* leaf function did not call others */
	if (graph->is_leaf)
		graph->is_leaf_call = LCS_NON_LEAF_CALL;
	else if (graph->is_leaf_call == LCS_UNKNOWN) {
		/* we still don't know if this graph calls leaf-functions, so enqueue */
		pdeq_putl(status->wait_q, graph);
	}  /* if */

	/* we have analyzed this graph */
	graph->is_analyzed = 1;

	/* accumulate all counter's */
	for (i = 0; i < _gcnt_last; ++i)
		cnt_add(&global->cnt[i], &graph->cnt[i]);
}  /* update_graph_stat */
1282
1283 /**
1284  * Called for every graph that was on the wait_q in stat_dump_snapshot()
1285  * must finish all statistic info calculations.
1286  *
1287  * @param global    The global entry
1288  * @param graph     The current entry
1289  */
1290 static void update_graph_stat_2(graph_entry_t *global, graph_entry_t *graph)
1291 {
1292         (void) global;
1293         if (graph->is_deleted) {
1294                 /* deleted, ignore */
1295                 return;
1296         }
1297
1298         if (graph->irg) {
1299                 /* count the nodes in the graph */
1300                 irg_walk_graph(graph->irg, update_node_stat_2, NULL, graph);
1301
1302                 if (graph->is_leaf_call == LCS_UNKNOWN)
1303                         graph->is_leaf_call = LCS_LEAF_CALL;
1304         }  /* if */
1305 }  /* update_graph_stat_2 */
1306
1307 /**
1308  * Register a dumper.
1309  */
1310 static void stat_register_dumper(const dumper_t *dumper)
1311 {
1312         dumper_t *p = XMALLOC(dumper_t);
1313
1314         memcpy(p, dumper, sizeof(*p));
1315
1316         p->next        = status->dumper;
1317         p->status      = status;
1318         status->dumper = p;
1319
1320         /* FIXME: memory leak */
1321 }  /* stat_register_dumper */
1322
1323 /**
1324  * Dumps the statistics of an IR graph.
1325  */
1326 static void stat_dump_graph(graph_entry_t *entry)
1327 {
1328         dumper_t *dumper;
1329
1330         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1331                 if (dumper->dump_graph)
1332                         dumper->dump_graph(dumper, entry);
1333         }  /* for */
1334 }  /* stat_dump_graph */
1335
1336 /**
1337  * Calls all registered dumper functions.
1338  */
1339 static void stat_dump_registered(graph_entry_t *entry)
1340 {
1341         dumper_t *dumper;
1342
1343         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1344                 if (dumper->func_map) {
1345                         dump_graph_FUNC func;
1346
1347                         foreach_pset(dumper->func_map, dump_graph_FUNC, func)
1348                                 func(dumper, entry);
1349                 }  /* if */
1350         }  /* for */
1351 }  /* stat_dump_registered */
1352
1353 /**
1354  * Dumps a constant table.
1355  */
1356 static void stat_dump_consts(const constant_info_t *tbl)
1357 {
1358         dumper_t *dumper;
1359
1360         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1361                 if (dumper->dump_const_tbl)
1362                         dumper->dump_const_tbl(dumper, tbl);
1363         }  /* for */
1364 }  /* stat_dump_consts */
1365
1366 /**
1367  * Dumps the parameter distribution
1368  */
1369 static void stat_dump_param_tbl(const distrib_tbl_t *tbl, graph_entry_t *global)
1370 {
1371         dumper_t *dumper;
1372
1373         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1374                 if (dumper->dump_param_tbl)
1375                         dumper->dump_param_tbl(dumper, tbl, global);
1376         }  /* for */
1377 }  /* stat_dump_param_tbl */
1378
1379 /**
1380  * Dumps the optimization counter
1381  */
1382 static void stat_dump_opt_cnt(const counter_t *tbl, unsigned len)
1383 {
1384         dumper_t *dumper;
1385
1386         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1387                 if (dumper->dump_opt_cnt)
1388                         dumper->dump_opt_cnt(dumper, tbl, len);
1389         }  /* for */
1390 }  /* stat_dump_opt_cnt */
1391
1392 /**
1393  * Initialize the dumper.
1394  */
1395 static void stat_dump_init(const char *name)
1396 {
1397         dumper_t *dumper;
1398
1399         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1400                 if (dumper->init)
1401                         dumper->init(dumper, name);
1402         }  /* for */
1403 }  /* stat_dump_init */
1404
1405 /**
1406  * Finish the dumper.
1407  */
1408 static void stat_dump_finish(void)
1409 {
1410         dumper_t *dumper;
1411
1412         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1413                 if (dumper->finish)
1414                         dumper->finish(dumper);
1415         }  /* for */
1416 }  /* stat_dump_finish */
1417
1418 /**
1419  * Register an additional function for all dumper.
1420  */
1421 void stat_register_dumper_func(dump_graph_FUNC func)
1422 {
1423         dumper_t *dumper;
1424
1425         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1426                 if (! dumper->func_map)
1427                         dumper->func_map = pset_new_ptr(3);
1428                 pset_insert_ptr(dumper->func_map, (void*)func);
1429         }  /* for */
1430 }  /* stat_register_dumper_func */
1431
1432 /* ---------------------------------------------------------------------- */
1433
1434 /*
1435  * Helper: get an ir_op from an opcode.
1436  */
1437 ir_op *stat_get_op_from_opcode(ir_opcode code)
1438 {
1439         return opcode_find_entry(code, status->ir_op_hash);
1440 }  /* stat_get_op_from_opcode */
1441
1442 /**
1443  * Hook: A new IR op is registered.
1444  *
1445  * @param ctx  the hook context
1446  * @param op   the new IR opcode that was created.
1447  */
1448 static void stat_new_ir_op(void *ctx, ir_op *op)
1449 {
1450         (void) ctx;
1451         if (! status->stat_options)
1452                 return;
1453
1454         STAT_ENTER;
1455         {
1456                 graph_entry_t *graph = graph_get_entry(NULL, status->irg_hash);
1457
1458                 /* execute for side effect :-) */
1459                 (void)opcode_get_entry(op, graph->opcode_hash);
1460
1461                 pset_insert(status->ir_op_hash, op, op->code);
1462         }
1463         STAT_LEAVE;
1464 }  /* stat_new_ir_op */
1465
1466 /**
1467  * Hook: An IR op is freed.
1468  *
1469  * @param ctx  the hook context
1470  * @param op   the IR opcode that is freed
1471  */
/**
 * Hook: An IR op is freed.
 *
 * @param ctx  the hook context
 * @param op   the IR opcode that is freed
 */
static void stat_free_ir_op(void *ctx, ir_op *op)
{
	(void) ctx;
	(void) op;
	if (! status->stat_options)
		return;

	STAT_ENTER;
	{
		/* intentionally empty: nothing to record yet, but the
		 * STAT_ENTER/STAT_LEAVE pair keeps the hook structure uniform */
	}
	STAT_LEAVE;
}  /* stat_free_ir_op */
1484
1485 /**
1486  * Hook: A new node is created.
1487  *
1488  * @param ctx   the hook context
1489  * @param irg   the IR graph on which the node is created
1490  * @param node  the new IR node that was created
1491  */
1492 static void stat_new_node(void *ctx, ir_graph *irg, ir_node *node)
1493 {
1494         (void) ctx;
1495         (void) irg;
1496         if (! status->stat_options)
1497                 return;
1498
1499         /* do NOT count during dead node elimination */
1500         if (status->in_dead_node_elim)
1501                 return;
1502
1503         STAT_ENTER;
1504         {
1505                 node_entry_t *entry;
1506                 graph_entry_t *graph;
1507                 ir_op *op = stat_get_irn_op(node);
1508
1509                 /* increase global value */
1510                 graph = graph_get_entry(NULL, status->irg_hash);
1511                 entry = opcode_get_entry(op, graph->opcode_hash);
1512                 cnt_inc(&entry->new_node);
1513
1514                 /* increase local value */
1515                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1516                 entry = opcode_get_entry(op, graph->opcode_hash);
1517                 cnt_inc(&entry->new_node);
1518         }
1519         STAT_LEAVE;
1520 }  /* stat_new_node */
1521
1522 /**
1523  * Hook: A node is changed into a Id node
1524  *
1525  * @param ctx   the hook context
1526  * @param node  the IR node that will be turned into an ID
1527  */
1528 static void stat_turn_into_id(void *ctx, ir_node *node)
1529 {
1530         (void) ctx;
1531         if (! status->stat_options)
1532                 return;
1533
1534         STAT_ENTER;
1535         {
1536                 node_entry_t *entry;
1537                 graph_entry_t *graph;
1538                 ir_op *op = stat_get_irn_op(node);
1539
1540                 /* increase global value */
1541                 graph = graph_get_entry(NULL, status->irg_hash);
1542                 entry = opcode_get_entry(op, graph->opcode_hash);
1543                 cnt_inc(&entry->into_Id);
1544
1545                 /* increase local value */
1546                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1547                 entry = opcode_get_entry(op, graph->opcode_hash);
1548                 cnt_inc(&entry->into_Id);
1549         }
1550         STAT_LEAVE;
1551 }  /* stat_turn_into_id */
1552
1553 /**
1554  * Hook: A node is normalized
1555  *
1556  * @param ctx   the hook context
1557  * @param node  the IR node that was normalized
1558  */
1559 static void stat_normalize(void *ctx, ir_node *node)
1560 {
1561         (void) ctx;
1562         if (! status->stat_options)
1563                 return;
1564
1565         STAT_ENTER;
1566         {
1567                 node_entry_t *entry;
1568                 graph_entry_t *graph;
1569                 ir_op *op = stat_get_irn_op(node);
1570
1571                 /* increase global value */
1572                 graph = graph_get_entry(NULL, status->irg_hash);
1573                 entry = opcode_get_entry(op, graph->opcode_hash);
1574                 cnt_inc(&entry->normalized);
1575
1576                 /* increase local value */
1577                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1578                 entry = opcode_get_entry(op, graph->opcode_hash);
1579                 cnt_inc(&entry->normalized);
1580         }
1581         STAT_LEAVE;
1582 }  /* stat_normalize */
1583
1584 /**
1585  * Hook: A new graph was created
1586  *
1587  * @param ctx  the hook context
1588  * @param irg  the new IR graph that was created
1589  * @param ent  the entity of this graph
1590  */
1591 static void stat_new_graph(void *ctx, ir_graph *irg, ir_entity *ent)
1592 {
1593         (void) ctx;
1594         if (! status->stat_options)
1595                 return;
1596
1597         STAT_ENTER;
1598         {
1599                 /* execute for side effect :-) */
1600                 graph_entry_t * graph = graph_get_entry(irg, status->irg_hash);
1601
1602                 graph->ent           = ent;
1603                 graph->is_deleted    = 0;
1604                 graph->is_leaf       = 0;
1605                 graph->is_leaf_call  = 0;
1606                 graph->is_recursive  = 0;
1607                 graph->is_chain_call = 0;
1608                 graph->is_strict     = 1;
1609                 graph->is_analyzed   = 0;
1610         }
1611         STAT_LEAVE;
1612 }  /* stat_new_graph */
1613
1614 /**
1615  * Hook: A graph will be deleted
1616  *
1617  * @param ctx  the hook context
1618  * @param irg  the IR graph that will be deleted
1619  *
1620  * Note that we still hold the information for this graph
1621  * in our hash maps, only a flag is set which prevents this
1622  * information from being changed, it's "frozen" from now.
1623  */
1624 static void stat_free_graph(void *ctx, ir_graph *irg)
1625 {
1626         (void) ctx;
1627         if (! status->stat_options)
1628                 return;
1629
1630         STAT_ENTER;
1631         {
1632                 graph_entry_t *graph  = graph_get_entry(irg, status->irg_hash);
1633                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
1634
1635                 graph->is_deleted = 1;
1636
1637                 if (status->stat_options & FIRMSTAT_COUNT_DELETED) {
1638                         /* count the nodes of the graph yet, it will be destroyed later */
1639                         update_graph_stat(global, graph);
1640                 }  /* if */
1641         }
1642         STAT_LEAVE;
1643 }  /* stat_free_graph */
1644
1645 /**
1646  * Hook: A walk over a graph is initiated. Do not count walks from statistic code.
1647  *
1648  * @param ctx  the hook context
1649  * @param irg  the IR graph that will be walked
1650  * @param pre  the pre walker
1651  * @param post the post walker
1652  */
1653 static void stat_irg_walk(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1654 {
1655         (void) ctx;
1656         (void) pre;
1657         (void) post;
1658         if (! status->stat_options)
1659                 return;
1660
1661         STAT_ENTER_SINGLE;
1662         {
1663                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1664
1665                 cnt_inc(&graph->cnt[gcnt_acc_walked]);
1666         }
1667         STAT_LEAVE;
1668 }  /* stat_irg_walk */
1669
1670 /**
1671  * Hook: A walk over a graph in block-wise order is initiated. Do not count walks from statistic code.
1672  *
1673  * @param ctx  the hook context
1674  * @param irg  the IR graph that will be walked
1675  * @param pre  the pre walker
1676  * @param post the post walker
1677  */
/**
 * Hook: A walk over a graph in block-wise order is initiated. Do not count walks from statistic code.
 *
 * Simply delegates to stat_irg_walk(), so both walk kinds share one counter.
 *
 * @param ctx  the hook context
 * @param irg  the IR graph that will be walked
 * @param pre  the pre walker
 * @param post the post walker
 */
static void stat_irg_walk_blkwise(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
{
	/* for now, do NOT differentiate between blockwise and normal */
	stat_irg_walk(ctx, irg, pre, post);
}  /* stat_irg_walk_blkwise */
1683
1684 /**
1685  * Hook: A walk over the graph's blocks is initiated. Do not count walks from statistic code.
1686  *
1687  * @param ctx  the hook context
1688  * @param irg  the IR graph that will be walked
1689  * @param node the IR node
1690  * @param pre  the pre walker
1691  * @param post the post walker
1692  */
1693 static void stat_irg_block_walk(void *ctx, ir_graph *irg, ir_node *node, generic_func *pre, generic_func *post)
1694 {
1695         (void) ctx;
1696         (void) node;
1697         (void) pre;
1698         (void) post;
1699         if (! status->stat_options)
1700                 return;
1701
1702         STAT_ENTER_SINGLE;
1703         {
1704                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1705
1706                 cnt_inc(&graph->cnt[gcnt_acc_walked_blocks]);
1707         }
1708         STAT_LEAVE;
1709 }  /* stat_irg_block_walk */
1710
1711 /**
1712  * Called for every node that is removed due to an optimization.
1713  *
1714  * @param n     the IR node that will be removed
1715  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
1716  * @param kind  the optimization kind
1717  */
1718 static void removed_due_opt(ir_node *n, hmap_opt_entry_t *hmap, hook_opt_kind kind)
1719 {
1720         opt_entry_t *entry;
1721         ir_op *op = stat_get_irn_op(n);
1722
1723         /* ignore CSE for Constants */
1724         if (kind == HOOK_OPT_CSE && (is_Const(n) || is_SymConst(n)))
1725                 return;
1726
1727         /* increase global value */
1728         entry = opt_get_entry(op, hmap);
1729         cnt_inc(&entry->count);
1730 }  /* removed_due_opt */
1731
1732 /**
1733  * Hook: Some nodes were optimized into some others due to an optimization.
1734  *
1735  * @param ctx  the hook context
1736  */
/**
 * Hook: Some nodes were optimized into some others due to an optimization.
 *
 * Every node in the old set that does not also appear in the new set is
 * counted as removed by the given optimization kind.
 *
 * @param ctx              the hook context
 * @param new_node_array   the nodes the optimization produced
 * @param new_num_entries  number of entries in new_node_array
 * @param old_node_array   the nodes the optimization consumed
 * @param old_num_entries  number of entries in old_node_array
 * @param opt              the kind of optimization that was performed
 */
static void stat_merge_nodes(
    void *ctx,
    ir_node **new_node_array, int new_num_entries,
    ir_node **old_node_array, int old_num_entries,
    hook_opt_kind opt)
{
	(void) ctx;
	if (! status->stat_options)
		return;

	STAT_ENTER;
	{
		int i, j;
		graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);

		cnt_inc(&status->num_opts[opt]);
		/* while reassociation runs, attribute all merges to it */
		if (status->reassoc_run)
			opt = HOOK_OPT_REASSOC;

		for (i = 0; i < old_num_entries; ++i) {
			/* nodes might be in new and old, so if we found a node
			   in both sets, this one  is NOT removed */
			for (j = 0; j < new_num_entries; ++j) {
				if (old_node_array[i] == new_node_array[j])
					break;
			}  /* for */
			if (j >= new_num_entries) {
				int xopt = opt;

				/* sometimes we did not detect, that it is replaced by a Const */
				if (opt == HOOK_OPT_CONFIRM && new_num_entries == 1) {
					ir_op *op = get_irn_op(new_node_array[0]);

					if (op == op_Const || op == op_SymConst)
						xopt = HOOK_OPT_CONFIRM_C;
				}  /* if */

				removed_due_opt(old_node_array[i], graph->opt_hash[xopt], (hook_opt_kind)xopt);
			}  /* if */
		}  /* for */
	}
	STAT_LEAVE;
}  /* stat_merge_nodes */
1780
1781 /**
1782  * Hook: Reassociation is started/stopped.
1783  *
1784  * @param ctx   the hook context
1785  * @param flag  if non-zero, reassociation is started else stopped
1786  */
1787 static void stat_reassociate(void *ctx, int flag)
1788 {
1789         (void) ctx;
1790         if (! status->stat_options)
1791                 return;
1792
1793         STAT_ENTER;
1794         {
1795                 status->reassoc_run = flag;
1796         }
1797         STAT_LEAVE;
1798 }  /* stat_reassociate */
1799
1800 /**
1801  * Hook: A node was lowered into other nodes
1802  *
1803  * @param ctx  the hook context
1804  * @param node the IR node that will be lowered
1805  */
1806 static void stat_lower(void *ctx, ir_node *node)
1807 {
1808         (void) ctx;
1809         if (! status->stat_options)
1810                 return;
1811
1812         STAT_ENTER;
1813         {
1814                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1815
1816                 removed_due_opt(node, graph->opt_hash[HOOK_LOWERED], HOOK_LOWERED);
1817         }
1818         STAT_LEAVE;
1819 }  /* stat_lower */
1820
1821 /**
1822  * Hook: A graph was inlined.
1823  *
1824  * @param ctx  the hook context
1825  * @param call the IR call that will re changed into the body of
1826  *             the called IR graph
1827  * @param called_irg  the IR graph representing the called routine
1828  */
1829 static void stat_inline(void *ctx, ir_node *call, ir_graph *called_irg)
1830 {
1831         (void) ctx;
1832         if (! status->stat_options)
1833                 return;
1834
1835         STAT_ENTER;
1836         {
1837                 ir_graph *irg = get_irn_irg(call);
1838                 graph_entry_t *i_graph = graph_get_entry(called_irg, status->irg_hash);
1839                 graph_entry_t *graph   = graph_get_entry(irg, status->irg_hash);
1840
1841                 cnt_inc(&graph->cnt[gcnt_acc_got_inlined]);
1842                 cnt_inc(&i_graph->cnt[gcnt_acc_was_inlined]);
1843         }
1844         STAT_LEAVE;
1845 }  /* stat_inline */
1846
1847 /**
1848  * Hook: A graph with tail-recursions was optimized.
1849  *
1850  * @param ctx  the hook context
1851  */
1852 static void stat_tail_rec(void *ctx, ir_graph *irg, int n_calls)
1853 {
1854         (void) ctx;
1855         if (! status->stat_options)
1856                 return;
1857
1858         STAT_ENTER;
1859         {
1860                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1861
1862                 graph->num_tail_recursion += n_calls;
1863         }
1864         STAT_LEAVE;
1865 }  /* stat_tail_rec */
1866
1867 /**
1868  * Strength reduction was performed on an iteration variable.
1869  *
1870  * @param ctx  the hook context
1871  */
1872 static void stat_strength_red(void *ctx, ir_graph *irg, ir_node *strong)
1873 {
1874         (void) ctx;
1875         if (! status->stat_options)
1876                 return;
1877
1878         STAT_ENTER;
1879         {
1880                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1881                 cnt_inc(&graph->cnt[gcnt_acc_strength_red]);
1882
1883                 removed_due_opt(strong, graph->opt_hash[HOOK_OPT_STRENGTH_RED], HOOK_OPT_STRENGTH_RED);
1884         }
1885         STAT_LEAVE;
1886 }  /* stat_strength_red */
1887
1888 /**
1889  * Hook: Start/Stop the dead node elimination.
1890  *
1891  * @param ctx  the hook context
1892  */
1893 static void stat_dead_node_elim(void *ctx, ir_graph *irg, int start)
1894 {
1895         (void) ctx;
1896         (void) irg;
1897         if (! status->stat_options)
1898                 return;
1899
1900         status->in_dead_node_elim = (start != 0);
1901 }  /* stat_dead_node_elim */
1902
1903 /**
1904  * Hook: if-conversion was tried.
1905  */
1906 static void stat_if_conversion(void *context, ir_graph *irg, ir_node *phi,
1907                                int pos, ir_node *mux, if_result_t reason)
1908 {
1909         (void) context;
1910         (void) phi;
1911         (void) pos;
1912         (void) mux;
1913         if (! status->stat_options)
1914                 return;
1915
1916         STAT_ENTER;
1917         {
1918                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1919
1920                 cnt_inc(&graph->cnt[gcnt_if_conv + reason]);
1921         }
1922         STAT_LEAVE;
1923 }  /* stat_if_conversion */
1924
1925 /**
1926  * Hook: real function call was optimized.
1927  */
1928 static void stat_func_call(void *context, ir_graph *irg, ir_node *call)
1929 {
1930         (void) context;
1931         (void) call;
1932         if (! status->stat_options)
1933                 return;
1934
1935         STAT_ENTER;
1936         {
1937                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1938
1939                 cnt_inc(&graph->cnt[gcnt_acc_real_func_call]);
1940         }
1941         STAT_LEAVE;
1942 }  /* stat_func_call */
1943
1944 /**
1945  * Hook: A multiply was replaced by a series of Shifts/Adds/Subs.
1946  *
1947  * @param ctx  the hook context
1948  */
1949 static void stat_arch_dep_replace_mul_with_shifts(void *ctx, ir_node *mul)
1950 {
1951         (void) ctx;
1952         if (! status->stat_options)
1953                 return;
1954
1955         STAT_ENTER;
1956         {
1957                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1958                 removed_due_opt(mul, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1959         }
1960         STAT_LEAVE;
1961 }  /* stat_arch_dep_replace_mul_with_shifts */
1962
1963 /**
1964  * Hook: A division by const was replaced.
1965  *
1966  * @param ctx   the hook context
1967  * @param node  the division node that will be optimized
1968  */
1969 static void stat_arch_dep_replace_division_by_const(void *ctx, ir_node *node)
1970 {
1971         (void) ctx;
1972         if (! status->stat_options)
1973                 return;
1974
1975         STAT_ENTER;
1976         {
1977                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1978                 removed_due_opt(node, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1979         }
1980         STAT_LEAVE;
1981 }  /* stat_arch_dep_replace_division_by_const */
1982
1983 /*
1984  * Update the register pressure of a block.
1985  *
1986  * @param irg        the irg containing the block
1987  * @param block      the block for which the reg pressure should be set
1988  * @param pressure   the pressure
1989  * @param class_name the name of the register class
1990  */
1991 void stat_be_block_regpressure(ir_graph *irg, ir_node *block, int pressure, const char *class_name)
1992 {
1993         if (! status->stat_options)
1994                 return;
1995
1996         STAT_ENTER;
1997         {
1998                 graph_entry_t        *graph = graph_get_entry(irg, status->irg_hash);
1999                 be_block_entry_t     *block_ent;
2000                 reg_pressure_entry_t *rp_ent;
2001
2002                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2003                 rp_ent    = OALLOCZ(&status->be_data, reg_pressure_entry_t);
2004
2005                 rp_ent->class_name = class_name;
2006                 rp_ent->pressure   = pressure;
2007
2008                 pset_insert(block_ent->reg_pressure, rp_ent, HASH_PTR(class_name));
2009         }
2010         STAT_LEAVE;
2011 }  /* stat_be_block_regpressure */
2012
2013 /**
2014  * Update the distribution of ready nodes of a block
2015  *
2016  * @param irg        the irg containing the block
2017  * @param block      the block for which the reg pressure should be set
2018  * @param num_ready  the number of ready nodes
2019  */
2020 void stat_be_block_sched_ready(ir_graph *irg, ir_node *block, int num_ready)
2021 {
2022         if (! status->stat_options)
2023                 return;
2024
2025         STAT_ENTER;
2026         {
2027                 graph_entry_t    *graph = graph_get_entry(irg, status->irg_hash);
2028                 be_block_entry_t *block_ent;
2029
2030                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2031
2032                 /* increase the counter of corresponding number of ready nodes */
2033                 stat_inc_int_distrib_tbl(block_ent->sched_ready, num_ready);
2034         }
2035         STAT_LEAVE;
2036 }  /* stat_be_block_sched_ready */
2037
2038 /**
2039  * Update the permutation statistic of a block.
2040  *
2041  * @param class_name the name of the register class
2042  * @param n_regs     number of registers in the register class
2043  * @param perm       the perm node
2044  * @param block      the block containing the perm
2045  * @param size       the size of the perm
2046  * @param real_size  number of pairs with different registers
2047  */
2048 void stat_be_block_stat_perm(const char *class_name, int n_regs, ir_node *perm, ir_node *block,
2049                              int size, int real_size)
2050 {
2051         if (! status->stat_options)
2052                 return;
2053
2054         STAT_ENTER;
2055         {
2056                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2057                 be_block_entry_t   *block_ent;
2058                 perm_class_entry_t *pc_ent;
2059                 perm_stat_entry_t  *ps_ent;
2060
2061                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2062                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2063                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2064
2065                 pc_ent->n_regs = n_regs;
2066
2067                 /* update information */
2068                 ps_ent->size      = size;
2069                 ps_ent->real_size = real_size;
2070         }
2071         STAT_LEAVE;
2072 }  /* stat_be_block_stat_perm */
2073
2074 /**
2075  * Update the permutation statistic of a single perm.
2076  *
2077  * @param class_name the name of the register class
2078  * @param perm       the perm node
2079  * @param block      the block containing the perm
2080  * @param is_chain   1 if chain, 0 if cycle
2081  * @param size       length of the cycle/chain
2082  * @param n_ops      the number of ops representing this cycle/chain after lowering
2083  */
2084 void stat_be_block_stat_permcycle(const char *class_name, ir_node *perm, ir_node *block,
2085                                   int is_chain, int size, int n_ops)
2086 {
2087         if (! status->stat_options)
2088                 return;
2089
2090         STAT_ENTER;
2091         {
2092                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2093                 be_block_entry_t   *block_ent;
2094                 perm_class_entry_t *pc_ent;
2095                 perm_stat_entry_t  *ps_ent;
2096
2097                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2098                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2099                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2100
2101                 if (is_chain) {
2102                         ps_ent->n_copies += n_ops;
2103                         stat_inc_int_distrib_tbl(ps_ent->chains, size);
2104                 } else {
2105                         ps_ent->n_exchg += n_ops;
2106                         stat_inc_int_distrib_tbl(ps_ent->cycles, size);
2107                 }  /* if */
2108         }
2109         STAT_LEAVE;
2110 }  /* stat_be_block_stat_permcycle */
2111
2112 /* Dumps a statistics snapshot. */
2113 void stat_dump_snapshot(const char *name, const char *phase)
2114 {
2115         char fname[2048];
2116         const char *p;
2117         size_t l;
2118
2119         if (! status->stat_options)
2120                 return;
2121
2122         STAT_ENTER;
2123         {
2124                 graph_entry_t *entry;
2125                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
2126
2127                 /*
2128                  * The constant counter is only global, so we clear it here.
2129                  * Note that it does NOT contain the constants in DELETED
2130                  * graphs due to this.
2131                  */
2132                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2133                         stat_const_clear(status);
2134
2135                 /* build the name */
2136                 p = strrchr(name, '/');
2137 #ifdef _WIN32
2138                 {
2139                         const char *q;
2140
2141                         q = strrchr(name, '\\');
2142
2143                         /* NULL might be not the smallest pointer */
2144                         if (q && (!p || q > p))
2145                                 p = q;
2146                 }
2147 #endif /* _WIN32 */
2148                 if (p) {
2149                         ++p;
2150                         l = p - name;
2151
2152                         if (l > (int) (sizeof(fname) - 1))
2153                                 l = sizeof(fname) - 1;
2154
2155                         memcpy(fname, name, l);
2156                         fname[l] = '\0';
2157                 } else {
2158                         fname[0] = '\0';
2159                         p = name;
2160                 }  /* if */
2161                 strncat(fname, "firmstat-", sizeof(fname)-1);
2162                 strncat(fname, phase, sizeof(fname)-1);
2163                 strncat(fname, "-", sizeof(fname)-1);
2164                 strncat(fname, p, sizeof(fname)-1);
2165
2166                 stat_dump_init(fname);
2167
2168                 /* calculate the graph statistics */
2169                 for (entry = (graph_entry_t*)pset_first(status->irg_hash);
2170                       entry != NULL; entry = (graph_entry_t*)pset_next(status->irg_hash)) {
2171                         if (entry->irg == NULL) {
2172                                 /* special entry for the global count */
2173                                 continue;
2174                         }  /* if */
2175                         if (! entry->is_deleted) {
2176                                 /* the graph is still alive, count the nodes on it */
2177                                 update_graph_stat(global, entry);
2178                         }  /* if */
2179                 }  /* for */
2180
2181                 /* some calculations are dependent, we pushed them on the wait_q */
2182                 while (! pdeq_empty(status->wait_q)) {
2183                         entry = (graph_entry_t*)pdeq_getr(status->wait_q);
2184
2185                         update_graph_stat_2(global, entry);
2186                 }  /* while */
2187
2188                 /* dump per graph */
2189                 for (entry = (graph_entry_t*)pset_first(status->irg_hash);
2190                      entry != NULL; entry = (graph_entry_t*)pset_next(status->irg_hash)) {
2191                         if (entry->irg == NULL) {
2192                                 /* special entry for the global count */
2193                                 continue;
2194                         }  /* if */
2195
2196                         if (! entry->is_deleted || status->stat_options & FIRMSTAT_COUNT_DELETED) {
2197                                 stat_dump_graph(entry);
2198                                 stat_dump_registered(entry);
2199                         }  /* if */
2200
2201                         if (! entry->is_deleted) {
2202                                 /* clear the counter that are not accumulated */
2203                                 graph_clear_entry(entry, 0);
2204                         }  /* if */
2205                 }  /* for */
2206
2207                 /* dump global */
2208                 stat_dump_graph(global);
2209
2210                 /* dump the const info */
2211                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2212                         stat_dump_consts(&status->const_info);
2213
2214                 /* dump the parameter distribution */
2215                 stat_dump_param_tbl(status->dist_param_cnt, global);
2216
2217                 /* dump the optimization counter and clear them */
2218                 stat_dump_opt_cnt(status->num_opts, ARRAY_SIZE(status->num_opts));
2219                 clear_optimization_counter();
2220
2221                 stat_dump_finish();
2222
2223                 stat_finish_pattern_history(fname);
2224
2225                 /* clear the global counters here */
2226                 {
2227                         node_entry_t *entry;
2228
2229                         for (entry = (node_entry_t*)pset_first(global->opcode_hash);
2230                              entry != NULL; entry = (node_entry_t*)pset_next(global->opcode_hash)) {
2231                                 opcode_clear_entry(entry);
2232                         }  /* for */
2233                         /* clear all global counter */
2234                         graph_clear_entry(global, /*all=*/1);
2235                 }
2236         }
2237         STAT_LEAVE;
2238 }  /* stat_dump_snapshot */
2239
/** Context for running stat_dump_snapshot() as an ir_prog pass. */
typedef struct pass_t {
	ir_prog_pass_t pass;    /**< the embedded base pass structure */
	const char     *fname;  /**< file name argument (pointer stored, not copied) */
	const char     *phase;  /**< phase name argument (pointer stored, not copied) */
} pass_t;
2245
2246 /**
2247  * Wrapper to run stat_dump_snapshot() as a ir_prog wrapper.
2248  */
2249 static int stat_dump_snapshot_wrapper(ir_prog *irp, void *context)
2250 {
2251         pass_t *pass = (pass_t*)context;
2252
2253         (void)irp;
2254         stat_dump_snapshot(pass->fname, pass->phase);
2255         return 0;
2256 }  /* stat_dump_snapshot_wrapper */
2257
2258 /**
2259  * Ensure that no verifier is run from the wrapper.
2260  */
2261 static int no_verify(ir_prog *prog, void *ctx)
2262 {
2263         (void)prog;
2264         (void)ctx;
2265         return 0;
2266 }
2267
2268 /**
2269  * Ensure that no dumper is run from the wrapper.
2270  */
2271 static void no_dump(ir_prog *prog, void *ctx, unsigned idx)
2272 {
2273         (void)prog;
2274         (void)ctx;
2275         (void)idx;
2276 }
2277
2278 /* create an ir_pog pass */
2279 ir_prog_pass_t *stat_dump_snapshot_pass(
2280         const char *name, const char *fname, const char *phase)
2281 {
2282         pass_t *pass = XMALLOCZ(pass_t);
2283
2284         def_prog_pass_constructor(
2285                 &pass->pass, name ? name : "stat_snapshot", stat_dump_snapshot_wrapper);
2286         pass->fname = fname;
2287         pass->phase = phase;
2288
2289         /* no dump/verify */
2290         pass->pass.dump_irprog   = no_dump;
2291         pass->pass.verify_irprog = no_verify;
2292
2293         return &pass->pass;
2294 }  /* stat_dump_snapshot_pass */
2295
/** the hook entries for the Firm statistics module */
static hook_entry_t stat_hooks[hook_last];

/* initialize the statistics module. */
void firm_init_stat(unsigned enable_options)
{
/* X(a): expands to the literal plus its length, for new_id_from_chars() */
#define X(a)  a, sizeof(a)-1
/* HOOK(h, fkt): store fkt in the matching union member and register the entry */
#define HOOK(h, fkt) \
	stat_hooks[h].hook._##h = fkt; register_hook(h, &stat_hooks[h])
	/* pseudo-opcode counter: pre-decremented below, so the (unsigned)
	 * codes wrap to UINT_MAX, UINT_MAX-1, ... and cannot collide with
	 * the small codes of real Firm opcodes */
	unsigned num = 0;

	if (! (enable_options & FIRMSTAT_ENABLED))
		return;

	status = XMALLOCZ(stat_info_t);

	/* enable statistics */
	/* NOTE(review): the early return above guarantees FIRMSTAT_ENABLED is
	 * set here, so this conditional always stores enable_options */
	status->stat_options = enable_options & FIRMSTAT_ENABLED ? enable_options : 0;

	/* register all hooks */
	HOOK(hook_new_ir_op,                          stat_new_ir_op);
	HOOK(hook_free_ir_op,                         stat_free_ir_op);
	HOOK(hook_new_node,                           stat_new_node);
	HOOK(hook_turn_into_id,                       stat_turn_into_id);
	HOOK(hook_normalize,                          stat_normalize);
	HOOK(hook_new_graph,                          stat_new_graph);
	HOOK(hook_free_graph,                         stat_free_graph);
	HOOK(hook_irg_walk,                           stat_irg_walk);
	HOOK(hook_irg_walk_blkwise,                   stat_irg_walk_blkwise);
	HOOK(hook_irg_block_walk,                     stat_irg_block_walk);
	HOOK(hook_merge_nodes,                        stat_merge_nodes);
	HOOK(hook_reassociate,                        stat_reassociate);
	HOOK(hook_lower,                              stat_lower);
	HOOK(hook_inline,                             stat_inline);
	HOOK(hook_tail_rec,                           stat_tail_rec);
	HOOK(hook_strength_red,                       stat_strength_red);
	HOOK(hook_dead_node_elim,                     stat_dead_node_elim);
	HOOK(hook_if_conversion,                      stat_if_conversion);
	HOOK(hook_func_call,                          stat_func_call);
	HOOK(hook_arch_dep_replace_mul_with_shifts,   stat_arch_dep_replace_mul_with_shifts);
	HOOK(hook_arch_dep_replace_division_by_const, stat_arch_dep_replace_division_by_const);

	/* obstacks backing the counters and the backend statistics entries */
	obstack_init(&status->cnts);
	obstack_init(&status->be_data);

	/* create the hash-tables */
	status->irg_hash   = new_pset(graph_cmp, 8);
	status->ir_op_hash = new_pset(opcode_cmp_2, 1);

	/* create the wait queue (used for dependent graph calculations) */
	status->wait_q     = new_pdeq();

	if (enable_options & FIRMSTAT_COUNT_STRONG_OP) {
		/* build the pseudo-ops: fake ir_ops used to count interesting
		 * node classes (memory Phis, const operands, ...) separately */

		_op_Phi0.code    = --num;
		_op_Phi0.name    = new_id_from_chars(X("Phi0"));

		_op_PhiM.code    = --num;
		_op_PhiM.name    = new_id_from_chars(X("PhiM"));

		_op_ProjM.code   = --num;
		_op_ProjM.name   = new_id_from_chars(X("ProjM"));

		_op_MulC.code    = --num;
		_op_MulC.name    = new_id_from_chars(X("MulC"));

		_op_DivC.code    = --num;
		_op_DivC.name    = new_id_from_chars(X("DivC"));

		_op_ModC.code    = --num;
		_op_ModC.name    = new_id_from_chars(X("ModC"));

		status->op_Phi0    = &_op_Phi0;
		status->op_PhiM    = &_op_PhiM;
		status->op_ProjM   = &_op_ProjM;
		status->op_MulC    = &_op_MulC;
		status->op_DivC    = &_op_DivC;
		status->op_ModC    = &_op_ModC;
	} else {
		/* NULL pointers disable the special counting */
		status->op_Phi0    = NULL;
		status->op_PhiM    = NULL;
		status->op_ProjM   = NULL;
		status->op_MulC    = NULL;
		status->op_DivC    = NULL;
		status->op_ModC    = NULL;
	}  /* if */

	/* for Florian: count the Sel depth */
	if (enable_options & FIRMSTAT_COUNT_SELS) {
		_op_SelSel.code    = --num;
		_op_SelSel.name    = new_id_from_chars(X("Sel(Sel)"));

		_op_SelSelSel.code = --num;
		_op_SelSelSel.name = new_id_from_chars(X("Sel(Sel(Sel))"));

		status->op_SelSel    = &_op_SelSel;
		status->op_SelSelSel = &_op_SelSelSel;
	} else {
		status->op_SelSel    = NULL;
		status->op_SelSelSel = NULL;
	}  /* if */

	/* register the dumper: the simple (text) dumper is always active */
	stat_register_dumper(&simple_dumper);

	if (enable_options & FIRMSTAT_CSV_OUTPUT)
		stat_register_dumper(&csv_dumper);

	/* initialize the pattern hash */
	stat_init_pattern_history(enable_options & FIRMSTAT_PATTERN_ENABLED);

	/* initialize the Const options */
	if (enable_options & FIRMSTAT_COUNT_CONSTS)
		stat_init_const_cnt(status);

	/* distribution table for parameter counts */
	status->dist_param_cnt = stat_new_int_distrib_tbl();

	clear_optimization_counter();

#undef HOOK
#undef X
}  /* firm_init_stat */
2420
2421 /**
2422  * Frees all dumper structures.
2423  */
2424 static void stat_term_dumper(void)
2425 {
2426         dumper_t *dumper, *next_dumper;
2427
2428         for (dumper = status->dumper; dumper; /* iteration done in loop body */ ) {
2429                 if (dumper->func_map)
2430                         del_pset(dumper->func_map);
2431
2432                 next_dumper = dumper->next;
2433                 free(dumper);
2434                 dumper = next_dumper;
2435         }  /* for */
2436 }  /* stat_term_dumper */
2437
2438
2439 /* Terminates the statistics module, frees all memory. */
2440 void stat_term(void)
2441 {
2442         if (status != (stat_info_t *)&status_disable) {
2443                 obstack_free(&status->be_data, NULL);
2444                 obstack_free(&status->cnts, NULL);
2445
2446                 stat_term_dumper();
2447
2448                 xfree(status);
2449                 status = (stat_info_t *)&status_disable;
2450         }
2451 }  /* stat_term */
2452
2453 /* returns 1 if statistics were initialized, 0 otherwise */
2454 int stat_is_active(void)
2455 {
2456         return status != (stat_info_t *)&status_disable;
2457 }  /* stat_is_active */
2458
2459 #else
2460
/* initialize the statistics module (no-op, statistics disabled at build time) */
void firm_init_stat(unsigned enable_options)
{
	(void)enable_options;
}

/* Dumps a statistics snapshot (no-op) */
void stat_dump_snapshot(const char *name, const char *phase)
{
	(void)name;
	(void)phase;
}

/* terminates the statistics module, frees all memory (no-op).
 * BUGFIX: this was a mere declaration (trailing ';'), leaving stat_term()
 * undefined when FIRM_STATISTICS is off and causing a link error for any
 * caller; it must be an empty definition like the other stubs. */
void stat_term(void)
{
}
2469
2470 #endif /* FIRM_STATISTICS */