libfirm: ir/stat/firmstat.c
1 /*
2  * Copyright (C) 1995-2008 University of Karlsruhe.  All rights reserved.
3  *
4  * This file is part of libFirm.
5  *
6  * This file may be distributed and/or modified under the terms of the
7  * GNU General Public License version 2 as published by the Free Software
8  * Foundation and appearing in the file LICENSE.GPL included in the
9  * packaging of this file.
10  *
11  * Licensees holding valid libFirm Professional Edition licenses may use
12  * this file in accordance with the libFirm Commercial License.
13  * Agreement provided with the Software.
14  *
15  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17  * PURPOSE.
18  */
19
20 /**
21  * @file
22  * @brief   Statistics for Firm.
23  * @author  Michael Beck
24  * @version $Id$
25  */
26 #include "config.h"
27
28 #ifdef FIRM_STATISTICS
29
30 #include <stdio.h>
31 #include <stdlib.h>
32 #include <string.h>
33
34 #include "irouts.h"
35 #include "irdump.h"
36 #include "hashptr.h"
37 #include "firmstat_t.h"
38 #include "irpass_t.h"
39 #include "pattern.h"
40 #include "dags.h"
41 #include "stat_dmp.h"
42 #include "xmalloc.h"
43 #include "irhooks.h"
44
45 /*
46  * need this to be static:
47  * Special pseudo Opcodes that we need to count some interesting cases
48  */
49
50 /**
51  * The Phi0, a node that is created during SSA construction
52  */
53 static ir_op _op_Phi0;
54
55 /** The PhiM, just to count memory Phis. */
56 static ir_op _op_PhiM;
57
58 /** The Mul by Const node. */
59 static ir_op _op_MulC;
60
61 /** The Div by Const node. */
62 static ir_op _op_DivC;
63
64 /** The Mod by Const node. */
65 static ir_op _op_ModC;
66
67 /** The DivMod by Const node. */
68 static ir_op _op_DivModC;
69
70 /** The Quot by Const node. */
71 static ir_op _op_QuotC;
72
73 /** The memory Proj node. */
74 static ir_op _op_ProjM;
75
76 /** A Sel of a Sel */
77 static ir_op _op_SelSel;
78
79 /** A Sel of a Sel of a Sel */
80 static ir_op _op_SelSelSel;
81
82 /* ---------------------------------------------------------------------------------- */
83
84 /** Marks the beginning of a statistic (hook) function. */
85 #define STAT_ENTER              ++status->recursive
86
87 /** Marks the end of a statistic (hook) function. */
88 #define STAT_LEAVE              --status->recursive
89
90 /** Allows entering a statistic function only when we are not already in a hook. */
91 #define STAT_ENTER_SINGLE       do { if (status->recursive > 0) return; ++status->recursive; } while (0)
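
/*
 * Illustrative usage (a sketch drawn from the hooks further below, not new
 * functionality): every hook brackets its body with these macros so that
 * statistic code never counts its own activity:
 *
 *     STAT_ENTER;
 *     {
 *         graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
 *         cnt_inc(&graph->cnt[gcnt_acc_walked]);
 *     }
 *     STAT_LEAVE;
 */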
92
93 /**
94  * global status
95  */
96 static const unsigned status_disable = 0;
97 static stat_info_t *status = (stat_info_t *)&status_disable;
98
99 /**
100  * Compare two elements of the opcode hash.
101  */
102 static int opcode_cmp(const void *elt, const void *key) {
103         const node_entry_t *e1 = elt;
104         const node_entry_t *e2 = key;
105
106         return e1->op->code - e2->op->code;
107 }  /* opcode_cmp */
108
109 /**
110  * Compare two elements of the graph hash.
111  */
112 static int graph_cmp(const void *elt, const void *key) {
113         const graph_entry_t *e1 = elt;
114         const graph_entry_t *e2 = key;
115
116         return e1->irg != e2->irg;
117 }  /* graph_cmp */
118
119 /**
120  * Compare two elements of the optimization hash.
121  */
122 static int opt_cmp(const void *elt, const void *key) {
123         const opt_entry_t *e1 = elt;
124         const opt_entry_t *e2 = key;
125
126         return e1->op->code != e2->op->code;
127 }  /* opt_cmp */
128
129 /**
130  * Compare two elements of the block/extbb hash.
131  */
132 static int block_cmp(const void *elt, const void *key) {
133         const block_entry_t *e1 = elt;
134         const block_entry_t *e2 = key;
135
136         /* it's enough to compare the block number */
137         return e1->block_nr != e2->block_nr;
138 }  /* block_cmp */
139
140 /**
141  * Compare two elements of the be_block hash.
142  */
143 static int be_block_cmp(const void *elt, const void *key) {
144         const be_block_entry_t *e1 = elt;
145         const be_block_entry_t *e2 = key;
146
147         return e1->block_nr != e2->block_nr;
148 }  /* be_block_cmp */
149
150 /**
151  * Compare two elements of the reg pressure hash.
152  */
153 static int reg_pressure_cmp(const void *elt, const void *key) {
154         const reg_pressure_entry_t *e1 = elt;
155         const reg_pressure_entry_t *e2 = key;
156
157         return e1->class_name != e2->class_name;
158 }  /* reg_pressure_cmp */
159
160 /**
161  * Compare two elements of the perm_stat hash.
162  */
163 static int perm_stat_cmp(const void *elt, const void *key) {
164         const perm_stat_entry_t *e1 = elt;
165         const perm_stat_entry_t *e2 = key;
166
167         return e1->perm != e2->perm;
168 }  /* perm_stat_cmp */
169
170 /**
171  * Compare two elements of the perm_class hash.
172  */
173 static int perm_class_cmp(const void *elt, const void *key) {
174         const perm_class_entry_t *e1 = elt;
175         const perm_class_entry_t *e2 = key;
176
177         return e1->class_name != e2->class_name;
178 }  /* perm_class_cmp */
179
180 /**
181  * Compare two elements of the ir_op hash.
182  */
183 static int opcode_cmp_2(const void *elt, const void *key) {
184         const ir_op *e1 = elt;
185         const ir_op *e2 = key;
186
187         return e1->code != e2->code;
188 }  /* opcode_cmp_2 */
189
190 /**
191  * Compare two elements of the address_mark set.
192  */
193 static int address_mark_cmp(const void *elt, const void *key, size_t size) {
194         const address_mark_entry_t *e1 = elt;
195         const address_mark_entry_t *e2 = key;
196         (void) size;
197
198         /* compare only the nodes, the rest is used as data container */
199         return e1->node != e2->node;
200 }  /* address_mark_cmp */
201
202 /**
203  * Clears all counters in a node_entry_t.
204  */
205 static void opcode_clear_entry(node_entry_t *elem) {
206         cnt_clr(&elem->cnt_alive);
207         cnt_clr(&elem->new_node);
208         cnt_clr(&elem->into_Id);
209         cnt_clr(&elem->normalized);
210 }  /* opcode_clear_entry */
211
212 /**
213  * Returns the associated node_entry_t for an ir_op (and allocates
214  * one if not yet available).
215  *
216  * @param op    the IR operation
217  * @param hmap  a hash map containing ir_op* -> node_entry_t*
218  */
219 static node_entry_t *opcode_get_entry(const ir_op *op, hmap_node_entry_t *hmap) {
220         node_entry_t key;
221         node_entry_t *elem;
222
223         key.op = op;
224
225         elem = pset_find(hmap, &key, op->code);
226         if (elem)
227                 return elem;
228
229         elem = obstack_alloc(&status->cnts, sizeof(*elem));
230         memset(elem, 0, sizeof(*elem));
231
232         /* clear counter */
233         opcode_clear_entry(elem);
234
235         elem->op = op;
236
237         return pset_insert(hmap, elem, op->code);
238 }  /* opcode_get_entry */
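
/*
 * Illustrative usage (a sketch mirroring the hooks below): callers look up
 * the per-opcode entry and bump one of its counters, e.g.
 *
 *     node_entry_t *entry = opcode_get_entry(op, graph->opcode_hash);
 *     cnt_inc(&entry->new_node);
 */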
239
240 /**
241  * Returns the associated ir_op for an opcode
242  *
243  * @param code  the IR opcode
244  * @param hmap  the hash map containing opcode -> ir_op*
245  */
246 static ir_op *opcode_find_entry(ir_opcode code, hmap_ir_op *hmap) {
247         ir_op key;
248
249         key.code = code;
250         return pset_find(hmap, &key, code);
251 }  /* opcode_find_entry */
252
253 /**
254  * Clears all counters in a graph_entry_t.
255  *
256  * @param elem  the graph entry
257  * @param all   if non-zero, clears all counters, else leaves the accumulated ones untouched
258  */
259 static void graph_clear_entry(graph_entry_t *elem, int all) {
260         int i;
261
262         /* clear accumulated / non-accumulated counter */
263         for (i = all ? 0 : _gcnt_non_acc; i < _gcnt_last; ++i) {
264                 cnt_clr(&elem->cnt[i]);
265         }  /* for */
266
267         if (elem->block_hash) {
268                 del_pset(elem->block_hash);
269                 elem->block_hash = NULL;
270         }  /* if */
271
272         if (elem->extbb_hash) {
273                 del_pset(elem->extbb_hash);
274                 elem->extbb_hash = NULL;
275         }  /* if */
276
277         obstack_free(&elem->recalc_cnts, NULL);
278         obstack_init(&elem->recalc_cnts);
279 }  /* graph_clear_entry */
280
281 /**
282  * Returns the associated graph_entry_t for an IR graph.
283  *
284  * @param irg   the IR graph, NULL for the global counter
285  * @param hmap  the hash map containing ir_graph* -> graph_entry_t*
286  */
287 static graph_entry_t *graph_get_entry(ir_graph *irg, hmap_graph_entry_t *hmap)
288 {
289         graph_entry_t key;
290         graph_entry_t *elem;
291         size_t i;
292
293         key.irg = irg;
294
295         elem = pset_find(hmap, &key, HASH_PTR(irg));
296
297         if (elem) {
298                 /* create hash map backend block information */
299                 if (! elem->be_block_hash)
300                         elem->be_block_hash = new_pset(be_block_cmp, 5);
301
302                 return elem;
303         }  /* if */
304
305         /* allocate a new one */
306         elem = obstack_alloc(&status->cnts, sizeof(*elem));
307         memset(elem, 0, sizeof(*elem));
308         obstack_init(&elem->recalc_cnts);
309
310         /* clear counter */
311         graph_clear_entry(elem, 1);
312
313         /* new hash table for opcodes here  */
314         elem->opcode_hash   = new_pset(opcode_cmp, 5);
315         elem->address_mark  = new_set(address_mark_cmp, 5);
316         elem->irg           = irg;
317
318         /* these hash tables are created on demand */
319         elem->block_hash = NULL;
320         elem->extbb_hash = NULL;
321
322         for (i = 0; i < sizeof(elem->opt_hash)/sizeof(elem->opt_hash[0]); ++i)
323                 elem->opt_hash[i] = new_pset(opt_cmp, 4);
324
325         return pset_insert(hmap, elem, HASH_PTR(irg));
326 }  /* graph_get_entry */
327
328 /**
329  * Clears all counters in an opt_entry_t.
330  */
331 static void opt_clear_entry(opt_entry_t *elem) {
332         cnt_clr(&elem->count);
333 }  /* opt_clear_entry */
334
335 /**
336  * Returns the associated opt_entry_t for an IR operation.
337  *
338  * @param op    the IR operation
339  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
340  */
341 static opt_entry_t *opt_get_entry(const ir_op *op, hmap_opt_entry_t *hmap)
342 {
343         opt_entry_t key;
344         opt_entry_t *elem;
345
346         key.op = op;
347
348         elem = pset_find(hmap, &key, op->code);
349         if (elem)
350                 return elem;
351
352         elem = obstack_alloc(&status->cnts, sizeof(*elem));
353         memset(elem, 0, sizeof(*elem));
354
355         /* clear new counter */
356         opt_clear_entry(elem);
357
358         elem->op = op;
359
360         return pset_insert(hmap, elem, op->code);
361 }  /* opt_get_entry */
362
363 /**
364  * Clears all counters in a block_entry_t.
365  */
366 static void block_clear_entry(block_entry_t *elem) {
367         int i;
368
369         for (i = 0; i < _bcnt_last; ++i)
370                 cnt_clr(&elem->cnt[i]);
371 }  /* block_clear_entry */
372
373 /**
374  * Returns the associated block_entry_t for a block.
375  *
376  * @param block_nr  an IR block number
377  * @param hmap      a hash map containing long -> block_entry_t
378  */
379 static block_entry_t *block_get_entry(struct obstack *obst, long block_nr, hmap_block_entry_t *hmap)
380 {
381         block_entry_t key;
382         block_entry_t *elem;
383
384         key.block_nr = block_nr;
385
386         elem = pset_find(hmap, &key, block_nr);
387         if (elem)
388                 return elem;
389
390         elem = obstack_alloc(obst, sizeof(*elem));
391         memset(elem, 0, sizeof(*elem));
392
393         /* clear new counter */
394         block_clear_entry(elem);
395
396         elem->block_nr = block_nr;
397
398         return pset_insert(hmap, elem, block_nr);
399 }  /* block_get_entry */
400
401 /**
402  * Clear all sets in be_block_entry_t.
403  */
404 static void be_block_clear_entry(be_block_entry_t *elem)
405 {
406         if (elem->reg_pressure)
407                 del_pset(elem->reg_pressure);
408
409         if (elem->sched_ready)
410                 stat_delete_distrib_tbl(elem->sched_ready);
411
412         if (elem->perm_class_stat)
413                 del_pset(elem->perm_class_stat);
414
415         elem->reg_pressure    = new_pset(reg_pressure_cmp, 5);
416         elem->sched_ready     = stat_new_int_distrib_tbl();
417         elem->perm_class_stat = new_pset(perm_class_cmp, 5);
418 }  /* be_block_clear_entry */
419
420 /**
421  * Returns the associated be_block_entry_t for a block.
422  *
423  * @param block_nr  an IR block number
424  * @param hmap      a hash map containing long -> be_block_entry_t
425  */
426 static be_block_entry_t *be_block_get_entry(struct obstack *obst, long block_nr, hmap_be_block_entry_t *hmap)
427 {
428         be_block_entry_t key;
429         be_block_entry_t *elem;
430
431         key.block_nr = block_nr;
432
433         elem = pset_find(hmap, &key, block_nr);
434         if (elem)
435                 return elem;
436
437         elem = obstack_alloc(obst, sizeof(*elem));
438         memset(elem, 0, sizeof(*elem));
439
440         /* clear new counter */
441         be_block_clear_entry(elem);
442
443         elem->block_nr = block_nr;
444
445         return pset_insert(hmap, elem, block_nr);
446 }  /* be_block_get_entry */
447
448 /**
449  * clears all sets in perm_class_entry_t
450  */
451 static void perm_class_clear_entry(perm_class_entry_t *elem) {
452         if (elem->perm_stat)
453                 del_pset(elem->perm_stat);
454
455         elem->perm_stat = new_pset(perm_stat_cmp, 5);
456 }  /* perm_class_clear_entry */
457
458 /**
459  * Returns the associated perm_class entry for a register class.
460  *
461  * @param class_name  the register class name
462  * @param hmap        a hash map containing class_name -> perm_class_entry_t
463  */
464 static perm_class_entry_t *perm_class_get_entry(struct obstack *obst, const char *class_name,
465                                                 hmap_perm_class_entry_t *hmap)
466 {
467         perm_class_entry_t key;
468         perm_class_entry_t *elem;
469
470         key.class_name = class_name;
471
472         elem = pset_find(hmap, &key, HASH_PTR(class_name));
473         if (elem)
474                 return elem;
475
476         elem = obstack_alloc(obst, sizeof(*elem));
477         memset(elem, 0, sizeof(*elem));
478
479         /* clear new counter */
480         perm_class_clear_entry(elem);
481
482         elem->class_name = class_name;
483
484         return pset_insert(hmap, elem, HASH_PTR(class_name));
485 }  /* perm_class_get_entry */
486
487 /**
488  * clears all sets in perm_stat_entry_t
489  */
490 static void perm_stat_clear_entry(perm_stat_entry_t *elem) {
491         if (elem->chains)
492                 stat_delete_distrib_tbl(elem->chains);
493
494         if (elem->cycles)
495                 stat_delete_distrib_tbl(elem->cycles);
496
497         elem->chains = stat_new_int_distrib_tbl();
498         elem->cycles = stat_new_int_distrib_tbl();
499 }  /* perm_stat_clear_entry */
500
501 /**
502  * Returns the associated perm_stat entry for a perm.
503  *
504  * @param perm      the perm node
505  * @param hmap      a hash map containing perm -> perm_stat_entry_t
506  */
507 static perm_stat_entry_t *perm_stat_get_entry(struct obstack *obst, ir_node *perm, hmap_perm_stat_entry_t *hmap)
508 {
509         perm_stat_entry_t key;
510         perm_stat_entry_t *elem;
511
512         key.perm = perm;
513
514         elem = pset_find(hmap, &key, HASH_PTR(perm));
515         if (elem)
516                 return elem;
517
518         elem = obstack_alloc(obst, sizeof(*elem));
519         memset(elem, 0, sizeof(*elem));
520
521         /* clear new counter */
522         perm_stat_clear_entry(elem);
523
524         elem->perm = perm;
525
526         return pset_insert(hmap, elem, HASH_PTR(perm));
527 }  /* perm_stat_get_entry */
528
529 /**
530  * Clears the optimization counters.
531  */
532 static void clear_optimization_counter(void)  {
533         int i;
534         for (i = 0; i < FS_OPT_MAX; ++i)
535                 cnt_clr(&status->num_opts[i]);
536 }
537
538 /**
539  * Returns the ir_op for an IR node; handles special cases and
540  * returns pseudo opcodes.
541  *
542  * @param node  an IR node
543  */
544 static ir_op *stat_get_irn_op(ir_node *node)
545 {
546         ir_op *op = get_irn_op(node);
547         ir_opcode opc = op->code;
548
549         switch (opc) {
550         case iro_Phi:
551                 if (get_irn_arity(node) == 0) {
552                         /* special case, a Phi0 node, count on extra counter */
553                         op = status->op_Phi0 ? status->op_Phi0 : op;
554                 } else if (get_irn_mode(node) == mode_M) {
555                         /* special case, a Memory Phi node, count on extra counter */
556                         op = status->op_PhiM ? status->op_PhiM : op;
557                 }  /* if */
558                 break;
559         case iro_Proj:
560                 if (get_irn_mode(node) == mode_M) {
561                         /* special case, a Memory Proj node, count on extra counter */
562                         op = status->op_ProjM ? status->op_ProjM : op;
563                 }  /* if */
564                 break;
565         case iro_Mul:
566                 if (is_Const(get_Mul_left(node)) || is_Const(get_Mul_right(node))) {
567                         /* special case, a Multiply by a const, count on extra counter */
568                         op = status->op_MulC ? status->op_MulC : op;
569                 }  /* if */
570                 break;
571         case iro_Div:
572                 if (is_Const(get_Div_right(node))) {
573                         /* special case, a division by a const, count on extra counter */
574                         op = status->op_DivC ? status->op_DivC : op;
575                 }  /* if */
576                 break;
577         case iro_Mod:
578                 if (is_Const(get_Mod_right(node))) {
579                         /* special case, a modulo by a const, count on extra counter */
580                         op = status->op_ModC ? status->op_ModC : op;
581                 }  /* if */
582                 break;
583         case iro_DivMod:
584                 if (is_Const(get_DivMod_right(node))) {
585                         /* special case, a division/modulo by a const, count on extra counter */
586                         op = status->op_DivModC ? status->op_DivModC : op;
587                 }  /* if */
588                 break;
589         case iro_Quot:
590                 if (is_Const(get_Quot_right(node))) {
591                         /* special case, a floating point division by a const, count on extra counter */
592                         op = status->op_QuotC ? status->op_QuotC : op;
593                 }  /* if */
594                 break;
595         case iro_Sel:
596                 if (is_Sel(get_Sel_ptr(node))) {
597                         /* special case, a Sel of a Sel, count on extra counter */
598                         op = status->op_SelSel ? status->op_SelSel : op;
599                         if (is_Sel(get_Sel_ptr(get_Sel_ptr(node)))) {
600                                 /* special case, a Sel of a Sel of a Sel, count on extra counter */
601                                 op = status->op_SelSelSel ? status->op_SelSelSel : op;
602                         }  /* if */
603                 }  /* if */
604                 break;
605         default:
606                 ;
607         }  /* switch */
608
609         return op;
610 }  /* stat_get_irn_op */
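
/*
 * Example (illustration only): for a node Mul(x, Const 3) this function
 * returns the pseudo op stored in status->op_MulC (if pseudo ops are
 * enabled), so multiplications by a constant are counted separately from
 * general Mul nodes.
 */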
611
612 /**
613  * Updates the block counters.
614  */
615 static void update_block_info(ir_node *node, graph_entry_t *graph)
616 {
617         ir_op *op = get_irn_op(node);
618         ir_node *block;
619         block_entry_t *b_entry;
620         int i, arity;
621
622         /* check for block */
623         if (op == op_Block) {
624                 arity = get_irn_arity(node);
625                 b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(node), graph->block_hash);
626                 /* mark start and end blocks to allow filtering them out */
627                 if (node == get_irg_start_block(graph->irg))
628                         b_entry->is_start = 1;
629                 else if (node == get_irg_end_block(graph->irg))
630                         b_entry->is_end = 1;
631
632                 /* count all incoming edges */
633                 for (i = 0; i < arity; ++i) {
634                         ir_node *pred = get_irn_n(node, i);
635                         ir_node *other_block = get_nodes_block(pred);
636                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
637
638                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
639                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
640                 }  /* for */
641                 return;
642         }  /* if */
643
644         block   = get_nodes_block(node);
645         b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(block), graph->block_hash);
646
647         if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
648                 /* count data Phi per block */
649                 cnt_inc(&b_entry->cnt[bcnt_phi_data]);
650         }  /* if */
651
652         /* we have a new node in our block */
653         cnt_inc(&b_entry->cnt[bcnt_nodes]);
654
655         /* don't count keep-alive edges */
656         if (is_End(node))
657                 return;
658
659         arity = get_irn_arity(node);
660
661         for (i = 0; i < arity; ++i) {
662                 ir_node *pred = get_irn_n(node, i);
663                 ir_node *other_block;
664
665                 other_block = get_nodes_block(pred);
666
667                 if (other_block == block)
668                         cnt_inc(&b_entry->cnt[bcnt_edges]);     /* an intra-block edge */
669                 else {
670                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
671
672                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
673                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
674                 }  /* if */
675         }  /* for */
676 }  /* update_block_info */
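
/*
 * Counting scheme used above (summary): for a node in block B with a
 * predecessor in block B', the walk increments
 *
 *     bcnt_edges     of B   if B' == B   (intra-block edge)
 *     bcnt_in_edges  of B   if B' != B
 *     bcnt_out_edges of B'  if B' != B
 *
 * Keep-alive edges from the End node are not counted.
 */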
677
678 /**
679  * Update the extended block counter.
680  */
681 static void update_extbb_info(ir_node *node, graph_entry_t *graph)
682 {
683         ir_op *op = get_irn_op(node);
684         ir_extblk *extbb;
685         extbb_entry_t *eb_entry;
686         int i, arity;
687
688         /* check for block */
689         if (op == op_Block) {
690                 extbb = get_nodes_extbb(node);
691                 arity = get_irn_arity(node);
692                 eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);
693
694                 /* count all incoming edges */
695                 for (i = 0; i < arity; ++i) {
696                         ir_node *pred = get_irn_n(node, i);
697                         ir_extblk *other_extbb = get_nodes_extbb(pred);
698
699                         if (extbb != other_extbb) {
700                                 extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);
701
702                                 cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
703                                 cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
704                         }  /* if */
705                 }  /* for */
706                 return;
707         }  /* if */
708
709         extbb    = get_nodes_extbb(node);
710         eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);
711
712         if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
713                 /* count data Phi per extbb */
714                 cnt_inc(&eb_entry->cnt[bcnt_phi_data]);
715         }  /* if */
716
717         /* we have a new node in our block */
718         cnt_inc(&eb_entry->cnt[bcnt_nodes]);
719
720         /* don't count keep-alive edges */
721         if (is_End(node))
722                 return;
723
724         arity = get_irn_arity(node);
725
726         for (i = 0; i < arity; ++i) {
727                 ir_node *pred = get_irn_n(node, i);
728                 ir_extblk *other_extbb = get_nodes_extbb(pred);
729
730                 if (other_extbb == extbb)
731                         cnt_inc(&eb_entry->cnt[bcnt_edges]);    /* an intra-extbb edge */
732                 else {
733                         extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);
734
735                         cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
736                         cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
737                 }  /* if */
738         }  /* for */
739 }  /* update_extbb_info */
740
741 /**
742  * Calculates how many arguments of the call are constant and updates
743  * the parameter distribution.
744  */
745 static void analyse_params_of_Call(graph_entry_t *graph, ir_node *call) {
746         int i, num_const_args = 0, num_local_adr = 0;
747         int n = get_Call_n_params(call);
748
749         for (i = 0; i < n; ++i) {
750                 ir_node *param = get_Call_param(call, i);
751
752                 if (is_irn_constlike(param))
753                         ++num_const_args;
754                 else if (is_Sel(param)) {
755                         ir_node *base = param;
756
757                         do {
758                                 base = get_Sel_ptr(base);
759                         } while (is_Sel(base));
760
761                         if (base == get_irg_frame(current_ir_graph))
762                                 ++num_local_adr;
763                 }
764
765         }  /* for */
766
767         if (num_const_args > 0)
768                 cnt_inc(&graph->cnt[gcnt_call_with_cnst_arg]);
769         if (num_const_args == n)
770                 cnt_inc(&graph->cnt[gcnt_call_with_all_cnst_arg]);
771         if (num_local_adr > 0)
772                 cnt_inc(&graph->cnt[gcnt_call_with_local_adr]);
773
774         stat_inc_int_distrib_tbl(status->dist_param_cnt, n);
775 }  /* analyse_params_of_Call */
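
/*
 * Example (illustration only): for a call foo(3, &local), where "local"
 * lives on the frame of the current graph, num_const_args is 1 and
 * num_local_adr is 1, so both gcnt_call_with_cnst_arg and
 * gcnt_call_with_local_adr are incremented; gcnt_call_with_all_cnst_arg
 * is not, because only one of the two arguments is constant.
 */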
776
777 /**
778  * Update info on calls.
779  *
780  * @param call   The call
781  * @param graph  The graph entry containing the call
782  */
783 static void stat_update_call(ir_node *call, graph_entry_t *graph)
784 {
785         ir_node   *block = get_nodes_block(call);
786         ir_node   *ptr = get_Call_ptr(call);
787         ir_entity *ent = NULL;
788         ir_graph  *callee = NULL;
789
790         /*
791          * If the block is bad, the whole subgraph will collapse later
792          * so do not count this call.
793          * This happens in dead code.
794          */
795         if (is_Bad(block))
796                 return;
797
798         cnt_inc(&graph->cnt[gcnt_all_calls]);
799
800         /* found a call, this function is not a leaf */
801         graph->is_leaf = 0;
802
803         if (is_SymConst(ptr)) {
804                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
805                         /* ok, we seem to know the entity */
806                         ent = get_SymConst_entity(ptr);
807                         callee = get_entity_irg(ent);
808
809                         /* it is recursive if it calls itself at least once */
810                         if (callee == graph->irg)
811                                 graph->is_recursive = 1;
812                         if (callee == NULL)
813                                 cnt_inc(&graph->cnt[gcnt_external_calls]);
814                 }  /* if */
815         } else {
816                 /* indirect call, we cannot predict the callee */
817                 cnt_inc(&graph->cnt[gcnt_indirect_calls]);
818
819                 /* NOT a leaf call */
820                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
821         }  /* if */
822
823         /* check if it's a chain call: then the call block
824          * must dominate the end block. */
825         {
826                 ir_node *curr = get_irg_end_block(graph->irg);
827                 int depth = get_Block_dom_depth(block);
828
829                 for (; curr != block && get_Block_dom_depth(curr) > depth;) {
830                         curr = get_Block_idom(curr);
831
832                         if (! curr || is_no_Block(curr))
833                                 break;
834                 }  /* for */
835
836                 if (curr != block)
837                         graph->is_chain_call = 0;
838         }
839
840         /* check, if the callee is a leaf */
841         if (callee) {
842                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
843
844                 if (called->is_analyzed) {
845                         if (! called->is_leaf)
846                                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
847                 }  /* if */
848         }  /* if */
849
850         analyse_params_of_Call(graph, call);
851 }  /* stat_update_call */
852
853 /**
854  * Update info on calls for graphs on the wait queue.
855  */
856 static void stat_update_call_2(ir_node *call, graph_entry_t *graph)
857 {
858         ir_node   *block = get_nodes_block(call);
859         ir_node   *ptr = get_Call_ptr(call);
860         ir_entity *ent = NULL;
861         ir_graph  *callee = NULL;
862
863         /*
864          * If the block is bad, the whole subgraph will collapse later
865          * so do not count this call.
866          * This happens in dead code.
867          */
868         if (is_Bad(block))
869                 return;
870
871         if (is_SymConst(ptr)) {
872                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
873                         /* ok, we seem to know the entity */
874                         ent = get_SymConst_entity(ptr);
875                         callee = get_entity_irg(ent);
876                 }  /* if */
877         }  /* if */
878
879         /* check, if the callee is a leaf */
880         if (callee) {
881                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
882
883                 assert(called->is_analyzed);
884
885                 if (! called->is_leaf)
886                         graph->is_leaf_call = LCS_NON_LEAF_CALL;
887         } else
888                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
889 }  /* stat_update_call_2 */
890
891 /**
892  * Find the base address of a Sel node.
893  *
894  * @param sel  the node
895  *
896  * @return the base address.
897  */
898 static ir_node *find_base_adr(ir_node *sel) {
899         ir_node *ptr = get_Sel_ptr(sel);
900
901         while (is_Sel(ptr)) {
902                 sel = ptr;
903                 ptr = get_Sel_ptr(sel);
904         }
905         return ptr;
906 }  /* find_base_adr */
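
/*
 * Example (illustration only): for a chained selection
 * Sel(Sel(frame, entity_a), entity_b) this returns the frame pointer,
 * i.e. the innermost non-Sel base of the address expression.
 */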
907
908 /**
909  * Update info on Load/Store address statistics.
910  */
911 static void stat_update_address(ir_node *node, graph_entry_t *graph) {
912         ir_opcode opc = get_irn_opcode(node);
913         ir_node *base;
914         ir_graph *irg;
915
916         switch (opc) {
917         case iro_SymConst:
918                 /* a global address */
919                 cnt_inc(&graph->cnt[gcnt_global_adr]);
920                 break;
921         case iro_Sel:
922                 base = find_base_adr(node);
923                 irg = current_ir_graph;
924                 if (base == get_irg_tls(irg)) {
925                         /* a TLS variable, like a global. */
926                         cnt_inc(&graph->cnt[gcnt_global_adr]);
927                 } else if (base == get_irg_frame(irg)) {
928                         /* a local Variable. */
929                         cnt_inc(&graph->cnt[gcnt_local_adr]);
930                 } else {
931                         /* Pointer access */
932                         if (is_Proj(base) && skip_Proj(get_Proj_pred(base)) == get_irg_start(irg)) {
933                                 /* pointer access through parameter, check for THIS */
934                                 ir_entity *ent = get_irg_entity(irg);
935
936                                 if (ent != NULL) {
937                                         ir_type *ent_tp = get_entity_type(ent);
938
939                                         if (get_method_calling_convention(ent_tp) & cc_this_call) {
940                                                 if (get_Proj_proj(base) == 0) {
941                                                         /* THIS pointer */
942                                                         cnt_inc(&graph->cnt[gcnt_this_adr]);
943                                                         goto end_parameter;
944                                                 }  /* if */
945                                         }  /* if */
946                                 }  /* if */
947                                 /* other parameter */
948                                 cnt_inc(&graph->cnt[gcnt_param_adr]);
949 end_parameter:  ;
950                         } else {
951                                 /* unknown Pointer access */
952                                 cnt_inc(&graph->cnt[gcnt_other_adr]);
953                         }  /* if */
954                 }  /* if */
955         default:
956                 ;
957         }  /* switch */
958 }  /* stat_update_address */
959
960 /**
961  * Walker for reachable nodes count.
962  */
963 static void update_node_stat(ir_node *node, void *env)
964 {
965         graph_entry_t *graph = env;
966         node_entry_t *entry;
967
968         ir_op *op = stat_get_irn_op(node);
969         int i, arity = get_irn_arity(node);
970
971         entry = opcode_get_entry(op, graph->opcode_hash);
972
973         cnt_inc(&entry->cnt_alive);
974         cnt_add_i(&graph->cnt[gcnt_edges], arity);
975
976         /* count block edges */
977         update_block_info(node, graph);
978
979         /* count extended block edges */
980         if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
981                 if (graph->irg != get_const_code_irg())
982                         update_extbb_info(node, graph);
983         }  /* if */
984
985         /* handle statistics for special node types */
986
987         switch (op->code) {
988         case iro_Call:
989                 /* check for properties that depend on calls, like recursion/leaf/indirect calls */
990                 stat_update_call(node, graph);
991                 break;
992         case iro_Load:
993                 /* check address properties */
994                 stat_update_address(get_Load_ptr(node), graph);
995                 break;
996         case iro_Store:
997                 /* check address properties */
998                 stat_update_address(get_Store_ptr(node), graph);
999                 break;
1000         case iro_Phi:
1001                 /* check for non-strict Phi nodes */
1002                 for (i = arity - 1; i >= 0; --i) {
1003                         ir_node *pred = get_Phi_pred(node, i);
1004                         if (is_Unknown(pred)) {
1005                                 /* found an Unknown predecessor, graph is not strict */
1006                                 graph->is_strict = 0;
1007                                 break;
1008                         }
1009                 }
1010         default:
1011                 ;
1012         }  /* switch */
1013
1014         /* we want to count the constant inputs of nodes, not the CSE'd constants themselves */
1015         if (status->stat_options & FIRMSTAT_COUNT_CONSTS) {
1016                 int i;
1017
1018                 for (i = get_irn_arity(node) - 1; i >= 0; --i) {
1019                         ir_node *pred = get_irn_n(node, i);
1020
1021                         if (is_Const(pred)) {
1022                                 /* check properties of constants */
1023                                 stat_update_const(status, pred, graph);
1024                         }  /* if */
1025                 }  /* for */
1026         }  /* if */
1027 }  /* update_node_stat */
1028
1029 /**
1030  * Walker for reachable nodes count for graphs on the wait_q.
1031  */
1032 static void update_node_stat_2(ir_node *node, void *env) {
1033         graph_entry_t *graph = env;
1034
1035         /* check for properties that depend on calls, like recursion/leaf/indirect calls */
1036         if (is_Call(node))
1037                 stat_update_call_2(node, graph);
1038 }  /* update_node_stat_2 */
1039
1040 /**
1041  * Get the current address mark.
1042  */
1043 static unsigned get_adr_mark(graph_entry_t *graph, ir_node *node) {
1044         address_mark_entry_t *value = set_find(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1045
1046         return value ? value->mark : 0;
1047 }  /* get_adr_mark */
1048
1049 /**
1050  * Set the current address mark.
1051  */
1052 static void set_adr_mark(graph_entry_t *graph, ir_node *node, unsigned val) {
1053         address_mark_entry_t *value = set_insert(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1054
1055         value->mark = val;
1056 }  /* set_adr_mark */
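
/*
 * Mark semantics (summary, see mark_address_calc() and count_adr_ops()
 * below):
 *
 *     MARK_ADDRESS_CALC                  the node computes an address itself
 *     MARK_REF_ADR only                  the node is referenced only by
 *                                        address calculations (counted as
 *                                        a pure address operation)
 *     MARK_REF_ADR | MARK_REF_NON_ADR    the node is referenced by address
 *                                        and non-address users (counted
 *                                        among all address operations)
 */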
1057
1058 #undef DUMP_ADR_MODE
1059
1060 #ifdef DUMP_ADR_MODE
1061 /**
1062  * a vcg attribute hook: Color a node with a different color if
1063  * it's identified as a part of an address expression or at least referenced
1064  * by an address expression.
1065  */
1066 static int stat_adr_mark_hook(FILE *F, ir_node *node, ir_node *local)
1067 {
1068         ir_node *n           = local ? local : node;
1069         ir_graph *irg        = get_irn_irg(n);
1070         graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1071         unsigned mark        = get_adr_mark(graph, n);
1072
1073         if (mark & MARK_ADDRESS_CALC)
1074                 fprintf(F, "color: purple");
1075         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1076                 fprintf(F, "color: pink");
1077         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1078                 fprintf(F, "color: lightblue");
1079         else
1080                 return 0;
1081
1082         /* I know the color! */
1083         return 1;
1084 }  /* stat_adr_mark_hook */
1085 #endif /* DUMP_ADR_MODE */
1086
1087 /**
1088  * Return the "operational" mode of a Firm node.
1089  */
1090 static ir_mode *get_irn_op_mode(ir_node *node) {
1091         switch (get_irn_opcode(node)) {
1092         case iro_Load:
1093                 return get_Load_mode(node);
1094         case iro_Store:
1095                 return get_irn_mode(get_Store_value(node));
1096         case iro_DivMod:
1097                 return get_irn_mode(get_DivMod_left(node));
1098         case iro_Div:
1099                 return get_irn_mode(get_Div_left(node));
1100         case iro_Mod:
1101                 return get_irn_mode(get_Mod_left(node));
1102         case iro_Cmp:
1103                 /* Cmp is no address calculation, or is it? */
1104         default:
1105                 return get_irn_mode(node);
1106         }  /* switch */
1107 }  /* get_irn_op_mode */
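
/*
 * Example (illustration only): for a Store node the "operational" mode
 * returned here is the mode of the stored value, not the node's own mode,
 * and a Load yields its load mode.  This is what the address-mark walker
 * below works with.
 */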
1108
1109 /**
1110  * Post-walker that marks every node that is an address calculation.
1111  *
1112  * Users of a node must be visited first. We ensure this by
1113  * calling it in the post of an outs walk. This should work even in cycles,
1114  * while the normal pre-walk will not.
1115  */
1116 static void mark_address_calc(ir_node *node, void *env) {
1117         graph_entry_t *graph = env;
1118         ir_mode *mode = get_irn_op_mode(node);
1119         int i, n;
1120         unsigned mark_preds = MARK_REF_NON_ADR;
1121
1122         if (! mode_is_data(mode))
1123                 return;
1124
1125         if (mode_is_reference(mode)) {
1126                 /* a reference is calculated here, we are sure */
1127                 set_adr_mark(graph, node, MARK_ADDRESS_CALC);
1128
1129                 mark_preds = MARK_REF_ADR;
1130         } else {
1131                 unsigned mark = get_adr_mark(graph, node);
1132
1133                 if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR) {
1134                         /*
1135                          * this node has no reference mode, but is only
1136                          * referenced by address calculations
1137                          */
1138                         mark_preds = MARK_REF_ADR;
1139                 }  /* if */
1140         }  /* if */
1141
1142         /* mark all predecessors */
1143         for (i = 0, n = get_irn_arity(node); i < n; ++i) {
1144                 ir_node *pred = get_irn_n(node, i);
1145
1146                 mode = get_irn_op_mode(pred);
1147                 if (! mode_is_data(mode))
1148                         continue;
1149
1150                 set_adr_mark(graph, pred, get_adr_mark(graph, pred) | mark_preds);
1151         }  /* for */
1152 }  /* mark_address_calc */
1153
1154 /**
1155  * Post-walker that counts the address operations marked by
1156  * mark_address_calc().
1157  *
1158  * It only reads the marks set during the outs walk above, so it can run
1159  * in a normal graph walk.
1160  */
1161 static void count_adr_ops(ir_node *node, void *env) {
1162         graph_entry_t *graph = env;
1163         unsigned mark        = get_adr_mark(graph, node);
1164
1165         if (mark & MARK_ADDRESS_CALC)
1166                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1167         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1168                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1169         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1170                 cnt_inc(&graph->cnt[gcnt_all_adr_ops]);
1171 }  /* count_adr_ops */
1172
1173 /**
1174  * Called for every graph when the graph is either deleted or stat_dump_snapshot()
1175  * is called; recalculates all statistic info.
1176  *
1177  * @param global    The global entry
1178  * @param graph     The current entry
1179  */
1180 static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph)
1181 {
1182         node_entry_t *entry;
1183         int i;
1184
1185         /* clear first the alive counter in the graph */
1186         foreach_pset(graph->opcode_hash, entry) {
1187                 cnt_clr(&entry->cnt_alive);
1188         }  /* foreach_pset */
1189
1190         /* set pessimistic values */
1191         graph->is_leaf       = 1;
1192         graph->is_leaf_call  = LCS_UNKNOWN;
1193         graph->is_recursive  = 0;
1194         graph->is_chain_call = 1;
1195         graph->is_strict     = 1;
1196
1197         /* create new block counter */
1198         graph->block_hash = new_pset(block_cmp, 5);
1199
1200         /* we need dominator info */
1201         if (graph->irg != get_const_code_irg()) {
1202                 assure_doms(graph->irg);
1203
1204                 if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
1205                         /* we need extended basic blocks */
1206                         compute_extbb(graph->irg);
1207
1208                         /* create new extbb counter */
1209                         graph->extbb_hash = new_pset(block_cmp, 5);
1210                 }  /* if */
1211         }  /* if */
1212
1213         /* count the nodes in the graph */
1214         irg_walk_graph(graph->irg, update_node_stat, NULL, graph);
1215
1216 #if 0
1217         /* Uncomment this code if chain-call means exactly one call. */
1218         entry = opcode_get_entry(op_Call, graph->opcode_hash);
1219
1220         /* check if we have more than 1 call */
1221         if (cnt_gt(entry->cnt_alive, 1))
1222                 graph->is_chain_call = 0;
1223 #endif
1224
1225         /* recursive functions are never chain calls, leaf functions have no calls */
1226         if (graph->is_recursive || graph->is_leaf)
1227                 graph->is_chain_call = 0;
1228
1229         /* assuming we walk every graph only ONCE, we can sum up the global counts here */
1230         foreach_pset(graph->opcode_hash, entry) {
1231                 node_entry_t *g_entry = opcode_get_entry(entry->op, global->opcode_hash);
1232
1233                 /* update the node counter */
1234                 cnt_add(&g_entry->cnt_alive, &entry->cnt_alive);
1235         }  /* foreach_pset */
1236
1237         /* count the number of address calculation */
1238         if (graph->irg != get_const_code_irg()) {
1239                 ir_graph *rem = current_ir_graph;
1240
1241                 assure_irg_outs(graph->irg);
1242
1243                 /* Must be done on the outs graph */
1244                 current_ir_graph = graph->irg;
1245                 irg_out_walk(get_irg_start(graph->irg), NULL, mark_address_calc, graph);
1246                 current_ir_graph = rem;
1247
1248 #ifdef DUMP_ADR_MODE
1249                 /* register the vcg hook and dump the graph for test */
1250                 set_dump_node_vcgattr_hook(stat_adr_mark_hook);
1251                 dump_ir_block_graph(graph->irg, "-adr");
1252                 set_dump_node_vcgattr_hook(NULL);
1253 #endif /* DUMP_ADR_MODE */
1254
1255                 irg_walk_graph(graph->irg, NULL, count_adr_ops, graph);
1256         }  /* if */
1257
1258         /* count the DAG's */
1259         if (status->stat_options & FIRMSTAT_COUNT_DAG)
1260                 count_dags_in_graph(global, graph);
1261
1262         /* calculate the patterns of this graph */
1263         stat_calc_pattern_history(graph->irg);
1264
1265         /* a leaf function does not call others */
1266         if (graph->is_leaf)
1267                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
1268         else if (graph->is_leaf_call == LCS_UNKNOWN) {
1269                 /* we still don't know if this graph calls leaf-functions, so enqueue */
1270                 pdeq_putl(status->wait_q, graph);
1271         }  /* if */
1272
1273         /* we have analyzed this graph */
1274         graph->is_analyzed = 1;
1275
1276         /* accumulate all counters */
1277         for (i = 0; i < _gcnt_last; ++i)
1278                 cnt_add(&global->cnt[i], &graph->cnt[i]);
1279 }  /* update_graph_stat */
1280
1281 /**
1282  * Called for every graph that was on the wait_q in stat_dump_snapshot();
1283  * finishes all statistic info calculations.
1284  *
1285  * @param global    The global entry
1286  * @param graph     The current entry
1287  */
1288 static void update_graph_stat_2(graph_entry_t *global, graph_entry_t *graph)
1289 {
1290         (void) global;
1291         if (graph->is_deleted) {
1292                 /* deleted, ignore */
1293                 return;
1294         }
1295
1296         if (graph->irg) {
1297                 /* update the call info of the graph */
1298                 irg_walk_graph(graph->irg, update_node_stat_2, NULL, graph);
1299
1300                 if (graph->is_leaf_call == LCS_UNKNOWN)
1301                         graph->is_leaf_call = LCS_LEAF_CALL;
1302         }  /* if */
1303 }  /* update_graph_stat_2 */
1304
1305 /**
1306  * Register a dumper.
1307  */
1308 static void stat_register_dumper(const dumper_t *dumper) {
1309         dumper_t *p = XMALLOC(dumper_t);
1310
1311         memcpy(p, dumper, sizeof(*p));
1312
1313         p->next        = status->dumper;
1314         p->status      = status;
1315         status->dumper = p;
1316
1317         /* FIXME: memory leak */
1318 }  /* stat_register_dumper */
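
/*
 * Illustrative registration (sketch; "my_dumper" is a hypothetical
 * dumper_t instance, not one defined in this file):
 *
 *     static const dumper_t my_dumper = { ... };
 *     stat_register_dumper(&my_dumper);
 *
 * The dumper is copied into freshly allocated memory, so the argument may
 * have automatic storage duration.
 */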
1319
1320 /**
1321  * Dumps the statistics of an IR graph.
1322  */
1323 static void stat_dump_graph(graph_entry_t *entry) {
1324         dumper_t *dumper;
1325
1326         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1327                 if (dumper->dump_graph)
1328                         dumper->dump_graph(dumper, entry);
1329         }  /* for */
1330 }  /* stat_dump_graph */
1331
1332 /**
1333  * Calls all registered dumper functions.
1334  */
1335 static void stat_dump_registered(graph_entry_t *entry) {
1336         dumper_t *dumper;
1337
1338         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1339                 if (dumper->func_map) {
1340                         dump_graph_FUNC func;
1341
1342                         foreach_pset(dumper->func_map, func)
1343                                 func(dumper, entry);
1344                 }  /* if */
1345         }  /* for */
1346 }  /* stat_dump_registered */
1347
1348 /**
1349  * Dumps a constant table.
1350  */
1351 static void stat_dump_consts(const constant_info_t *tbl) {
1352         dumper_t *dumper;
1353
1354         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1355                 if (dumper->dump_const_tbl)
1356                         dumper->dump_const_tbl(dumper, tbl);
1357         }  /* for */
1358 }  /* stat_dump_consts */
1359
1360 /**
1361  * Dumps the parameter distribution
1362  */
1363 static void stat_dump_param_tbl(const distrib_tbl_t *tbl, graph_entry_t *global) {
1364         dumper_t *dumper;
1365
1366         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1367                 if (dumper->dump_param_tbl)
1368                         dumper->dump_param_tbl(dumper, tbl, global);
1369         }  /* for */
1370 }  /* stat_dump_param_tbl */
1371
1372 /**
1373  * Dumps the optimization counter
1374  */
1375 static void stat_dump_opt_cnt(const counter_t *tbl, unsigned len) {
1376         dumper_t *dumper;
1377
1378         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1379                 if (dumper->dump_opt_cnt)
1380                         dumper->dump_opt_cnt(dumper, tbl, len);
1381         }  /* for */
1382 }  /* stat_dump_opt_cnt */
1383
1384 /**
1385  * Initialize the dumper.
1386  */
1387 static void stat_dump_init(const char *name) {
1388         dumper_t *dumper;
1389
1390         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1391                 if (dumper->init)
1392                         dumper->init(dumper, name);
1393         }  /* for */
1394 }  /* stat_dump_init */
1395
1396 /**
1397  * Finish the dumper.
1398  */
1399 static void stat_dump_finish(void) {
1400         dumper_t *dumper;
1401
1402         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1403                 if (dumper->finish)
1404                         dumper->finish(dumper);
1405         }  /* for */
1406 }  /* stat_dump_finish */
1407
1408 /**
1409  * Registers an additional dump function for all dumpers.
1410  */
1411 void stat_register_dumper_func(dump_graph_FUNC func) {
1412         dumper_t *dumper;
1413
1414         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1415                 if (! dumper->func_map)
1416                         dumper->func_map = pset_new_ptr(3);
1417                 pset_insert_ptr(dumper->func_map, func);
1418         }  /* for */
1419 }  /* stat_register_dumper_func */
1420
1421 /* ---------------------------------------------------------------------- */
1422
1423 /*
1424  * Helper: get an ir_op from an opcode.
1425  */
1426 ir_op *stat_get_op_from_opcode(ir_opcode code) {
1427         return opcode_find_entry(code, status->ir_op_hash);
1428 }  /* stat_get_op_from_opcode */
1429
1430 /**
1431  * Hook: A new IR op is registered.
1432  *
1433  * @param ctx  the hook context
1434  * @param op   the new IR opcode that was created.
1435  */
1436 static void stat_new_ir_op(void *ctx, ir_op *op) {
1437         (void) ctx;
1438         if (! status->stat_options)
1439                 return;
1440
1441         STAT_ENTER;
1442         {
1443                 graph_entry_t *graph = graph_get_entry(NULL, status->irg_hash);
1444
1445                 /* execute for side effect :-) */
1446                 (void)opcode_get_entry(op, graph->opcode_hash);
1447
1448                 pset_insert(status->ir_op_hash, op, op->code);
1449         }
1450         STAT_LEAVE;
1451 }  /* stat_new_ir_op */
1452
1453 /**
1454  * Hook: An IR op is freed.
1455  *
1456  * @param ctx  the hook context
1457  * @param op   the IR opcode that is freed
1458  */
1459 static void stat_free_ir_op(void *ctx, ir_op *op) {
1460         (void) ctx;
1461         (void) op;
1462         if (! status->stat_options)
1463                 return;
1464
1465         STAT_ENTER;
1466         {
1467         }
1468         STAT_LEAVE;
1469 }  /* stat_free_ir_op */
1470
1471 /**
1472  * Hook: A new node is created.
1473  *
1474  * @param ctx   the hook context
1475  * @param irg   the IR graph on which the node is created
1476  * @param node  the new IR node that was created
1477  */
1478 static void stat_new_node(void *ctx, ir_graph *irg, ir_node *node) {
1479         (void) ctx;
1480         (void) irg;
1481         if (! status->stat_options)
1482                 return;
1483
1484         /* do NOT count during dead node elimination */
1485         if (status->in_dead_node_elim)
1486                 return;
1487
1488         STAT_ENTER;
1489         {
1490                 node_entry_t *entry;
1491                 graph_entry_t *graph;
1492                 ir_op *op = stat_get_irn_op(node);
1493
1494                 /* increase global value */
1495                 graph = graph_get_entry(NULL, status->irg_hash);
1496                 entry = opcode_get_entry(op, graph->opcode_hash);
1497                 cnt_inc(&entry->new_node);
1498
1499                 /* increase local value */
1500                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1501                 entry = opcode_get_entry(op, graph->opcode_hash);
1502                 cnt_inc(&entry->new_node);
1503         }
1504         STAT_LEAVE;
1505 }  /* stat_new_node */
1506
1507 /**
1508  * Hook: A node is changed into an Id node
1509  *
1510  * @param ctx   the hook context
1511  * @param node  the IR node that will be turned into an ID
1512  */
1513 static void stat_turn_into_id(void *ctx, ir_node *node) {
1514         (void) ctx;
1515         if (! status->stat_options)
1516                 return;
1517
1518         STAT_ENTER;
1519         {
1520                 node_entry_t *entry;
1521                 graph_entry_t *graph;
1522                 ir_op *op = stat_get_irn_op(node);
1523
1524                 /* increase global value */
1525                 graph = graph_get_entry(NULL, status->irg_hash);
1526                 entry = opcode_get_entry(op, graph->opcode_hash);
1527                 cnt_inc(&entry->into_Id);
1528
1529                 /* increase local value */
1530                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1531                 entry = opcode_get_entry(op, graph->opcode_hash);
1532                 cnt_inc(&entry->into_Id);
1533         }
1534         STAT_LEAVE;
1535 }  /* stat_turn_into_id */
1536
1537 /**
1538  * Hook: A node is normalized
1539  *
1540  * @param ctx   the hook context
1541  * @param node  the IR node that was normalized
1542  */
1543 static void stat_normalize(void *ctx, ir_node *node) {
1544         (void) ctx;
1545         if (! status->stat_options)
1546                 return;
1547
1548         STAT_ENTER;
1549         {
1550                 node_entry_t *entry;
1551                 graph_entry_t *graph;
1552                 ir_op *op = stat_get_irn_op(node);
1553
1554                 /* increase global value */
1555                 graph = graph_get_entry(NULL, status->irg_hash);
1556                 entry = opcode_get_entry(op, graph->opcode_hash);
1557                 cnt_inc(&entry->normalized);
1558
1559                 /* increase local value */
1560                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1561                 entry = opcode_get_entry(op, graph->opcode_hash);
1562                 cnt_inc(&entry->normalized);
1563         }
1564         STAT_LEAVE;
1565 }  /* stat_normalize */
1566
1567 /**
1568  * Hook: A new graph was created
1569  *
1570  * @param ctx  the hook context
1571  * @param irg  the new IR graph that was created
1572  * @param ent  the entity of this graph
1573  */
1574 static void stat_new_graph(void *ctx, ir_graph *irg, ir_entity *ent) {
1575         (void) ctx;
1576         if (! status->stat_options)
1577                 return;
1578
1579         STAT_ENTER;
1580         {
1581                 /* execute for side effect :-) */
1582                 graph_entry_t * graph = graph_get_entry(irg, status->irg_hash);
1583
1584                 graph->ent           = ent;
1585                 graph->is_deleted    = 0;
1586                 graph->is_leaf       = 0;
1587                 graph->is_leaf_call  = 0;
1588                 graph->is_recursive  = 0;
1589                 graph->is_chain_call = 0;
1590                 graph->is_strict     = 1;
1591                 graph->is_analyzed   = 0;
1592         }
1593         STAT_LEAVE;
1594 }  /* stat_new_graph */
1595
1596 /**
1597  * Hook: A graph will be deleted
1598  *
1599  * @param ctx  the hook context
1600  * @param irg  the IR graph that will be deleted
1601  *
1602  * Note that we still hold the information for this graph
1603  * in our hash maps: only a flag is set, which prevents this
1604  * information from being changed; it is "frozen" from now on.
1605  */
1606 static void stat_free_graph(void *ctx, ir_graph *irg) {
1607         (void) ctx;
1608         if (! status->stat_options)
1609                 return;
1610
1611         STAT_ENTER;
1612         {
1613                 graph_entry_t *graph  = graph_get_entry(irg, status->irg_hash);
1614                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
1615
1616                 graph->is_deleted = 1;
1617
1618                 if (status->stat_options & FIRMSTAT_COUNT_DELETED) {
1619                         /* count the nodes of the graph now; it will be destroyed later */
1620                         update_graph_stat(global, graph);
1621                 }  /* if */
1622         }
1623         STAT_LEAVE;
1624 }  /* stat_free_graph */
1625
1626 /**
1627  * Hook: A walk over a graph is initiated. Do not count walks from statistics code.
1628  *
1629  * @param ctx  the hook context
1630  * @param irg  the IR graph that will be walked
1631  * @param pre  the pre walker
1632  * @param post the post walker
1633  */
1634 static void stat_irg_walk(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1635 {
1636         (void) ctx;
1637         (void) pre;
1638         (void) post;
1639         if (! status->stat_options)
1640                 return;
1641
1642         STAT_ENTER_SINGLE;
1643         {
1644                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1645
1646                 cnt_inc(&graph->cnt[gcnt_acc_walked]);
1647         }
1648         STAT_LEAVE;
1649 }  /* stat_irg_walk */
1650
1651 /**
1652  * Hook: A walk over a graph in block-wise order is initiated. Do not count walks from statistics code.
1653  *
1654  * @param ctx  the hook context
1655  * @param irg  the IR graph that will be walked
1656  * @param pre  the pre walker
1657  * @param post the post walker
1658  */
1659 static void stat_irg_walk_blkwise(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1660 {
1661         /* for now, do NOT differentiate between blockwise and normal */
1662         stat_irg_walk(ctx, irg, pre, post);
1663 }  /* stat_irg_walk_blkwise */
1664
1665 /**
1666  * Hook: A walk over the graph's blocks is initiated. Do not count walks from statistics code.
1667  *
1668  * @param ctx  the hook context
1669  * @param irg  the IR graph that will be walked
1670  * @param node the IR node at which the block walk starts
1671  * @param pre  the pre walker
1672  * @param post the post walker
1673  */
1674 static void stat_irg_block_walk(void *ctx, ir_graph *irg, ir_node *node, generic_func *pre, generic_func *post)
1675 {
1676         (void) ctx;
1677         (void) node;
1678         (void) pre;
1679         (void) post;
1680         if (! status->stat_options)
1681                 return;
1682
1683         STAT_ENTER_SINGLE;
1684         {
1685                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1686
1687                 cnt_inc(&graph->cnt[gcnt_acc_walked_blocks]);
1688         }
1689         STAT_LEAVE;
1690 }  /* stat_irg_block_walk */
1691
1692 /**
1693  * Called for every node that is removed due to an optimization.
1694  *
1695  * @param n     the IR node that will be removed
1696  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
1697  * @param kind  the optimization kind
1698  */
1699 static void removed_due_opt(ir_node *n, hmap_opt_entry_t *hmap, hook_opt_kind kind) {
1700         opt_entry_t *entry;
1701         ir_op *op = stat_get_irn_op(n);
1702
1703         /* ignore CSE for Constants */
1704         if (kind == HOOK_OPT_CSE && (is_Const(n) || is_SymConst(n)))
1705                 return;
1706
1707         /* increase global value */
1708         entry = opt_get_entry(op, hmap);
1709         cnt_inc(&entry->count);
1710 }  /* removed_due_opt */
1711
1712 /**
1713  * Hook: Some nodes were optimized into some others due to an optimization.
1714  *
1715  * @param ctx  the hook context
1716  */
1717 static void stat_merge_nodes(
1718     void *ctx,
1719     ir_node **new_node_array, int new_num_entries,
1720     ir_node **old_node_array, int old_num_entries,
1721     hook_opt_kind opt)
1722 {
1723         (void) ctx;
1724         if (! status->stat_options)
1725                 return;
1726
1727         STAT_ENTER;
1728         {
1729                 int i, j;
1730                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1731
1732                 cnt_inc(&status->num_opts[opt]);
1733                 if (status->reassoc_run)
1734                         opt = HOOK_OPT_REASSOC;
1735
1736                 for (i = 0; i < old_num_entries; ++i) {
1737                         /* nodes might be in both new and old; if we find a node in
1738                            both sets, it is NOT removed (see the worked example below) */
1739                         for (j = 0; j < new_num_entries; ++j) {
1740                                 if (old_node_array[i] == new_node_array[j])
1741                                         break;
1742                         }  /* for */
1743                         if (j >= new_num_entries) {
1744                                 int xopt = opt;
1745
1746                                 /* sometimes we did not detect that it was replaced by a Const */
1747                                 if (opt == HOOK_OPT_CONFIRM && new_num_entries == 1) {
1748                                         ir_op *op = get_irn_op(new_node_array[0]);
1749
1750                                         if (op == op_Const || op == op_SymConst)
1751                                                 xopt = HOOK_OPT_CONFIRM_C;
1752                                 }  /* if */
1753
1754                                 removed_due_opt(old_node_array[i], graph->opt_hash[xopt], xopt);
1755                         }  /* if */
1756                 }  /* for */
1757         }
1758         STAT_LEAVE;
1759 }  /* stat_merge_nodes */
1760
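/*
 * Worked example for the old/new array handling in stat_merge_nodes() above
 * (illustrative, not part of the original source): if an optimization reports
 * old = { Add, Const } and new = { Const }, the Const is found in both arrays
 * and therefore counts as kept, while the Add does not appear in the new array
 * and is booked as removed for the given hook_opt_kind via removed_due_opt().
 */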
1761 /**
1762  * Hook: Reassociation is started/stopped.
1763  *
1764  * @param ctx   the hook context
1765  * @param flag  if non-zero, reassociation is started, otherwise it is stopped
1766  */
1767 static void stat_reassociate(void *ctx, int flag) {
1768         (void) ctx;
1769         if (! status->stat_options)
1770                 return;
1771
1772         STAT_ENTER;
1773         {
1774                 status->reassoc_run = flag;
1775         }
1776         STAT_LEAVE;
1777 }  /* stat_reassociate */
1778
1779 /**
1780  * Hook: A node was lowered into other nodes
1781  *
1782  * @param ctx  the hook context
1783  * @param node the IR node that will be lowered
1784  */
1785 static void stat_lower(void *ctx, ir_node *node) {
1786         (void) ctx;
1787         if (! status->stat_options)
1788                 return;
1789
1790         STAT_ENTER;
1791         {
1792                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1793
1794                 removed_due_opt(node, graph->opt_hash[HOOK_LOWERED], HOOK_LOWERED);
1795         }
1796         STAT_LEAVE;
1797 }  /* stat_lower */
1798
1799 /**
1800  * Hook: A graph was inlined.
1801  *
1802  * @param ctx  the hook context
1803  * @param call the IR call that will be changed into the body of
1804  *             the called IR graph
1805  * @param called_irg  the IR graph representing the called routine
1806  */
1807 static void stat_inline(void *ctx, ir_node *call, ir_graph *called_irg)
1808 {
1809         (void) ctx;
1810         if (! status->stat_options)
1811                 return;
1812
1813         STAT_ENTER;
1814         {
1815                 ir_graph *irg = get_irn_irg(call);
1816                 graph_entry_t *i_graph = graph_get_entry(called_irg, status->irg_hash);
1817                 graph_entry_t *graph   = graph_get_entry(irg, status->irg_hash);
1818
1819                 cnt_inc(&graph->cnt[gcnt_acc_got_inlined]);
1820                 cnt_inc(&i_graph->cnt[gcnt_acc_was_inlined]);
1821         }
1822         STAT_LEAVE;
1823 }  /* stat_inline */
1824
1825 /**
1826  * Hook: A graph with tail-recursions was optimized.
1827  *
1828  * @param ctx  the hook context
1829  */
1830 static void stat_tail_rec(void *ctx, ir_graph *irg, int n_calls) {
1831         (void) ctx;
1832         if (! status->stat_options)
1833                 return;
1834
1835         STAT_ENTER;
1836         {
1837                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1838
1839                 graph->num_tail_recursion += n_calls;
1840         }
1841         STAT_LEAVE;
1842 }  /* stat_tail_rec */
1843
1844 /**
1845  * Hook: Strength reduction was performed on an iteration variable.
1846  *
1847  * @param ctx  the hook context
1848  */
1849 static void stat_strength_red(void *ctx, ir_graph *irg, ir_node *strong) {
1850         (void) ctx;
1851         if (! status->stat_options)
1852                 return;
1853
1854         STAT_ENTER;
1855         {
1856                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1857                 cnt_inc(&graph->cnt[gcnt_acc_strength_red]);
1858
1859                 removed_due_opt(strong, graph->opt_hash[HOOK_OPT_STRENGTH_RED], HOOK_OPT_STRENGTH_RED);
1860         }
1861         STAT_LEAVE;
1862 }  /* stat_strength_red */
1863
1864 /**
1865  * Hook: Start/Stop the dead node elimination.
1866  *
1867  * @param ctx  the hook context
1868  */
1869 static void stat_dead_node_elim(void *ctx, ir_graph *irg, int start) {
1870         (void) ctx;
1871         (void) irg;
1872         if (! status->stat_options)
1873                 return;
1874
1875         status->in_dead_node_elim = (start != 0);
1876 }  /* stat_dead_node_elim */
1877
1878 /**
1879  * Hook: if-conversion was tried.
1880  */
1881 static void stat_if_conversion(void *context, ir_graph *irg, ir_node *phi,
1882                                int pos, ir_node *mux, if_result_t reason)
1883 {
1884         (void) context;
1885         (void) phi;
1886         (void) pos;
1887         (void) mux;
1888         if (! status->stat_options)
1889                 return;
1890
1891         STAT_ENTER;
1892         {
1893                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1894
1895                 cnt_inc(&graph->cnt[gcnt_if_conv + reason]);
1896         }
1897         STAT_LEAVE;
1898 }  /* stat_if_conversion */
1899
1900 /**
1901  * Hook: real function call was optimized.
1902  */
1903 static void stat_func_call(void *context, ir_graph *irg, ir_node *call)
1904 {
1905         (void) context;
1906         (void) call;
1907         if (! status->stat_options)
1908                 return;
1909
1910         STAT_ENTER;
1911         {
1912                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1913
1914                 cnt_inc(&graph->cnt[gcnt_acc_real_func_call]);
1915         }
1916         STAT_LEAVE;
1917 }  /* stat_func_call */
1918
1919 /**
1920  * Hook: A multiply was replaced by a series of Shifts/Adds/Subs.
1921  *
1922  * @param ctx  the hook context
1923  */
1924 static void stat_arch_dep_replace_mul_with_shifts(void *ctx, ir_node *mul) {
1925         (void) ctx;
1926         if (! status->stat_options)
1927                 return;
1928
1929         STAT_ENTER;
1930         {
1931                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1932                 removed_due_opt(mul, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1933         }
1934         STAT_LEAVE;
1935 }  /* stat_arch_dep_replace_mul_with_shifts */
1936
1937 /**
1938  * Hook: A division by const was replaced.
1939  *
1940  * @param ctx   the hook context
1941  * @param node  the division node that will be optimized
1942  */
1943 static void stat_arch_dep_replace_division_by_const(void *ctx, ir_node *node) {
1944         (void) ctx;
1945         if (! status->stat_options)
1946                 return;
1947
1948         STAT_ENTER;
1949         {
1950                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1951                 removed_due_opt(node, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1952         }
1953         STAT_LEAVE;
1954 }  /* stat_arch_dep_replace_division_by_const */
1955
1956 /**
1957  * Update the register pressure of a block.
1958  *
1959  * @param irg        the irg containing the block
1960  * @param block      the block for which the reg pressure should be set
1961  * @param pressure   the pressure
1962  * @param class_name the name of the register class
1963  */
1964 void stat_be_block_regpressure(ir_graph *irg, ir_node *block, int pressure, const char *class_name)
1965 {
1966         if (! status->stat_options)
1967                 return;
1968
1969         STAT_ENTER;
1970         {
1971                 graph_entry_t        *graph = graph_get_entry(irg, status->irg_hash);
1972                 be_block_entry_t     *block_ent;
1973                 reg_pressure_entry_t *rp_ent;
1974
1975                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
1976                 rp_ent    = obstack_alloc(&status->be_data, sizeof(*rp_ent));
1977                 memset(rp_ent, 0, sizeof(*rp_ent));
1978
1979                 rp_ent->class_name = class_name;
1980                 rp_ent->pressure   = pressure;
1981
1982                 pset_insert(block_ent->reg_pressure, rp_ent, HASH_PTR(class_name));
1983         }
1984         STAT_LEAVE;
1985 }  /* stat_be_block_regpressure */
1986
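/*
 * Usage sketch for the entry point above (illustrative only; my_irg, my_block,
 * the pressure value and the class name "gp" are hypothetical placeholders for
 * whatever a backend actually measured):
 *
 *     if (stat_is_active())
 *         stat_be_block_regpressure(my_irg, my_block, 5, "gp");
 *
 * The guard is optional, since the function returns immediately when the
 * statistics are disabled.
 */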
1987 /**
1988  * Update the distribution of ready nodes of a block
1989  *
1990  * @param irg        the irg containing the block
1991  * @param block      the block for which the ready-node distribution is updated
1992  * @param num_ready  the number of ready nodes
1993  */
1994 void stat_be_block_sched_ready(ir_graph *irg, ir_node *block, int num_ready)
1995 {
1996         if (! status->stat_options)
1997                 return;
1998
1999         STAT_ENTER;
2000         {
2001                 graph_entry_t    *graph = graph_get_entry(irg, status->irg_hash);
2002                 be_block_entry_t *block_ent;
2003
2004                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2005
2006                 /* increase the counter of corresponding number of ready nodes */
2007                 stat_inc_int_distrib_tbl(block_ent->sched_ready, num_ready);
2008         }
2009         STAT_LEAVE;
2010 }  /* stat_be_block_sched_ready */
2011
2012 /**
2013  * Update the permutation statistic of a block.
2014  *
2015  * @param class_name the name of the register class
2016  * @param n_regs     number of registers in the register class
2017  * @param perm       the perm node
2018  * @param block      the block containing the perm
2019  * @param size       the size of the perm
2020  * @param real_size  number of pairs with different registers
2021  */
2022 void stat_be_block_stat_perm(const char *class_name, int n_regs, ir_node *perm, ir_node *block,
2023                              int size, int real_size)
2024 {
2025         if (! status->stat_options)
2026                 return;
2027
2028         STAT_ENTER;
2029         {
2030                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2031                 be_block_entry_t   *block_ent;
2032                 perm_class_entry_t *pc_ent;
2033                 perm_stat_entry_t  *ps_ent;
2034
2035                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2036                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2037                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2038
2039                 pc_ent->n_regs = n_regs;
2040
2041                 /* update information */
2042                 ps_ent->size      = size;
2043                 ps_ent->real_size = real_size;
2044         }
2045         STAT_LEAVE;
2046 }  /* stat_be_block_stat_perm */
2047
2048 /**
2049  * Update the permutation statistic of a single perm.
2050  *
2051  * @param class_name the name of the register class
2052  * @param perm       the perm node
2053  * @param block      the block containing the perm
2054  * @param is_chain   1 if chain, 0 if cycle
2055  * @param size       length of the cycle/chain
2056  * @param n_ops      the number of ops representing this cycle/chain after lowering
2057  */
2058 void stat_be_block_stat_permcycle(const char *class_name, ir_node *perm, ir_node *block,
2059                                   int is_chain, int size, int n_ops)
2060 {
2061         if (! status->stat_options)
2062                 return;
2063
2064         STAT_ENTER;
2065         {
2066                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2067                 be_block_entry_t   *block_ent;
2068                 perm_class_entry_t *pc_ent;
2069                 perm_stat_entry_t  *ps_ent;
2070
2071                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2072                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2073                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2074
2075                 if (is_chain) {
2076                         ps_ent->n_copies += n_ops;
2077                         stat_inc_int_distrib_tbl(ps_ent->chains, size);
2078                 } else {
2079                         ps_ent->n_exchg += n_ops;
2080                         stat_inc_int_distrib_tbl(ps_ent->cycles, size);
2081                 }  /* if */
2082         }
2083         STAT_LEAVE;
2084 }  /* stat_be_block_stat_permcycle */
2085
2086 /* Dumps a statistics snapshot. */
2087 void stat_dump_snapshot(const char *name, const char *phase)
2088 {
2089         char fname[2048];
2090         const char *p;
2091         int l;
2092
2093         if (! status->stat_options)
2094                 return;
2095
2096         STAT_ENTER;
2097         {
2098                 graph_entry_t *entry;
2099                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
2100
2101                 /*
2102                  * The constant counter is only global, so we clear it here.
2103                  * As a consequence, it does NOT contain the constants
2104                  * of DELETED graphs.
2105                  */
2106                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2107                         stat_const_clear(status);
2108
2109                 /* build the name */
2110                 p = strrchr(name, '/');
2111 #ifdef _WIN32
2112                 {
2113                         const char *q;
2114
2115                         q = strrchr(name, '\\');
2116
2117                         /* NULL might not be the smallest pointer */
2118                         if (q && (!p || q > p))
2119                                 p = q;
2120                 }
2121 #endif /* _WIN32 */
2122                 if (p) {
2123                         ++p;
2124                         l = p - name;
2125
2126                         if (l > (int) (sizeof(fname) - 1))
2127                                 l = sizeof(fname) - 1;
2128
2129                         memcpy(fname, name, l);
2130                         fname[l] = '\0';
2131                 } else {
2132                         fname[0] = '\0';
2133                         p = name;
2134                 }  /* if */
2135                 strncat(fname, "firmstat-", sizeof(fname) - strlen(fname) - 1);  /* pass the remaining space, not the buffer size */
2136                 strncat(fname, phase, sizeof(fname) - strlen(fname) - 1);
2137                 strncat(fname, "-", sizeof(fname) - strlen(fname) - 1);
2138                 strncat(fname, p, sizeof(fname) - strlen(fname) - 1);
2139
2140                 stat_dump_init(fname);
2141
2142                 /* calculate the graph statistics */
2143                 for (entry = pset_first(status->irg_hash); entry; entry = pset_next(status->irg_hash)) {
2144                         if (entry->irg == NULL) {
2145                                 /* special entry for the global count */
2146                                 continue;
2147                         }  /* if */
2148                         if (! entry->is_deleted) {
2149                                 /* the graph is still alive, count the nodes on it */
2150                                 update_graph_stat(global, entry);
2151                         }  /* if */
2152                 }  /* for */
2153
2154                 /* some calculations are dependent, we pushed them on the wait_q */
2155                 while (! pdeq_empty(status->wait_q)) {
2156                         entry = pdeq_getr(status->wait_q);
2157
2158                         update_graph_stat_2(global, entry);
2159                 }  /* while */
2160
2161                 /* dump per graph */
2162                 for (entry = pset_first(status->irg_hash); entry; entry = pset_next(status->irg_hash)) {
2163                         if (entry->irg == NULL) {
2164                                 /* special entry for the global count */
2165                                 continue;
2166                         }  /* if */
2167
2168                         if (! entry->is_deleted || status->stat_options & FIRMSTAT_COUNT_DELETED) {
2169                                 stat_dump_graph(entry);
2170                                 stat_dump_registered(entry);
2171                         }  /* if */
2172
2173                         if (! entry->is_deleted) {
2174                                 /* clear the counters that are not accumulated */
2175                                 graph_clear_entry(entry, 0);
2176                         }  /* if */
2177                 }  /* for */
2178
2179                 /* dump global */
2180                 stat_dump_graph(global);
2181
2182                 /* dump the const info */
2183                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2184                         stat_dump_consts(&status->const_info);
2185
2186                 /* dump the parameter distribution */
2187                 stat_dump_param_tbl(status->dist_param_cnt, global);
2188
2189                 /* dump the optimization counter and clear them */
2190                 stat_dump_opt_cnt(status->num_opts, ARR_SIZE(status->num_opts));
2191                 clear_optimization_counter();
2192
2193                 stat_dump_finish();
2194
2195                 stat_finish_pattern_history(fname);
2196
2197                 /* clear the global counters here */
2198                 {
2199                         node_entry_t *entry;
2200
2201                         for (entry = pset_first(global->opcode_hash); entry; entry = pset_next(global->opcode_hash)) {
2202                                 opcode_clear_entry(entry);
2203                         }  /* for */
2204                         /* clear all global counters */
2205                         graph_clear_entry(global, /*all=*/1);
2206                 }
2207         }
2208         STAT_LEAVE;
2209 }  /* stat_dump_snapshot */
2210
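/*
 * Example of the file name construction above (derived from the code; the
 * concrete path is hypothetical): a call like
 *
 *     stat_dump_snapshot("src/foo.c", "after-inline");
 *
 * splits "src/foo.c" into directory and base name and hands the registered
 * dumpers a snapshot name of the form "src/firmstat-after-inline-foo.c"
 * (plus whatever suffix a dumper may append).
 */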
2211 struct pass_t {
2212         ir_prog_pass_t pass;
2213         const char     *fname;
2214         const char     *phase;
2215 };
2216
2217 /**
2218  * Wrapper to run stat_dump_snapshot() as a ir_prog wrapper.
2219  */
2220 static int stat_dump_snapshot_wrapper(ir_prog *irp, void *context) {
2221         struct pass_t *pass = context;
2222
2223         (void)irp;
2224         stat_dump_snapshot(pass->fname, pass->phase);
2225         return 0;
2226 }  /* stat_dump_snapshot_wrapper */
2227
2228 /**
2229  * Ensure that no verifier is run from the wrapper.
2230  */
2231 static int no_verify(ir_prog *prog, void *ctx)
2232 {
2233         (void)prog;
2234         (void)ctx;
2235         return 0;
2236 }
2237
2238 /**
2239  * Ensure that no dumper is run from the wrapper.
2240  */
2241 static void no_dump(ir_prog *prog, void *ctx, unsigned idx)
2242 {
2243         (void)prog;
2244         (void)ctx;
2245         (void)idx;
2246 }
2247
2248 /* create an ir_prog pass */
2249 ir_prog_pass_t *stat_dump_snapshot_pass(
2250         const char *name, const char *fname, const char *phase) {
2251         struct pass_t *pass = XMALLOCZ(struct pass_t);
2252
2253         def_prog_pass_constructor(
2254                 &pass->pass, name ? name : "stat_snapshot", stat_dump_snapshot_wrapper);
2255         pass->fname = fname;
2256         pass->phase = phase;
2257
2258         /* no dump/verify */
2259         pass->pass.dump_irprog   = no_dump;
2260         pass->pass.verify_irprog = no_verify;
2261
2262         return &pass->pass;
2263 }  /* stat_dump_snapshot_pass */
2264
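/*
 * Usage sketch for the pass constructor above (illustrative; the file name and
 * phase are hypothetical, NULL selects the default pass name "stat_snapshot"):
 *
 *     ir_prog_pass_t *snap = stat_dump_snapshot_pass(NULL, "foo.c", "after-opt");
 *
 * The returned pass can then be scheduled on an ir_prog pass manager like any
 * other pass; dumping and verification are disabled for it.
 */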
2265 /** the hook entries for the Firm statistics module */
2266 static hook_entry_t stat_hooks[hook_last];
2267
2268 /* initialize the statistics module. */
2269 void firm_init_stat(unsigned enable_options)
2270 {
2271 #define X(a)  a, sizeof(a)-1
2272 #define HOOK(h, fkt) \
2273         stat_hooks[h].hook._##h = fkt; register_hook(h, &stat_hooks[h])
2274         unsigned num = 0;
2275
2276         if (! (enable_options & FIRMSTAT_ENABLED))
2277                 return;
2278
2279         status = XMALLOCZ(stat_info_t);
2280
2281         /* enable statistics */
2282         status->stat_options = enable_options & FIRMSTAT_ENABLED ? enable_options : 0;
2283
2284         /* register all hooks */
2285         HOOK(hook_new_ir_op,                          stat_new_ir_op);
2286         HOOK(hook_free_ir_op,                         stat_free_ir_op);
2287         HOOK(hook_new_node,                           stat_new_node);
2288         HOOK(hook_turn_into_id,                       stat_turn_into_id);
2289         HOOK(hook_normalize,                          stat_normalize);
2290         HOOK(hook_new_graph,                          stat_new_graph);
2291         HOOK(hook_free_graph,                         stat_free_graph);
2292         HOOK(hook_irg_walk,                           stat_irg_walk);
2293         HOOK(hook_irg_walk_blkwise,                   stat_irg_walk_blkwise);
2294         HOOK(hook_irg_block_walk,                     stat_irg_block_walk);
2295         HOOK(hook_merge_nodes,                        stat_merge_nodes);
2296         HOOK(hook_reassociate,                        stat_reassociate);
2297         HOOK(hook_lower,                              stat_lower);
2298         HOOK(hook_inline,                             stat_inline);
2299         HOOK(hook_tail_rec,                           stat_tail_rec);
2300         HOOK(hook_strength_red,                       stat_strength_red);
2301         HOOK(hook_dead_node_elim,                     stat_dead_node_elim);
2302         HOOK(hook_if_conversion,                      stat_if_conversion);
2303         HOOK(hook_func_call,                          stat_func_call);
2304         HOOK(hook_arch_dep_replace_mul_with_shifts,   stat_arch_dep_replace_mul_with_shifts);
2305         HOOK(hook_arch_dep_replace_division_by_const, stat_arch_dep_replace_division_by_const);
2306
2307         obstack_init(&status->cnts);
2308         obstack_init(&status->be_data);
2309
2310         /* create the hash-tables */
2311         status->irg_hash   = new_pset(graph_cmp, 8);
2312         status->ir_op_hash = new_pset(opcode_cmp_2, 1);
2313
2314         /* create the wait queue */
2315         status->wait_q     = new_pdeq();
2316
2317         if (enable_options & FIRMSTAT_COUNT_STRONG_OP) {
2318                 /* build the pseudo-ops */
2319
2320                 _op_Phi0.code    = --num;
2321                 _op_Phi0.name    = new_id_from_chars(X("Phi0"));
2322
2323                 _op_PhiM.code    = --num;
2324                 _op_PhiM.name    = new_id_from_chars(X("PhiM"));
2325
2326                 _op_ProjM.code   = --num;
2327                 _op_ProjM.name   = new_id_from_chars(X("ProjM"));
2328
2329                 _op_MulC.code    = --num;
2330                 _op_MulC.name    = new_id_from_chars(X("MulC"));
2331
2332                 _op_DivC.code    = --num;
2333                 _op_DivC.name    = new_id_from_chars(X("DivC"));
2334
2335                 _op_ModC.code    = --num;
2336                 _op_ModC.name    = new_id_from_chars(X("ModC"));
2337
2338                 _op_DivModC.code = --num;
2339                 _op_DivModC.name = new_id_from_chars(X("DivModC"));
2340
2341                 _op_QuotC.code   = --num;
2342                 _op_QuotC.name   = new_id_from_chars(X("QuotC"));
2343
2344                 status->op_Phi0    = &_op_Phi0;
2345                 status->op_PhiM    = &_op_PhiM;
2346                 status->op_ProjM   = &_op_ProjM;
2347                 status->op_MulC    = &_op_MulC;
2348                 status->op_DivC    = &_op_DivC;
2349                 status->op_ModC    = &_op_ModC;
2350                 status->op_DivModC = &_op_DivModC;
2351                 status->op_QuotC   = &_op_QuotC;
2352         } else {
2353                 status->op_Phi0    = NULL;
2354                 status->op_PhiM    = NULL;
2355                 status->op_ProjM   = NULL;
2356                 status->op_MulC    = NULL;
2357                 status->op_DivC    = NULL;
2358                 status->op_ModC    = NULL;
2359                 status->op_DivModC = NULL;
2360                 status->op_QuotC   = NULL;
2361         }  /* if */
2362
2363         /* for Florian: count the Sel depth */
2364         if (enable_options & FIRMSTAT_COUNT_SELS) {
2365                 _op_SelSel.code    = --num;
2366                 _op_SelSel.name    = new_id_from_chars(X("Sel(Sel)"));
2367
2368                 _op_SelSelSel.code = --num;
2369                 _op_SelSelSel.name = new_id_from_chars(X("Sel(Sel(Sel))"));
2370
2371                 status->op_SelSel    = &_op_SelSel;
2372                 status->op_SelSelSel = &_op_SelSelSel;
2373         } else {
2374                 status->op_SelSel    = NULL;
2375                 status->op_SelSelSel = NULL;
2376         }  /* if */
2377
2378         /* register the dumper */
2379         stat_register_dumper(&simple_dumper);
2380
2381         if (enable_options & FIRMSTAT_CSV_OUTPUT)
2382                 stat_register_dumper(&csv_dumper);
2383
2384         /* initialize the pattern hash */
2385         stat_init_pattern_history(enable_options & FIRMSTAT_PATTERN_ENABLED);
2386
2387         /* initialize the Const options */
2388         if (enable_options & FIRMSTAT_COUNT_CONSTS)
2389                 stat_init_const_cnt(status);
2390
2391         /* distribution table for parameter counts */
2392         status->dist_param_cnt = stat_new_int_distrib_tbl();
2393
2394         clear_optimization_counter();
2395
2396 #undef HOOK
2397 #undef X
2398 }  /* firm_init_stat */
2399
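/*
 * Typical lifetime of the statistics module, built from the public entry
 * points in this file (the option combination and the snapshot name/phase
 * are examples, not mandated by the API):
 *
 *     firm_init_stat(FIRMSTAT_ENABLED | FIRMSTAT_COUNT_CONSTS | FIRMSTAT_CSV_OUTPUT);
 *     ... build and optimize the program ...
 *     stat_dump_snapshot("foo.c", "after-opt");
 *     ... further phases, further snapshots ...
 *     stat_term();
 */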
2400 /**
2401  * Frees all dumper structures.
2402  */
2403 static void stat_term_dumper(void) {
2404         dumper_t *dumper, *next_dumper;
2405
2406         for (dumper = status->dumper; dumper; /* iteration done in loop body */ ) {
2407                 if (dumper->func_map)
2408                         del_pset(dumper->func_map);
2409
2410                 next_dumper = dumper->next;
2411                 free(dumper);
2412                 dumper = next_dumper;
2413         }  /* for */
2414 }  /* stat_term_dumper */
2415
2416
2417 /* Terminates the statistics module, frees all memory. */
2418 void stat_term(void) {
2419         if (status != (stat_info_t *)&status_disable) {
2420                 obstack_free(&status->be_data, NULL);
2421                 obstack_free(&status->cnts, NULL);
2422
2423                 stat_term_dumper();
2424
2425                 xfree(status);
2426                 status = (stat_info_t *)&status_disable;
2427         }
2428 }  /* stat_term */
2429
2430 /* returns 1 if statistics were initialized, 0 otherwise */
2431 int stat_is_active(void) {
2432         return status != (stat_info_t *)&status_disable;
2433 }  /* stat_is_active */
2434
2435 #else
2436
2437 /* initialize the statistics module. */
2438 void firm_init_stat(unsigned enable_options) {}
2439
2440 /* Dumps a statistics snapshot */
2441 void stat_dump_snapshot(const char *name, const char *phase) {}
2442
2443 /* terminates the statistics module, frees all memory */
2444 void stat_term(void) {}
2445
2446 #endif /* FIRM_STATISTICS */