Fixed combo by repairing an old artefact left over from the older libFirm Cmp nodes.
[libfirm] / ir / stat / firmstat.c
1 /*
2  * Copyright (C) 1995-2010 University of Karlsruhe.  All right reserved.
3  *
4  * This file is part of libFirm.
5  *
6  * This file may be distributed and/or modified under the terms of the
7  * GNU General Public License version 2 as published by the Free Software
8  * Foundation and appearing in the file LICENSE.GPL included in the
9  * packaging of this file.
10  *
11  * Licensees holding valid libFirm Professional Edition licenses may use
12  * this file in accordance with the libFirm Commercial License.
13  * Agreement provided with the Software.
14  *
15  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17  * PURPOSE.
18  */
19
20 /**
21  * @file
22  * @brief   Statistics for Firm.
23  * @author  Michael Beck
24  * @version $Id$
25  */
26 #include "config.h"
27
28 #include <stdio.h>
29 #include <stdlib.h>
30 #include <string.h>
31
32 #include "irouts.h"
33 #include "irdump.h"
34 #include "hashptr.h"
35 #include "firmstat_t.h"
36 #include "irpass_t.h"
37 #include "pattern.h"
38 #include "dags.h"
39 #include "stat_dmp.h"
40 #include "xmalloc.h"
41 #include "irhooks.h"
42 #include "util.h"
43
44 /*
45  * need this to be static:
46  * Special pseudo Opcodes that we need to count some interesting cases
47  */
48
49 /**
50  * The Phi0, a node that is created during SSA construction
51  */
52 static ir_op _op_Phi0;
53
54 /** The PhiM, just to count memory Phi's. */
55 static ir_op _op_PhiM;
56
57 /** The Mul by Const node. */
58 static ir_op _op_MulC;
59
60 /** The Div by Const node. */
61 static ir_op _op_DivC;
62
63 /** The Div by Const node. */
64 static ir_op _op_ModC;
65
66 /** The memory Proj node. */
67 static ir_op _op_ProjM;
68
69 /** A Sel of a Sel */
70 static ir_op _op_SelSel;
71
72 /** A Sel of a Sel of a Sel */
73 static ir_op _op_SelSelSel;
74
75 /* ---------------------------------------------------------------------------------- */
76
77 /** Marks the begin of a statistic (hook) function. */
78 #define STAT_ENTER    ++status->recursive
79
80 /** Marks the end of a statistic (hook) functions. */
81 #define STAT_LEAVE    --status->recursive
82
83 /** Allows to enter a statistic function only when we are not already in a hook. */
84 #define STAT_ENTER_SINGLE    do { if (status->recursive > 0) return; ++status->recursive; } while (0)
85
86 /**
87  * global status
88  */
89 static const unsigned status_disable = 0;
90 static stat_info_t *status = (stat_info_t *)&status_disable;
91
92 /**
93  * Compare two elements of the opcode hash.
94  */
95 static int opcode_cmp(const void *elt, const void *key)
96 {
97         const node_entry_t *e1 = (const node_entry_t*)elt;
98         const node_entry_t *e2 = (const node_entry_t*)key;
99
100         return e1->op->code - e2->op->code;
101 }  /* opcode_cmp */
102
103 /**
104  * Compare two elements of the graph hash.
105  */
106 static int graph_cmp(const void *elt, const void *key)
107 {
108         const graph_entry_t *e1 = (const graph_entry_t*)elt;
109         const graph_entry_t *e2 = (const graph_entry_t*)key;
110
111         return e1->irg != e2->irg;
112 }  /* graph_cmp */
113
114 /**
115  * Compare two elements of the optimization hash.
116  */
117 static int opt_cmp(const void *elt, const void *key)
118 {
119         const opt_entry_t *e1 = (const opt_entry_t*)elt;
120         const opt_entry_t *e2 = (const opt_entry_t*)key;
121
122         return e1->op->code != e2->op->code;
123 }  /* opt_cmp */
124
125 /**
126  * Compare two elements of the block/extbb hash.
127  */
128 static int block_cmp(const void *elt, const void *key)
129 {
130         const block_entry_t *e1 = (const block_entry_t*)elt;
131         const block_entry_t *e2 = (const block_entry_t*)key;
132
133         /* it's enough to compare the block number */
134         return e1->block_nr != e2->block_nr;
135 }  /* block_cmp */
136
137 /**
138  * Compare two elements of the be_block hash.
139  */
140 static int be_block_cmp(const void *elt, const void *key)
141 {
142         const be_block_entry_t *e1 = (const be_block_entry_t*)elt;
143         const be_block_entry_t *e2 = (const be_block_entry_t*)key;
144
145         return e1->block_nr != e2->block_nr;
146 }  /* be_block_cmp */
147
148 /**
149  * Compare two elements of reg pressure hash.
150  */
151 static int reg_pressure_cmp(const void *elt, const void *key)
152 {
153         const reg_pressure_entry_t *e1 = (const reg_pressure_entry_t*)elt;
154         const reg_pressure_entry_t *e2 = (const reg_pressure_entry_t*)key;
155
156         return e1->class_name != e2->class_name;
157 }  /* reg_pressure_cmp */
158
159 /**
160  * Compare two elements of the perm_stat hash.
161  */
162 static int perm_stat_cmp(const void *elt, const void *key)
163 {
164         const perm_stat_entry_t *e1 = (const perm_stat_entry_t*)elt;
165         const perm_stat_entry_t *e2 = (const perm_stat_entry_t*)key;
166
167         return e1->perm != e2->perm;
168 }  /* perm_stat_cmp */
169
170 /**
171  * Compare two elements of the perm_class hash.
172  */
173 static int perm_class_cmp(const void *elt, const void *key)
174 {
175         const perm_class_entry_t *e1 = (const perm_class_entry_t*)elt;
176         const perm_class_entry_t *e2 = (const perm_class_entry_t*)key;
177
178         return e1->class_name != e2->class_name;
179 }  /* perm_class_cmp */
180
181 /**
182  * Compare two elements of the ir_op hash.
183  */
184 static int opcode_cmp_2(const void *elt, const void *key)
185 {
186         const ir_op *e1 = (const ir_op*)elt;
187         const ir_op *e2 = (const ir_op*)key;
188
189         return e1->code != e2->code;
190 }  /* opcode_cmp_2 */
191
192 /**
193  * Compare two elements of the address_mark set.
194  */
195 static int address_mark_cmp(const void *elt, const void *key, size_t size)
196 {
197         const address_mark_entry_t *e1 = (const address_mark_entry_t*)elt;
198         const address_mark_entry_t *e2 = (const address_mark_entry_t*)key;
199         (void) size;
200
201         /* compare only the nodes, the rest is used as data container */
202         return e1->node != e2->node;
203 }  /* address_mark_cmp */
204
205 /**
206  * Clear all counter in a node_entry_t.
207  */
208 static void opcode_clear_entry(node_entry_t *elem)
209 {
210         cnt_clr(&elem->cnt_alive);
211         cnt_clr(&elem->new_node);
212         cnt_clr(&elem->into_Id);
213         cnt_clr(&elem->normalized);
214 }  /* opcode_clear_entry */
215
216 /**
217  * Returns the associates node_entry_t for an ir_op (and allocates
218  * one if not yet available).
219  *
220  * @param op    the IR operation
221  * @param hmap  a hash map containing ir_op* -> node_entry_t*
222  */
223 static node_entry_t *opcode_get_entry(const ir_op *op, hmap_node_entry_t *hmap)
224 {
225         node_entry_t key;
226         node_entry_t *elem;
227
228         key.op = op;
229
230         elem = (node_entry_t*)pset_find(hmap, &key, op->code);
231         if (elem)
232                 return elem;
233
234         elem = OALLOCZ(&status->cnts, node_entry_t);
235
236         /* clear counter */
237         opcode_clear_entry(elem);
238
239         elem->op = op;
240
241         return (node_entry_t*)pset_insert(hmap, elem, op->code);
242 }  /* opcode_get_entry */
243
244 /**
245  * Returns the associates ir_op for an opcode
246  *
247  * @param code  the IR opcode
248  * @param hmap  the hash map containing opcode -> ir_op*
249  */
250 static ir_op *opcode_find_entry(ir_opcode code, hmap_ir_op *hmap)
251 {
252         ir_op key;
253
254         key.code = code;
255         return (ir_op*)pset_find(hmap, &key, code);
256 }  /* opcode_find_entry */
257
/**
 * Clears all counter in a graph_entry_t.
 *
 * @param elem  the graph entry
 * @param all   if non-zero, clears all counters, else leave accumulated ones
 */
static void graph_clear_entry(graph_entry_t *elem, int all)
{
	int i;

	/* clear accumulated / non-accumulated counter; counters below
	 * _gcnt_non_acc are accumulated across runs and survive unless
	 * 'all' is requested */
	for (i = all ? 0 : _gcnt_non_acc; i < _gcnt_last; ++i) {
		cnt_clr(&elem->cnt[i]);
	}  /* for */

	/* the block/extbb hashes are dropped here and lazily recreated
	 * on the next recalculation */
	if (elem->block_hash) {
		del_pset(elem->block_hash);
		elem->block_hash = NULL;
	}  /* if */

	if (elem->extbb_hash) {
		del_pset(elem->extbb_hash);
		elem->extbb_hash = NULL;
	}  /* if */

	/* release everything allocated on the recalc obstack in one sweep,
	 * then re-initialize it for the next round */
	obstack_free(&elem->recalc_cnts, NULL);
	obstack_init(&elem->recalc_cnts);
}  /* graph_clear_entry */
286
287 /**
288  * Returns the associated graph_entry_t for an IR graph.
289  *
290  * @param irg   the IR graph, NULL for the global counter
291  * @param hmap  the hash map containing ir_graph* -> graph_entry_t*
292  */
293 static graph_entry_t *graph_get_entry(ir_graph *irg, hmap_graph_entry_t *hmap)
294 {
295         graph_entry_t key;
296         graph_entry_t *elem;
297         size_t i;
298
299         key.irg = irg;
300
301         elem = (graph_entry_t*)pset_find(hmap, &key, HASH_PTR(irg));
302
303         if (elem) {
304                 /* create hash map backend block information */
305                 if (! elem->be_block_hash)
306                         elem->be_block_hash = new_pset(be_block_cmp, 5);
307
308                 return elem;
309         }  /* if */
310
311         /* allocate a new one */
312         elem = OALLOCZ(&status->cnts, graph_entry_t);
313         obstack_init(&elem->recalc_cnts);
314
315         /* clear counter */
316         graph_clear_entry(elem, 1);
317
318         /* new hash table for opcodes here  */
319         elem->opcode_hash   = new_pset(opcode_cmp, 5);
320         elem->address_mark  = new_set(address_mark_cmp, 5);
321         elem->irg           = irg;
322
323         /* these hash tables are created on demand */
324         elem->block_hash = NULL;
325         elem->extbb_hash = NULL;
326
327         for (i = 0; i < sizeof(elem->opt_hash)/sizeof(elem->opt_hash[0]); ++i)
328                 elem->opt_hash[i] = new_pset(opt_cmp, 4);
329
330         return (graph_entry_t*)pset_insert(hmap, elem, HASH_PTR(irg));
331 }  /* graph_get_entry */
332
333 /**
334  * Clear all counter in an opt_entry_t.
335  */
336 static void opt_clear_entry(opt_entry_t *elem)
337 {
338         cnt_clr(&elem->count);
339 }  /* opt_clear_entry */
340
341 /**
342  * Returns the associated opt_entry_t for an IR operation.
343  *
344  * @param op    the IR operation
345  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
346  */
347 static opt_entry_t *opt_get_entry(const ir_op *op, hmap_opt_entry_t *hmap)
348 {
349         opt_entry_t key;
350         opt_entry_t *elem;
351
352         key.op = op;
353
354         elem = (opt_entry_t*)pset_find(hmap, &key, op->code);
355         if (elem)
356                 return elem;
357
358         elem = OALLOCZ(&status->cnts, opt_entry_t);
359
360         /* clear new counter */
361         opt_clear_entry(elem);
362
363         elem->op = op;
364
365         return (opt_entry_t*)pset_insert(hmap, elem, op->code);
366 }  /* opt_get_entry */
367
368 /**
369  * clears all counter in a block_entry_t
370  */
371 static void block_clear_entry(block_entry_t *elem)
372 {
373         int i;
374
375         for (i = 0; i < _bcnt_last; ++i)
376                 cnt_clr(&elem->cnt[i]);
377 }  /* block_clear_entry */
378
379 /**
380  * Returns the associated block_entry_t for an block.
381  *
382  * @param block_nr  an IR  block number
383  * @param hmap      a hash map containing long -> block_entry_t
384  */
385 static block_entry_t *block_get_entry(struct obstack *obst, long block_nr, hmap_block_entry_t *hmap)
386 {
387         block_entry_t key;
388         block_entry_t *elem;
389
390         key.block_nr = block_nr;
391
392         elem = (block_entry_t*)pset_find(hmap, &key, block_nr);
393         if (elem)
394                 return elem;
395
396         elem = OALLOCZ(obst, block_entry_t);
397
398         /* clear new counter */
399         block_clear_entry(elem);
400
401         elem->block_nr = block_nr;
402
403         return (block_entry_t*)pset_insert(hmap, elem, block_nr);
404 }  /* block_get_entry */
405
406 /**
407  * Clear all sets in be_block_entry_t.
408  */
409 static void be_block_clear_entry(be_block_entry_t *elem)
410 {
411         if (elem->reg_pressure)
412                 del_pset(elem->reg_pressure);
413
414         if (elem->sched_ready)
415                 stat_delete_distrib_tbl(elem->sched_ready);
416
417         if (elem->perm_class_stat)
418                 del_pset(elem->perm_class_stat);
419
420         elem->reg_pressure    = new_pset(reg_pressure_cmp, 5);
421         elem->sched_ready     = stat_new_int_distrib_tbl();
422         elem->perm_class_stat = new_pset(perm_class_cmp, 5);
423 }  /* be_block_clear_entry */
424
425 /**
426  * Returns the associated be_block_entry_t for an block.
427  *
428  * @param block_nr  an IR  block number
429  * @param hmap      a hash map containing long -> be_block_entry_t
430  */
431 static be_block_entry_t *be_block_get_entry(struct obstack *obst, long block_nr, hmap_be_block_entry_t *hmap)
432 {
433         be_block_entry_t key;
434         be_block_entry_t *elem;
435
436         key.block_nr = block_nr;
437
438         elem = (be_block_entry_t*)pset_find(hmap, &key, block_nr);
439         if (elem)
440                 return elem;
441
442         elem = OALLOCZ(obst, be_block_entry_t);
443
444         /* clear new counter */
445         be_block_clear_entry(elem);
446
447         elem->block_nr = block_nr;
448
449         return (be_block_entry_t*)pset_insert(hmap, elem, block_nr);
450 }  /* be_block_get_entry */
451
452 /**
453  * clears all sets in perm_class_entry_t
454  */
455 static void perm_class_clear_entry(perm_class_entry_t *elem)
456 {
457         if (elem->perm_stat)
458                 del_pset(elem->perm_stat);
459
460         elem->perm_stat = new_pset(perm_stat_cmp, 5);
461 }  /* perm_class_clear_entry */
462
463 /**
464  * Returns the associated perm_class entry for a register class.
465  *
466  * @param class_name  the register class name
467  * @param hmap        a hash map containing class_name -> perm_class_entry_t
468  */
469 static perm_class_entry_t *perm_class_get_entry(struct obstack *obst, const char *class_name,
470                                                 hmap_perm_class_entry_t *hmap)
471 {
472         perm_class_entry_t key;
473         perm_class_entry_t *elem;
474
475         key.class_name = class_name;
476
477         elem = (perm_class_entry_t*)pset_find(hmap, &key, HASH_PTR(class_name));
478         if (elem)
479                 return elem;
480
481         elem = OALLOCZ(obst, perm_class_entry_t);
482
483         /* clear new counter */
484         perm_class_clear_entry(elem);
485
486         elem->class_name = class_name;
487
488         return (perm_class_entry_t*)pset_insert(hmap, elem, HASH_PTR(class_name));
489 }  /* perm_class_get_entry */
490
491 /**
492  * clears all sets in perm_stat_entry_t
493  */
494 static void perm_stat_clear_entry(perm_stat_entry_t *elem)
495 {
496         if (elem->chains)
497                 stat_delete_distrib_tbl(elem->chains);
498
499         if (elem->cycles)
500                 stat_delete_distrib_tbl(elem->cycles);
501
502         elem->chains = stat_new_int_distrib_tbl();
503         elem->cycles = stat_new_int_distrib_tbl();
504 }  /* perm_stat_clear_entry */
505
506 /**
507  * Returns the associated perm_stat entry for a perm.
508  *
509  * @param perm      the perm node
510  * @param hmap      a hash map containing perm -> perm_stat_entry_t
511  */
512 static perm_stat_entry_t *perm_stat_get_entry(struct obstack *obst, ir_node *perm, hmap_perm_stat_entry_t *hmap)
513 {
514         perm_stat_entry_t key;
515         perm_stat_entry_t *elem;
516
517         key.perm = perm;
518
519         elem = (perm_stat_entry_t*)pset_find(hmap, &key, HASH_PTR(perm));
520         if (elem)
521                 return elem;
522
523         elem = OALLOCZ(obst, perm_stat_entry_t);
524
525         /* clear new counter */
526         perm_stat_clear_entry(elem);
527
528         elem->perm = perm;
529
530         return (perm_stat_entry_t*)pset_insert(hmap, elem, HASH_PTR(perm));
531 }  /* perm_stat_get_entry */
532
533 /**
534  * Clear optimizations counter,
535  */
536 static void clear_optimization_counter(void)
537 {
538         int i;
539         for (i = 0; i < FS_OPT_MAX; ++i)
540                 cnt_clr(&status->num_opts[i]);
541 }
542
/**
 * Returns the ir_op for an IR-node,
 * handles special cases and return pseudo op codes.
 *
 * Maps certain node shapes onto the pseudo opcodes declared at the top
 * of this file (Phi0, PhiM, MulC, DivC, ModC, ProjM, SelSel, SelSelSel)
 * so that these interesting cases get counted separately.  If the
 * corresponding pseudo op was not registered (status->op_* is NULL),
 * the node's real op is returned unchanged.
 *
 * @param node  an IR node
 */
static ir_op *stat_get_irn_op(ir_node *node)
{
	ir_op *op = get_irn_op(node);
	unsigned opc = op->code;

	switch (opc) {
	case iro_Phi:
		if (get_irn_arity(node) == 0) {
			/* special case, a Phi0 node (no predecessors yet, created
			 * during SSA construction), count on extra counter */
			op = status->op_Phi0 ? status->op_Phi0 : op;
		} else if (get_irn_mode(node) == mode_M) {
			/* special case, a Memory Phi node, count on extra counter */
			op = status->op_PhiM ? status->op_PhiM : op;
		}  /* if */
		break;
	case iro_Proj:
		if (get_irn_mode(node) == mode_M) {
			/* special case, a Memory Proj node, count on extra counter */
			op = status->op_ProjM ? status->op_ProjM : op;
		}  /* if */
		break;
	case iro_Mul:
		if (is_Const(get_Mul_left(node)) || is_Const(get_Mul_right(node))) {
			/* special case, a Multiply by a const, count on extra counter */
			op = status->op_MulC ? status->op_MulC : op;
		}  /* if */
		break;
	case iro_Div:
		if (is_Const(get_Div_right(node))) {
			/* special case, a division by a const, count on extra counter */
			op = status->op_DivC ? status->op_DivC : op;
		}  /* if */
		break;
	case iro_Mod:
		if (is_Const(get_Mod_right(node))) {
			/* special case, a modulo by a const, count on extra counter */
			op = status->op_ModC ? status->op_ModC : op;
		}  /* if */
		break;
	case iro_Sel:
		if (is_Sel(get_Sel_ptr(node))) {
			/* special case, a Sel of a Sel, count on extra counter */
			op = status->op_SelSel ? status->op_SelSel : op;
			if (is_Sel(get_Sel_ptr(get_Sel_ptr(node)))) {
				/* special case, a Sel of a Sel of a Sel, count on extra counter */
				op = status->op_SelSelSel ? status->op_SelSelSel : op;
			}  /* if */
		}  /* if */
		break;
	default:
		;
	}  /* switch */

	return op;
}  /* stat_get_irn_op */
604
/**
 * update the block counter
 *
 * Accounts a node in the per-block statistics: node count, data-Phi
 * count, and intra-/inter-block edge counts.  A Block node itself only
 * contributes its incoming control-flow edges.  Keep-alive edges of the
 * End node are deliberately ignored.
 *
 * NOTE(review): the function name is misspelled ("undate" instead of
 * "update"); kept unchanged since it may be referenced further down in
 * this file.
 *
 * @param node   the node to account
 * @param graph  the graph entry whose block hash is updated
 */
static void undate_block_info(ir_node *node, graph_entry_t *graph)
{
	ir_op *op = get_irn_op(node);
	ir_node *block;
	block_entry_t *b_entry;
	int i, arity;

	/* check for block */
	if (op == op_Block) {
		arity = get_irn_arity(node);
		b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(node), graph->block_hash);
		/* mark start end block to allow to filter them out */
		if (node == get_irg_start_block(graph->irg))
			b_entry->is_start = 1;
		else if (node == get_irg_end_block(graph->irg))
			b_entry->is_end = 1;

		/* count all incoming edges */
		for (i = 0; i < arity; ++i) {
			ir_node *pred = get_irn_n(node, i);
			ir_node *other_block = get_nodes_block(pred);
			block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);

			cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
			cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
		}  /* for */
		return;
	}  /* if */

	block   = get_nodes_block(node);
	b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(block), graph->block_hash);

	if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
		/* count data Phi per block */
		cnt_inc(&b_entry->cnt[bcnt_phi_data]);
	}  /* if */

	/* we have a new node in our block */
	cnt_inc(&b_entry->cnt[bcnt_nodes]);

	/* don't count keep-alive edges */
	if (is_End(node))
		return;

	arity = get_irn_arity(node);

	/* classify each data/memory edge as in-block or cross-block */
	for (i = 0; i < arity; ++i) {
		ir_node *pred = get_irn_n(node, i);
		ir_node *other_block;

		other_block = get_nodes_block(pred);

		if (other_block == block)
			cnt_inc(&b_entry->cnt[bcnt_edges]); /* a in block edge */
		else {
			block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);

			cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
			cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
		}  /* if */
	}  /* for */
}  /* undate_block_info */
670
/**
 * Update the extended block counter.
 *
 * Mirrors undate_block_info(), but aggregates on extended basic blocks
 * instead of plain blocks: node count, data-Phi count and intra-/
 * inter-extbb edge counts.  Keep-alive edges of the End node are
 * ignored here as well.
 *
 * @param node   the node to account
 * @param graph  the graph entry whose extbb hash is updated
 */
static void update_extbb_info(ir_node *node, graph_entry_t *graph)
{
	ir_op *op = get_irn_op(node);
	ir_extblk *extbb;
	extbb_entry_t *eb_entry;
	int i, arity;

	/* check for block */
	if (op == op_Block) {
		extbb = get_nodes_extbb(node);
		arity = get_irn_arity(node);
		eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);

		/* count all incoming edges; edges inside the same extbb are
		 * not counted here, unlike in the plain-block variant */
		for (i = 0; i < arity; ++i) {
			ir_node *pred = get_irn_n(node, i);
			ir_extblk *other_extbb = get_nodes_extbb(pred);

			if (extbb != other_extbb) {
				extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);

				cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
				cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
			}  /* if */
		}  /* for */
		return;
	}  /* if */

	extbb    = get_nodes_extbb(node);
	eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);

	if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
		/* count data Phi per extbb */
		cnt_inc(&eb_entry->cnt[bcnt_phi_data]);
	}  /* if */

	/* we have a new node in our block */
	cnt_inc(&eb_entry->cnt[bcnt_nodes]);

	/* don't count keep-alive edges */
	if (is_End(node))
		return;

	arity = get_irn_arity(node);

	/* classify each edge as in-extbb or cross-extbb */
	for (i = 0; i < arity; ++i) {
		ir_node *pred = get_irn_n(node, i);
		ir_extblk *other_extbb = get_nodes_extbb(pred);

		if (other_extbb == extbb)
			cnt_inc(&eb_entry->cnt[bcnt_edges]);    /* a in extbb edge */
		else {
			extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);

			cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
			cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
		}  /* if */
	}  /* for */
}  /* update_extbb_info */
733
734 /**
735  * Calculates how many arguments of the call are const, updates
736  * param distribution.
737  */
738 static void analyse_params_of_Call(graph_entry_t *graph, ir_node *call)
739 {
740         int i, num_const_args = 0, num_local_adr = 0;
741         int n = get_Call_n_params(call);
742
743         for (i = 0; i < n; ++i) {
744                 ir_node *param = get_Call_param(call, i);
745
746                 if (is_irn_constlike(param))
747                         ++num_const_args;
748                 else if (is_Sel(param)) {
749                         ir_node *base = param;
750
751                         do {
752                                 base = get_Sel_ptr(base);
753                         } while (is_Sel(base));
754
755                         if (base == get_irg_frame(current_ir_graph))
756                                 ++num_local_adr;
757                 }
758
759         }  /* for */
760
761         if (num_const_args > 0)
762                 cnt_inc(&graph->cnt[gcnt_call_with_cnst_arg]);
763         if (num_const_args == n)
764                 cnt_inc(&graph->cnt[gcnt_call_with_all_cnst_arg]);
765         if (num_local_adr > 0)
766                 cnt_inc(&graph->cnt[gcnt_call_with_local_adr]);
767
768         stat_inc_int_distrib_tbl(status->dist_param_cnt, n);
769 }  /* analyse_params_of_Call */
770
/**
 * Update info on calls.
 *
 * Counts the call, classifies it (external, recursive, indirect),
 * updates the leaf/chain-call properties of the calling graph and
 * analyses the call's parameters.
 *
 * @param call   The call
 * @param graph  The graph entry containing the call
 */
static void stat_update_call(ir_node *call, graph_entry_t *graph)
{
	ir_node   *block = get_nodes_block(call);
	ir_node   *ptr = get_Call_ptr(call);
	ir_entity *ent = NULL;
	ir_graph  *callee = NULL;

	/*
	 * If the block is bad, the whole subgraph will collapse later
	 * so do not count this call.
	 * This happens in dead code.
	 */
	if (is_Bad(block))
		return;

	cnt_inc(&graph->cnt[gcnt_all_calls]);

	/* found a call, this function is not a leaf */
	graph->is_leaf = 0;

	if (is_SymConst(ptr)) {
		if (get_SymConst_kind(ptr) == symconst_addr_ent) {
			/* ok, we seem to know the entity */
			ent = get_SymConst_entity(ptr);
			callee = get_entity_irg(ent);

			/* it is recursive, if it calls at least once */
			if (callee == graph->irg)
				graph->is_recursive = 1;
			/* no graph for the callee: the call leaves the compilation unit */
			if (callee == NULL)
				cnt_inc(&graph->cnt[gcnt_external_calls]);
		}  /* if */
	} else {
		/* indirect call, we could not predict the callee */
		cnt_inc(&graph->cnt[gcnt_indirect_calls]);

		/* NOT a leaf call */
		graph->is_leaf_call = LCS_NON_LEAF_CALL;
	}  /* if */

	/* check, if it's a chain-call: Then, the call-block
	 * must dominate the end block. */
	{
		ir_node *curr = get_irg_end_block(graph->irg);
		int depth = get_Block_dom_depth(block);

		/* walk up the idom chain from the end block; stop once we either
		 * reach the call block or climb above its dominator depth */
		for (; curr != block && get_Block_dom_depth(curr) > depth;) {
			curr = get_Block_idom(curr);

			if (! curr || !is_Block(curr))
				break;
		}  /* for */

		if (curr != block)
			graph->is_chain_call = 0;
	}

	/* check, if the callee is a leaf */
	if (callee) {
		graph_entry_t *called = graph_get_entry(callee, status->irg_hash);

		/* only trust the leaf property if the callee was already analyzed;
		 * otherwise the decision is deferred (see stat_update_call_2) */
		if (called->is_analyzed) {
			if (! called->is_leaf)
				graph->is_leaf_call = LCS_NON_LEAF_CALL;
		}  /* if */
	}  /* if */

	analyse_params_of_Call(graph, call);
}  /* stat_update_call */
846
/**
 * Update info on calls for graphs on the wait queue.
 *
 * Second pass over calls: by now every callee graph must have been
 * analyzed, so the leaf-call property of the calling graph can be
 * decided definitively.  Unknown callees pessimize to non-leaf.
 *
 * @param call   The call
 * @param graph  The graph entry containing the call
 */
static void stat_update_call_2(ir_node *call, graph_entry_t *graph)
{
	ir_node   *block = get_nodes_block(call);
	ir_node   *ptr = get_Call_ptr(call);
	ir_entity *ent = NULL;
	ir_graph  *callee = NULL;

	/*
	 * If the block is bad, the whole subgraph will collapse later
	 * so do not count this call.
	 * This happens in dead code.
	 */
	if (is_Bad(block))
		return;

	if (is_SymConst(ptr)) {
		if (get_SymConst_kind(ptr) == symconst_addr_ent) {
			/* ok, we seem to know the entity */
			ent = get_SymConst_entity(ptr);
			callee = get_entity_irg(ent);
		}  /* if */
	}  /* if */

	/* check, if the callee is a leaf */
	if (callee) {
		graph_entry_t *called = graph_get_entry(callee, status->irg_hash);

		/* in this second pass all callees must already be analyzed */
		assert(called->is_analyzed);

		if (! called->is_leaf)
			graph->is_leaf_call = LCS_NON_LEAF_CALL;
	} else
		graph->is_leaf_call = LCS_NON_LEAF_CALL;
}  /* stat_update_call_2 */
884
885 /**
886  * Find the base address and entity of an Sel node.
887  *
888  * @param sel  the node
889  *
890  * @return the base address.
891  */
892 static ir_node *find_base_adr(ir_node *sel)
893 {
894         ir_node *ptr = get_Sel_ptr(sel);
895
896         while (is_Sel(ptr)) {
897                 sel = ptr;
898                 ptr = get_Sel_ptr(sel);
899         }
900         return ptr;
901 }  /* find_base_adr */
902
903 /**
904  * Update info on Load/Store address statistics.
905  */
906 static void stat_update_address(ir_node *node, graph_entry_t *graph)
907 {
908         unsigned opc = get_irn_opcode(node);
909         ir_node *base;
910         ir_graph *irg;
911
912         switch (opc) {
913         case iro_SymConst:
914                 /* a global address */
915                 cnt_inc(&graph->cnt[gcnt_global_adr]);
916                 break;
917         case iro_Sel:
918                 base = find_base_adr(node);
919                 irg = current_ir_graph;
920                 if (base == get_irg_frame(irg)) {
921                         /* a local Variable. */
922                         cnt_inc(&graph->cnt[gcnt_local_adr]);
923                 } else {
924                         /* Pointer access */
925                         if (is_Proj(base) && skip_Proj(get_Proj_pred(base)) == get_irg_start(irg)) {
926                                 /* pointer access through parameter, check for THIS */
927                                 ir_entity *ent = get_irg_entity(irg);
928
929                                 if (ent != NULL) {
930                                         ir_type *ent_tp = get_entity_type(ent);
931
932                                         if (get_method_calling_convention(ent_tp) & cc_this_call) {
933                                                 if (get_Proj_proj(base) == 0) {
934                                                         /* THIS pointer */
935                                                         cnt_inc(&graph->cnt[gcnt_this_adr]);
936                                                         goto end_parameter;
937                                                 }  /* if */
938                                         }  /* if */
939                                 }  /* if */
940                                 /* other parameter */
941                                 cnt_inc(&graph->cnt[gcnt_param_adr]);
942 end_parameter: ;
943                         } else {
944                                 /* unknown Pointer access */
945                                 cnt_inc(&graph->cnt[gcnt_other_adr]);
946                         }  /* if */
947                 }  /* if */
948         default:
949                 ;
950         }  /* switch */
951 }  /* stat_update_address */
952
953 /**
954  * Walker for reachable nodes count.
955  */
956 static void update_node_stat(ir_node *node, void *env)
957 {
958         graph_entry_t *graph = (graph_entry_t*)env;
959         node_entry_t *entry;
960
961         ir_op *op = stat_get_irn_op(node);
962         int i, arity = get_irn_arity(node);
963
964         entry = opcode_get_entry(op, graph->opcode_hash);
965
966         cnt_inc(&entry->cnt_alive);
967         cnt_add_i(&graph->cnt[gcnt_edges], arity);
968
969         /* count block edges */
970         undate_block_info(node, graph);
971
972         /* count extended block edges */
973         if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
974                 if (graph->irg != get_const_code_irg())
975                         update_extbb_info(node, graph);
976         }  /* if */
977
978         /* handle statistics for special node types */
979
980         switch (op->code) {
981         case iro_Call:
982                 /* check for properties that depends on calls like recursion/leaf/indirect call */
983                 stat_update_call(node, graph);
984                 break;
985         case iro_Load:
986                 /* check address properties */
987                 stat_update_address(get_Load_ptr(node), graph);
988                 break;
989         case iro_Store:
990                 /* check address properties */
991                 stat_update_address(get_Store_ptr(node), graph);
992                 break;
993         case iro_Phi:
994                 /* check for non-strict Phi nodes */
995                 for (i = arity - 1; i >= 0; --i) {
996                         ir_node *pred = get_Phi_pred(node, i);
997                         if (is_Unknown(pred)) {
998                                 /* found an Unknown predecessor, graph is not strict */
999                                 graph->is_strict = 0;
1000                                 break;
1001                         }
1002                 }
1003         default:
1004                 ;
1005         }  /* switch */
1006
1007         /* we want to count the constant IN nodes, not the CSE'ed constant's itself */
1008         if (status->stat_options & FIRMSTAT_COUNT_CONSTS) {
1009                 int i;
1010
1011                 for (i = get_irn_arity(node) - 1; i >= 0; --i) {
1012                         ir_node *pred = get_irn_n(node, i);
1013
1014                         if (is_Const(pred)) {
1015                                 /* check properties of constants */
1016                                 stat_update_const(status, pred, graph);
1017                         }  /* if */
1018                 }  /* for */
1019         }  /* if */
1020 }  /* update_node_stat */
1021
1022 /**
1023  * Walker for reachable nodes count for graphs on the wait_q.
1024  */
1025 static void update_node_stat_2(ir_node *node, void *env)
1026 {
1027         graph_entry_t *graph = (graph_entry_t*)env;
1028
1029         /* check for properties that depends on calls like recursion/leaf/indirect call */
1030         if (is_Call(node))
1031                 stat_update_call_2(node, graph);
1032 }  /* update_node_stat_2 */
1033
1034 /**
1035  * Get the current address mark.
1036  */
1037 static unsigned get_adr_mark(graph_entry_t *graph, ir_node *node)
1038 {
1039         address_mark_entry_t *value = (address_mark_entry_t*)set_find(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1040
1041         return value ? value->mark : 0;
1042 }  /* get_adr_mark */
1043
1044 /**
1045  * Set the current address mark.
1046  */
1047 static void set_adr_mark(graph_entry_t *graph, ir_node *node, unsigned val)
1048 {
1049         address_mark_entry_t *value = (address_mark_entry_t*)set_insert(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1050
1051         value->mark = val;
1052 }  /* set_adr_mark */
1053
#undef DUMP_ADR_MODE

#ifdef DUMP_ADR_MODE
/**
 * a vcg attribute hook: Color a node with a different color if
 * it's identified as a part of an address expression or at least referenced
 * by an address expression.
 *
 * Colors: purple    = the node itself calculates an address,
 *         pink      = referenced by address calculations only,
 *         lightblue = referenced by address AND non-address users.
 *
 * @return 1 if a color was emitted, 0 to let the default coloring apply
 */
static int stat_adr_mark_hook(FILE *F, ir_node *node, ir_node *local)
{
	ir_node *n           = local ? local : node;
	ir_graph *irg        = get_irn_irg(n);
	graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
	unsigned mark        = get_adr_mark(graph, n);

	if (mark & MARK_ADDRESS_CALC)
		fprintf(F, "color: purple");
	else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
		fprintf(F, "color: pink");
	else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
		fprintf(F, "color: lightblue");
	else
		return 0;

	/* I know the color! */
	return 1;
}  /* stat_adr_mark_hook */
#endif /* DUMP_ADR_MODE */
1082
1083 /**
1084  * Return the "operational" mode of a Firm node.
1085  */
1086 static ir_mode *get_irn_op_mode(ir_node *node)
1087 {
1088         switch (get_irn_opcode(node)) {
1089         case iro_Load:
1090                 return get_Load_mode(node);
1091         case iro_Store:
1092                 return get_irn_mode(get_Store_value(node));
1093         case iro_Div:
1094                 return get_irn_mode(get_Div_left(node));
1095         case iro_Mod:
1096                 return get_irn_mode(get_Mod_left(node));
1097         case iro_Cmp:
1098                 /* Cmp is no address calculation, or is it? */
1099         default:
1100                 return get_irn_mode(node);
1101         }  /* switch */
1102 }  /* get_irn_op_mode */
1103
1104 /**
1105  * Post-walker that marks every node that is an address calculation.
1106  *
1107  * Users of a node must be visited first. We ensure this by
1108  * calling it in the post of an outs walk. This should work even in cycles,
1109  * while the normal pre-walk will not.
1110  */
1111 static void mark_address_calc(ir_node *node, void *env)
1112 {
1113         graph_entry_t *graph = (graph_entry_t*)env;
1114         ir_mode *mode = get_irn_op_mode(node);
1115         int i, n;
1116         unsigned mark_preds = MARK_REF_NON_ADR;
1117
1118         if (! mode_is_data(mode))
1119                 return;
1120
1121         if (mode_is_reference(mode)) {
1122                 /* a reference is calculated here, we are sure */
1123                 set_adr_mark(graph, node, MARK_ADDRESS_CALC);
1124
1125                 mark_preds = MARK_REF_ADR;
1126         } else {
1127                 unsigned mark = get_adr_mark(graph, node);
1128
1129                 if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR) {
1130                         /*
1131                          * this node has no reference mode, but is only
1132                          * referenced by address calculations
1133                          */
1134                         mark_preds = MARK_REF_ADR;
1135                 }  /* if */
1136         }  /* if */
1137
1138         /* mark all predecessors */
1139         for (i = 0, n = get_irn_arity(node); i < n; ++i) {
1140                 ir_node *pred = get_irn_n(node, i);
1141
1142                 mode = get_irn_op_mode(pred);
1143                 if (! mode_is_data(mode))
1144                         continue;
1145
1146                 set_adr_mark(graph, pred, get_adr_mark(graph, pred) | mark_preds);
1147         }  /* for */
1148 }  /* mark_address_calc */
1149
1150 /**
1151  * Post-walker that marks every node that is an address calculation.
1152  *
1153  * Users of a node must be visited first. We ensure this by
1154  * calling it in the post of an outs walk. This should work even in cycles,
1155  * while the normal pre-walk will not.
1156  */
1157 static void count_adr_ops(ir_node *node, void *env)
1158 {
1159         graph_entry_t *graph = (graph_entry_t*)env;
1160         unsigned mark        = get_adr_mark(graph, node);
1161
1162         if (mark & MARK_ADDRESS_CALC)
1163                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1164         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1165                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1166         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1167                 cnt_inc(&graph->cnt[gcnt_all_adr_ops]);
1168 }  /* count_adr_ops */
1169
1170 /**
1171  * Called for every graph when the graph is either deleted or stat_dump_snapshot()
1172  * is called, must recalculate all statistic info.
1173  *
1174  * @param global    The global entry
1175  * @param graph     The current entry
1176  */
1177 static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph)
1178 {
1179         node_entry_t *entry;
1180         int i;
1181
1182         /* clear first the alive counter in the graph */
1183         foreach_pset(graph->opcode_hash, node_entry_t*, entry) {
1184                 cnt_clr(&entry->cnt_alive);
1185         }  /* foreach_pset */
1186
1187         /* set pessimistic values */
1188         graph->is_leaf       = 1;
1189         graph->is_leaf_call  = LCS_UNKNOWN;
1190         graph->is_recursive  = 0;
1191         graph->is_chain_call = 1;
1192         graph->is_strict     = 1;
1193
1194         /* create new block counter */
1195         graph->block_hash = new_pset(block_cmp, 5);
1196
1197         /* we need dominator info */
1198         if (graph->irg != get_const_code_irg()) {
1199                 assure_doms(graph->irg);
1200
1201                 if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
1202                         /* we need extended basic blocks */
1203                         compute_extbb(graph->irg);
1204
1205                         /* create new extbb counter */
1206                         graph->extbb_hash = new_pset(block_cmp, 5);
1207                 }  /* if */
1208         }  /* if */
1209
1210         /* count the nodes in the graph */
1211         irg_walk_graph(graph->irg, update_node_stat, NULL, graph);
1212
1213 #if 0
1214         /* Uncomment this code if chain-call means call exact one. */
1215         entry = opcode_get_entry(op_Call, graph->opcode_hash);
1216
1217         /* check if we have more than 1 call */
1218         if (cnt_gt(entry->cnt_alive, 1))
1219                 graph->is_chain_call = 0;
1220 #endif
1221
1222         /* recursive functions are never chain calls, leafs don't have calls */
1223         if (graph->is_recursive || graph->is_leaf)
1224                 graph->is_chain_call = 0;
1225
1226         /* assume we walk every graph only ONCE, we could sum here the global count */
1227         foreach_pset(graph->opcode_hash, node_entry_t*, entry) {
1228                 node_entry_t *g_entry = opcode_get_entry(entry->op, global->opcode_hash);
1229
1230                 /* update the node counter */
1231                 cnt_add(&g_entry->cnt_alive, &entry->cnt_alive);
1232         }  /* foreach_pset */
1233
1234         /* count the number of address calculation */
1235         if (graph->irg != get_const_code_irg()) {
1236                 ir_graph *rem = current_ir_graph;
1237
1238                 assure_irg_outs(graph->irg);
1239
1240                 /* Must be done an the outs graph */
1241                 current_ir_graph = graph->irg;
1242                 irg_out_walk(get_irg_start(graph->irg), NULL, mark_address_calc, graph);
1243                 current_ir_graph = rem;
1244
1245 #ifdef DUMP_ADR_MODE
1246                 /* register the vcg hook and dump the graph for test */
1247                 set_dump_node_vcgattr_hook(stat_adr_mark_hook);
1248                 dump_ir_block_graph(graph->irg, "-adr");
1249                 set_dump_node_vcgattr_hook(NULL);
1250 #endif /* DUMP_ADR_MODE */
1251
1252                 irg_walk_graph(graph->irg, NULL, count_adr_ops, graph);
1253         }  /* if */
1254
1255         /* count the DAG's */
1256         if (status->stat_options & FIRMSTAT_COUNT_DAG)
1257                 count_dags_in_graph(global, graph);
1258
1259         /* calculate the patterns of this graph */
1260         stat_calc_pattern_history(graph->irg);
1261
1262         /* leaf function did not call others */
1263         if (graph->is_leaf)
1264                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
1265         else if (graph->is_leaf_call == LCS_UNKNOWN) {
1266                 /* we still don't know if this graph calls leaf-functions, so enqueue */
1267                 pdeq_putl(status->wait_q, graph);
1268         }  /* if */
1269
1270         /* we have analyzed this graph */
1271         graph->is_analyzed = 1;
1272
1273         /* accumulate all counter's */
1274         for (i = 0; i < _gcnt_last; ++i)
1275                 cnt_add(&global->cnt[i], &graph->cnt[i]);
1276 }  /* update_graph_stat */
1277
1278 /**
1279  * Called for every graph that was on the wait_q in stat_dump_snapshot()
1280  * must finish all statistic info calculations.
1281  *
1282  * @param global    The global entry
1283  * @param graph     The current entry
1284  */
1285 static void update_graph_stat_2(graph_entry_t *global, graph_entry_t *graph)
1286 {
1287         (void) global;
1288         if (graph->is_deleted) {
1289                 /* deleted, ignore */
1290                 return;
1291         }
1292
1293         if (graph->irg) {
1294                 /* count the nodes in the graph */
1295                 irg_walk_graph(graph->irg, update_node_stat_2, NULL, graph);
1296
1297                 if (graph->is_leaf_call == LCS_UNKNOWN)
1298                         graph->is_leaf_call = LCS_LEAF_CALL;
1299         }  /* if */
1300 }  /* update_graph_stat_2 */
1301
1302 /**
1303  * Register a dumper.
1304  */
1305 static void stat_register_dumper(const dumper_t *dumper)
1306 {
1307         dumper_t *p = XMALLOC(dumper_t);
1308
1309         memcpy(p, dumper, sizeof(*p));
1310
1311         p->next        = status->dumper;
1312         p->status      = status;
1313         status->dumper = p;
1314
1315         /* FIXME: memory leak */
1316 }  /* stat_register_dumper */
1317
1318 /**
1319  * Dumps the statistics of an IR graph.
1320  */
1321 static void stat_dump_graph(graph_entry_t *entry)
1322 {
1323         dumper_t *dumper;
1324
1325         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1326                 if (dumper->dump_graph)
1327                         dumper->dump_graph(dumper, entry);
1328         }  /* for */
1329 }  /* stat_dump_graph */
1330
1331 /**
1332  * Calls all registered dumper functions.
1333  */
1334 static void stat_dump_registered(graph_entry_t *entry)
1335 {
1336         dumper_t *dumper;
1337
1338         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1339                 if (dumper->func_map) {
1340                         dump_graph_FUNC func;
1341
1342                         foreach_pset(dumper->func_map, dump_graph_FUNC, func)
1343                                 func(dumper, entry);
1344                 }  /* if */
1345         }  /* for */
1346 }  /* stat_dump_registered */
1347
1348 /**
1349  * Dumps a constant table.
1350  */
1351 static void stat_dump_consts(const constant_info_t *tbl)
1352 {
1353         dumper_t *dumper;
1354
1355         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1356                 if (dumper->dump_const_tbl)
1357                         dumper->dump_const_tbl(dumper, tbl);
1358         }  /* for */
1359 }  /* stat_dump_consts */
1360
1361 /**
1362  * Dumps the parameter distribution
1363  */
1364 static void stat_dump_param_tbl(const distrib_tbl_t *tbl, graph_entry_t *global)
1365 {
1366         dumper_t *dumper;
1367
1368         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1369                 if (dumper->dump_param_tbl)
1370                         dumper->dump_param_tbl(dumper, tbl, global);
1371         }  /* for */
1372 }  /* stat_dump_param_tbl */
1373
1374 /**
1375  * Dumps the optimization counter
1376  */
1377 static void stat_dump_opt_cnt(const counter_t *tbl, unsigned len)
1378 {
1379         dumper_t *dumper;
1380
1381         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1382                 if (dumper->dump_opt_cnt)
1383                         dumper->dump_opt_cnt(dumper, tbl, len);
1384         }  /* for */
1385 }  /* stat_dump_opt_cnt */
1386
1387 /**
1388  * Initialize the dumper.
1389  */
1390 static void stat_dump_init(const char *name)
1391 {
1392         dumper_t *dumper;
1393
1394         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1395                 if (dumper->init)
1396                         dumper->init(dumper, name);
1397         }  /* for */
1398 }  /* stat_dump_init */
1399
1400 /**
1401  * Finish the dumper.
1402  */
1403 static void stat_dump_finish(void)
1404 {
1405         dumper_t *dumper;
1406
1407         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1408                 if (dumper->finish)
1409                         dumper->finish(dumper);
1410         }  /* for */
1411 }  /* stat_dump_finish */
1412
1413 /**
1414  * Register an additional function for all dumper.
1415  */
1416 void stat_register_dumper_func(dump_graph_FUNC func)
1417 {
1418         dumper_t *dumper;
1419
1420         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1421                 if (! dumper->func_map)
1422                         dumper->func_map = pset_new_ptr(3);
1423                 pset_insert_ptr(dumper->func_map, (void*)func);
1424         }  /* for */
1425 }  /* stat_register_dumper_func */
1426
1427 /* ---------------------------------------------------------------------- */
1428
1429 /*
1430  * Helper: get an ir_op from an opcode.
1431  */
1432 ir_op *stat_get_op_from_opcode(unsigned code)
1433 {
1434         return opcode_find_entry(code, status->ir_op_hash);
1435 }  /* stat_get_op_from_opcode */
1436
1437 /**
1438  * Hook: A new IR op is registered.
1439  *
1440  * @param ctx  the hook context
1441  * @param op   the new IR opcode that was created.
1442  */
1443 static void stat_new_ir_op(void *ctx, ir_op *op)
1444 {
1445         (void) ctx;
1446         if (! status->stat_options)
1447                 return;
1448
1449         STAT_ENTER;
1450         {
1451                 graph_entry_t *graph = graph_get_entry(NULL, status->irg_hash);
1452
1453                 /* execute for side effect :-) */
1454                 (void)opcode_get_entry(op, graph->opcode_hash);
1455
1456                 pset_insert(status->ir_op_hash, op, op->code);
1457         }
1458         STAT_LEAVE;
1459 }  /* stat_new_ir_op */
1460
1461 /**
1462  * Hook: An IR op is freed.
1463  *
1464  * @param ctx  the hook context
1465  * @param op   the IR opcode that is freed
1466  */
1467 static void stat_free_ir_op(void *ctx, ir_op *op)
1468 {
1469         (void) ctx;
1470         (void) op;
1471         if (! status->stat_options)
1472                 return;
1473
1474         STAT_ENTER;
1475         {
1476         }
1477         STAT_LEAVE;
1478 }  /* stat_free_ir_op */
1479
1480 /**
1481  * Hook: A new node is created.
1482  *
1483  * @param ctx   the hook context
1484  * @param irg   the IR graph on which the node is created
1485  * @param node  the new IR node that was created
1486  */
1487 static void stat_new_node(void *ctx, ir_graph *irg, ir_node *node)
1488 {
1489         (void) ctx;
1490         (void) irg;
1491         if (! status->stat_options)
1492                 return;
1493
1494         /* do NOT count during dead node elimination */
1495         if (status->in_dead_node_elim)
1496                 return;
1497
1498         STAT_ENTER;
1499         {
1500                 node_entry_t *entry;
1501                 graph_entry_t *graph;
1502                 ir_op *op = stat_get_irn_op(node);
1503
1504                 /* increase global value */
1505                 graph = graph_get_entry(NULL, status->irg_hash);
1506                 entry = opcode_get_entry(op, graph->opcode_hash);
1507                 cnt_inc(&entry->new_node);
1508
1509                 /* increase local value */
1510                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1511                 entry = opcode_get_entry(op, graph->opcode_hash);
1512                 cnt_inc(&entry->new_node);
1513         }
1514         STAT_LEAVE;
1515 }  /* stat_new_node */
1516
1517 /**
1518  * Hook: A node is changed into a Id node
1519  *
1520  * @param ctx   the hook context
1521  * @param node  the IR node that will be turned into an ID
1522  */
1523 static void stat_turn_into_id(void *ctx, ir_node *node)
1524 {
1525         (void) ctx;
1526         if (! status->stat_options)
1527                 return;
1528
1529         STAT_ENTER;
1530         {
1531                 node_entry_t *entry;
1532                 graph_entry_t *graph;
1533                 ir_op *op = stat_get_irn_op(node);
1534
1535                 /* increase global value */
1536                 graph = graph_get_entry(NULL, status->irg_hash);
1537                 entry = opcode_get_entry(op, graph->opcode_hash);
1538                 cnt_inc(&entry->into_Id);
1539
1540                 /* increase local value */
1541                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1542                 entry = opcode_get_entry(op, graph->opcode_hash);
1543                 cnt_inc(&entry->into_Id);
1544         }
1545         STAT_LEAVE;
1546 }  /* stat_turn_into_id */
1547
1548 /**
1549  * Hook: A node is normalized
1550  *
1551  * @param ctx   the hook context
1552  * @param node  the IR node that was normalized
1553  */
1554 static void stat_normalize(void *ctx, ir_node *node)
1555 {
1556         (void) ctx;
1557         if (! status->stat_options)
1558                 return;
1559
1560         STAT_ENTER;
1561         {
1562                 node_entry_t *entry;
1563                 graph_entry_t *graph;
1564                 ir_op *op = stat_get_irn_op(node);
1565
1566                 /* increase global value */
1567                 graph = graph_get_entry(NULL, status->irg_hash);
1568                 entry = opcode_get_entry(op, graph->opcode_hash);
1569                 cnt_inc(&entry->normalized);
1570
1571                 /* increase local value */
1572                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1573                 entry = opcode_get_entry(op, graph->opcode_hash);
1574                 cnt_inc(&entry->normalized);
1575         }
1576         STAT_LEAVE;
1577 }  /* stat_normalize */
1578
1579 /**
1580  * Hook: A new graph was created
1581  *
1582  * @param ctx  the hook context
1583  * @param irg  the new IR graph that was created
1584  * @param ent  the entity of this graph
1585  */
1586 static void stat_new_graph(void *ctx, ir_graph *irg, ir_entity *ent)
1587 {
1588         (void) ctx;
1589         if (! status->stat_options)
1590                 return;
1591
1592         STAT_ENTER;
1593         {
1594                 /* execute for side effect :-) */
1595                 graph_entry_t * graph = graph_get_entry(irg, status->irg_hash);
1596
1597                 graph->ent           = ent;
1598                 graph->is_deleted    = 0;
1599                 graph->is_leaf       = 0;
1600                 graph->is_leaf_call  = 0;
1601                 graph->is_recursive  = 0;
1602                 graph->is_chain_call = 0;
1603                 graph->is_strict     = 1;
1604                 graph->is_analyzed   = 0;
1605         }
1606         STAT_LEAVE;
1607 }  /* stat_new_graph */
1608
1609 /**
1610  * Hook: A graph will be deleted
1611  *
1612  * @param ctx  the hook context
1613  * @param irg  the IR graph that will be deleted
1614  *
1615  * Note that we still hold the information for this graph
1616  * in our hash maps, only a flag is set which prevents this
1617  * information from being changed, it's "frozen" from now.
1618  */
1619 static void stat_free_graph(void *ctx, ir_graph *irg)
1620 {
1621         (void) ctx;
1622         if (! status->stat_options)
1623                 return;
1624
1625         STAT_ENTER;
1626         {
1627                 graph_entry_t *graph  = graph_get_entry(irg, status->irg_hash);
1628                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
1629
1630                 graph->is_deleted = 1;
1631
1632                 if (status->stat_options & FIRMSTAT_COUNT_DELETED) {
1633                         /* count the nodes of the graph yet, it will be destroyed later */
1634                         update_graph_stat(global, graph);
1635                 }  /* if */
1636         }
1637         STAT_LEAVE;
1638 }  /* stat_free_graph */
1639
1640 /**
1641  * Hook: A walk over a graph is initiated. Do not count walks from statistic code.
1642  *
1643  * @param ctx  the hook context
1644  * @param irg  the IR graph that will be walked
1645  * @param pre  the pre walker
1646  * @param post the post walker
1647  */
1648 static void stat_irg_walk(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1649 {
1650         (void) ctx;
1651         (void) pre;
1652         (void) post;
1653         if (! status->stat_options)
1654                 return;
1655
1656         STAT_ENTER_SINGLE;
1657         {
1658                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1659
1660                 cnt_inc(&graph->cnt[gcnt_acc_walked]);
1661         }
1662         STAT_LEAVE;
1663 }  /* stat_irg_walk */
1664
1665 /**
1666  * Hook: A walk over a graph in block-wise order is initiated. Do not count walks from statistic code.
1667  *
1668  * @param ctx  the hook context
1669  * @param irg  the IR graph that will be walked
1670  * @param pre  the pre walker
1671  * @param post the post walker
1672  */
1673 static void stat_irg_walk_blkwise(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1674 {
1675         /* for now, do NOT differentiate between blockwise and normal */
1676         stat_irg_walk(ctx, irg, pre, post);
1677 }  /* stat_irg_walk_blkwise */
1678
1679 /**
1680  * Hook: A walk over the graph's blocks is initiated. Do not count walks from statistic code.
1681  *
1682  * @param ctx  the hook context
1683  * @param irg  the IR graph that will be walked
1684  * @param node the IR node
1685  * @param pre  the pre walker
1686  * @param post the post walker
1687  */
1688 static void stat_irg_block_walk(void *ctx, ir_graph *irg, ir_node *node, generic_func *pre, generic_func *post)
1689 {
1690         (void) ctx;
1691         (void) node;
1692         (void) pre;
1693         (void) post;
1694         if (! status->stat_options)
1695                 return;
1696
1697         STAT_ENTER_SINGLE;
1698         {
1699                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1700
1701                 cnt_inc(&graph->cnt[gcnt_acc_walked_blocks]);
1702         }
1703         STAT_LEAVE;
1704 }  /* stat_irg_block_walk */
1705
1706 /**
1707  * Called for every node that is removed due to an optimization.
1708  *
1709  * @param n     the IR node that will be removed
1710  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
1711  * @param kind  the optimization kind
1712  */
1713 static void removed_due_opt(ir_node *n, hmap_opt_entry_t *hmap, hook_opt_kind kind)
1714 {
1715         opt_entry_t *entry;
1716         ir_op *op = stat_get_irn_op(n);
1717
1718         /* ignore CSE for Constants */
1719         if (kind == HOOK_OPT_CSE && (is_Const(n) || is_SymConst(n)))
1720                 return;
1721
1722         /* increase global value */
1723         entry = opt_get_entry(op, hmap);
1724         cnt_inc(&entry->count);
1725 }  /* removed_due_opt */
1726
1727 /**
1728  * Hook: Some nodes were optimized into some others due to an optimization.
1729  *
1730  * @param ctx  the hook context
1731  */
1732 static void stat_merge_nodes(
1733     void *ctx,
1734     ir_node **new_node_array, int new_num_entries,
1735     ir_node **old_node_array, int old_num_entries,
1736     hook_opt_kind opt)
1737 {
1738         (void) ctx;
1739         if (! status->stat_options)
1740                 return;
1741
1742         STAT_ENTER;
1743         {
1744                 int i, j;
1745                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1746
1747                 cnt_inc(&status->num_opts[opt]);
1748                 if (status->reassoc_run)
1749                         opt = HOOK_OPT_REASSOC;
1750
1751                 for (i = 0; i < old_num_entries; ++i) {
1752                         /* nodes might be in new and old, so if we found a node
1753                            in both sets, this one  is NOT removed */
1754                         for (j = 0; j < new_num_entries; ++j) {
1755                                 if (old_node_array[i] == new_node_array[j])
1756                                         break;
1757                         }  /* for */
1758                         if (j >= new_num_entries) {
1759                                 int xopt = opt;
1760
1761                                 /* sometimes we did not detect, that it is replaced by a Const */
1762                                 if (opt == HOOK_OPT_CONFIRM && new_num_entries == 1) {
1763                                         ir_op *op = get_irn_op(new_node_array[0]);
1764
1765                                         if (op == op_Const || op == op_SymConst)
1766                                                 xopt = HOOK_OPT_CONFIRM_C;
1767                                 }  /* if */
1768
1769                                 removed_due_opt(old_node_array[i], graph->opt_hash[xopt], (hook_opt_kind)xopt);
1770                         }  /* if */
1771                 }  /* for */
1772         }
1773         STAT_LEAVE;
1774 }  /* stat_merge_nodes */
1775
1776 /**
1777  * Hook: Reassociation is started/stopped.
1778  *
1779  * @param ctx   the hook context
1780  * @param flag  if non-zero, reassociation is started else stopped
1781  */
1782 static void stat_reassociate(void *ctx, int flag)
1783 {
1784         (void) ctx;
1785         if (! status->stat_options)
1786                 return;
1787
1788         STAT_ENTER;
1789         {
1790                 status->reassoc_run = flag;
1791         }
1792         STAT_LEAVE;
1793 }  /* stat_reassociate */
1794
1795 /**
1796  * Hook: A node was lowered into other nodes
1797  *
1798  * @param ctx  the hook context
1799  * @param node the IR node that will be lowered
1800  */
1801 static void stat_lower(void *ctx, ir_node *node)
1802 {
1803         (void) ctx;
1804         if (! status->stat_options)
1805                 return;
1806
1807         STAT_ENTER;
1808         {
1809                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1810
1811                 removed_due_opt(node, graph->opt_hash[HOOK_LOWERED], HOOK_LOWERED);
1812         }
1813         STAT_LEAVE;
1814 }  /* stat_lower */
1815
1816 /**
1817  * Hook: A graph was inlined.
1818  *
1819  * @param ctx  the hook context
1820  * @param call the IR call that will re changed into the body of
1821  *             the called IR graph
1822  * @param called_irg  the IR graph representing the called routine
1823  */
1824 static void stat_inline(void *ctx, ir_node *call, ir_graph *called_irg)
1825 {
1826         (void) ctx;
1827         if (! status->stat_options)
1828                 return;
1829
1830         STAT_ENTER;
1831         {
1832                 ir_graph *irg = get_irn_irg(call);
1833                 graph_entry_t *i_graph = graph_get_entry(called_irg, status->irg_hash);
1834                 graph_entry_t *graph   = graph_get_entry(irg, status->irg_hash);
1835
1836                 cnt_inc(&graph->cnt[gcnt_acc_got_inlined]);
1837                 cnt_inc(&i_graph->cnt[gcnt_acc_was_inlined]);
1838         }
1839         STAT_LEAVE;
1840 }  /* stat_inline */
1841
1842 /**
1843  * Hook: A graph with tail-recursions was optimized.
1844  *
1845  * @param ctx  the hook context
1846  */
1847 static void stat_tail_rec(void *ctx, ir_graph *irg, int n_calls)
1848 {
1849         (void) ctx;
1850         if (! status->stat_options)
1851                 return;
1852
1853         STAT_ENTER;
1854         {
1855                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1856
1857                 graph->num_tail_recursion += n_calls;
1858         }
1859         STAT_LEAVE;
1860 }  /* stat_tail_rec */
1861
1862 /**
1863  * Strength reduction was performed on an iteration variable.
1864  *
1865  * @param ctx  the hook context
1866  */
1867 static void stat_strength_red(void *ctx, ir_graph *irg, ir_node *strong)
1868 {
1869         (void) ctx;
1870         if (! status->stat_options)
1871                 return;
1872
1873         STAT_ENTER;
1874         {
1875                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1876                 cnt_inc(&graph->cnt[gcnt_acc_strength_red]);
1877
1878                 removed_due_opt(strong, graph->opt_hash[HOOK_OPT_STRENGTH_RED], HOOK_OPT_STRENGTH_RED);
1879         }
1880         STAT_LEAVE;
1881 }  /* stat_strength_red */
1882
1883 /**
1884  * Hook: Start/Stop the dead node elimination.
1885  *
1886  * @param ctx  the hook context
1887  */
1888 static void stat_dead_node_elim(void *ctx, ir_graph *irg, int start)
1889 {
1890         (void) ctx;
1891         (void) irg;
1892         if (! status->stat_options)
1893                 return;
1894
1895         status->in_dead_node_elim = (start != 0);
1896 }  /* stat_dead_node_elim */
1897
1898 /**
1899  * Hook: if-conversion was tried.
1900  */
1901 static void stat_if_conversion(void *context, ir_graph *irg, ir_node *phi,
1902                                int pos, ir_node *mux, if_result_t reason)
1903 {
1904         (void) context;
1905         (void) phi;
1906         (void) pos;
1907         (void) mux;
1908         if (! status->stat_options)
1909                 return;
1910
1911         STAT_ENTER;
1912         {
1913                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1914
1915                 cnt_inc(&graph->cnt[gcnt_if_conv + reason]);
1916         }
1917         STAT_LEAVE;
1918 }  /* stat_if_conversion */
1919
1920 /**
1921  * Hook: real function call was optimized.
1922  */
1923 static void stat_func_call(void *context, ir_graph *irg, ir_node *call)
1924 {
1925         (void) context;
1926         (void) call;
1927         if (! status->stat_options)
1928                 return;
1929
1930         STAT_ENTER;
1931         {
1932                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1933
1934                 cnt_inc(&graph->cnt[gcnt_acc_real_func_call]);
1935         }
1936         STAT_LEAVE;
1937 }  /* stat_func_call */
1938
1939 /**
1940  * Hook: A multiply was replaced by a series of Shifts/Adds/Subs.
1941  *
1942  * @param ctx  the hook context
1943  */
1944 static void stat_arch_dep_replace_mul_with_shifts(void *ctx, ir_node *mul)
1945 {
1946         (void) ctx;
1947         if (! status->stat_options)
1948                 return;
1949
1950         STAT_ENTER;
1951         {
1952                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1953                 removed_due_opt(mul, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1954         }
1955         STAT_LEAVE;
1956 }  /* stat_arch_dep_replace_mul_with_shifts */
1957
1958 /**
1959  * Hook: A division by const was replaced.
1960  *
1961  * @param ctx   the hook context
1962  * @param node  the division node that will be optimized
1963  */
1964 static void stat_arch_dep_replace_division_by_const(void *ctx, ir_node *node)
1965 {
1966         (void) ctx;
1967         if (! status->stat_options)
1968                 return;
1969
1970         STAT_ENTER;
1971         {
1972                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1973                 removed_due_opt(node, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1974         }
1975         STAT_LEAVE;
1976 }  /* stat_arch_dep_replace_division_by_const */
1977
1978 /*
1979  * Update the register pressure of a block.
1980  *
1981  * @param irg        the irg containing the block
1982  * @param block      the block for which the reg pressure should be set
1983  * @param pressure   the pressure
1984  * @param class_name the name of the register class
1985  */
1986 void stat_be_block_regpressure(ir_graph *irg, ir_node *block, int pressure, const char *class_name)
1987 {
1988         if (! status->stat_options)
1989                 return;
1990
1991         STAT_ENTER;
1992         {
1993                 graph_entry_t        *graph = graph_get_entry(irg, status->irg_hash);
1994                 be_block_entry_t     *block_ent;
1995                 reg_pressure_entry_t *rp_ent;
1996
1997                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
1998                 rp_ent    = OALLOCZ(&status->be_data, reg_pressure_entry_t);
1999
2000                 rp_ent->class_name = class_name;
2001                 rp_ent->pressure   = pressure;
2002
2003                 pset_insert(block_ent->reg_pressure, rp_ent, HASH_PTR(class_name));
2004         }
2005         STAT_LEAVE;
2006 }  /* stat_be_block_regpressure */
2007
2008 /**
2009  * Update the distribution of ready nodes of a block
2010  *
2011  * @param irg        the irg containing the block
2012  * @param block      the block for which the reg pressure should be set
2013  * @param num_ready  the number of ready nodes
2014  */
2015 void stat_be_block_sched_ready(ir_graph *irg, ir_node *block, int num_ready)
2016 {
2017         if (! status->stat_options)
2018                 return;
2019
2020         STAT_ENTER;
2021         {
2022                 graph_entry_t    *graph = graph_get_entry(irg, status->irg_hash);
2023                 be_block_entry_t *block_ent;
2024
2025                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2026
2027                 /* increase the counter of corresponding number of ready nodes */
2028                 stat_inc_int_distrib_tbl(block_ent->sched_ready, num_ready);
2029         }
2030         STAT_LEAVE;
2031 }  /* stat_be_block_sched_ready */
2032
2033 /**
2034  * Update the permutation statistic of a block.
2035  *
2036  * @param class_name the name of the register class
2037  * @param n_regs     number of registers in the register class
2038  * @param perm       the perm node
2039  * @param block      the block containing the perm
2040  * @param size       the size of the perm
2041  * @param real_size  number of pairs with different registers
2042  */
2043 void stat_be_block_stat_perm(const char *class_name, int n_regs, ir_node *perm, ir_node *block,
2044                              int size, int real_size)
2045 {
2046         if (! status->stat_options)
2047                 return;
2048
2049         STAT_ENTER;
2050         {
2051                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2052                 be_block_entry_t   *block_ent;
2053                 perm_class_entry_t *pc_ent;
2054                 perm_stat_entry_t  *ps_ent;
2055
2056                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2057                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2058                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2059
2060                 pc_ent->n_regs = n_regs;
2061
2062                 /* update information */
2063                 ps_ent->size      = size;
2064                 ps_ent->real_size = real_size;
2065         }
2066         STAT_LEAVE;
2067 }  /* stat_be_block_stat_perm */
2068
2069 /**
2070  * Update the permutation statistic of a single perm.
2071  *
2072  * @param class_name the name of the register class
2073  * @param perm       the perm node
2074  * @param block      the block containing the perm
2075  * @param is_chain   1 if chain, 0 if cycle
2076  * @param size       length of the cycle/chain
2077  * @param n_ops      the number of ops representing this cycle/chain after lowering
2078  */
2079 void stat_be_block_stat_permcycle(const char *class_name, ir_node *perm, ir_node *block,
2080                                   int is_chain, int size, int n_ops)
2081 {
2082         if (! status->stat_options)
2083                 return;
2084
2085         STAT_ENTER;
2086         {
2087                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2088                 be_block_entry_t   *block_ent;
2089                 perm_class_entry_t *pc_ent;
2090                 perm_stat_entry_t  *ps_ent;
2091
2092                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2093                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2094                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2095
2096                 if (is_chain) {
2097                         ps_ent->n_copies += n_ops;
2098                         stat_inc_int_distrib_tbl(ps_ent->chains, size);
2099                 } else {
2100                         ps_ent->n_exchg += n_ops;
2101                         stat_inc_int_distrib_tbl(ps_ent->cycles, size);
2102                 }  /* if */
2103         }
2104         STAT_LEAVE;
2105 }  /* stat_be_block_stat_permcycle */
2106
2107 /* Dumps a statistics snapshot. */
2108 void stat_dump_snapshot(const char *name, const char *phase)
2109 {
2110         char fname[2048];
2111         const char *p;
2112         size_t l;
2113
2114         if (! status->stat_options)
2115                 return;
2116
2117         STAT_ENTER;
2118         {
2119                 graph_entry_t *entry;
2120                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
2121
2122                 /*
2123                  * The constant counter is only global, so we clear it here.
2124                  * Note that it does NOT contain the constants in DELETED
2125                  * graphs due to this.
2126                  */
2127                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2128                         stat_const_clear(status);
2129
2130                 /* build the name */
2131                 p = strrchr(name, '/');
2132 #ifdef _WIN32
2133                 {
2134                         const char *q;
2135
2136                         q = strrchr(name, '\\');
2137
2138                         /* NULL might be not the smallest pointer */
2139                         if (q && (!p || q > p))
2140                                 p = q;
2141                 }
2142 #endif /* _WIN32 */
2143                 if (p) {
2144                         ++p;
2145                         l = p - name;
2146
2147                         if (l > (int) (sizeof(fname) - 1))
2148                                 l = sizeof(fname) - 1;
2149
2150                         memcpy(fname, name, l);
2151                         fname[l] = '\0';
2152                 } else {
2153                         fname[0] = '\0';
2154                         p = name;
2155                 }  /* if */
2156                 strncat(fname, "firmstat-", sizeof(fname)-1);
2157                 strncat(fname, phase, sizeof(fname)-1);
2158                 strncat(fname, "-", sizeof(fname)-1);
2159                 strncat(fname, p, sizeof(fname)-1);
2160
2161                 stat_dump_init(fname);
2162
2163                 /* calculate the graph statistics */
2164                 for (entry = (graph_entry_t*)pset_first(status->irg_hash);
2165                       entry != NULL; entry = (graph_entry_t*)pset_next(status->irg_hash)) {
2166                         if (entry->irg == NULL) {
2167                                 /* special entry for the global count */
2168                                 continue;
2169                         }  /* if */
2170                         if (! entry->is_deleted) {
2171                                 /* the graph is still alive, count the nodes on it */
2172                                 update_graph_stat(global, entry);
2173                         }  /* if */
2174                 }  /* for */
2175
2176                 /* some calculations are dependent, we pushed them on the wait_q */
2177                 while (! pdeq_empty(status->wait_q)) {
2178                         entry = (graph_entry_t*)pdeq_getr(status->wait_q);
2179
2180                         update_graph_stat_2(global, entry);
2181                 }  /* while */
2182
2183                 /* dump per graph */
2184                 for (entry = (graph_entry_t*)pset_first(status->irg_hash);
2185                      entry != NULL; entry = (graph_entry_t*)pset_next(status->irg_hash)) {
2186                         if (entry->irg == NULL) {
2187                                 /* special entry for the global count */
2188                                 continue;
2189                         }  /* if */
2190
2191                         if (! entry->is_deleted || status->stat_options & FIRMSTAT_COUNT_DELETED) {
2192                                 stat_dump_graph(entry);
2193                                 stat_dump_registered(entry);
2194                         }  /* if */
2195
2196                         if (! entry->is_deleted) {
2197                                 /* clear the counter that are not accumulated */
2198                                 graph_clear_entry(entry, 0);
2199                         }  /* if */
2200                 }  /* for */
2201
2202                 /* dump global */
2203                 stat_dump_graph(global);
2204
2205                 /* dump the const info */
2206                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2207                         stat_dump_consts(&status->const_info);
2208
2209                 /* dump the parameter distribution */
2210                 stat_dump_param_tbl(status->dist_param_cnt, global);
2211
2212                 /* dump the optimization counter and clear them */
2213                 stat_dump_opt_cnt(status->num_opts, ARRAY_SIZE(status->num_opts));
2214                 clear_optimization_counter();
2215
2216                 stat_dump_finish();
2217
2218                 stat_finish_pattern_history(fname);
2219
2220                 /* clear the global counters here */
2221                 {
2222                         node_entry_t *entry;
2223
2224                         for (entry = (node_entry_t*)pset_first(global->opcode_hash);
2225                              entry != NULL; entry = (node_entry_t*)pset_next(global->opcode_hash)) {
2226                                 opcode_clear_entry(entry);
2227                         }  /* for */
2228                         /* clear all global counter */
2229                         graph_clear_entry(global, /*all=*/1);
2230                 }
2231         }
2232         STAT_LEAVE;
2233 }  /* stat_dump_snapshot */
2234
/** Context for the stat_dump_snapshot ir_prog pass wrapper. */
typedef struct pass_t {
	ir_prog_pass_t pass;    /**< inherited ir_prog pass data */
	const char     *fname;  /**< name argument passed to stat_dump_snapshot() */
	const char     *phase;  /**< phase argument passed to stat_dump_snapshot() */
} pass_t;
2240
2241 /**
2242  * Wrapper to run stat_dump_snapshot() as a ir_prog wrapper.
2243  */
2244 static int stat_dump_snapshot_wrapper(ir_prog *irp, void *context)
2245 {
2246         pass_t *pass = (pass_t*)context;
2247
2248         (void)irp;
2249         stat_dump_snapshot(pass->fname, pass->phase);
2250         return 0;
2251 }  /* stat_dump_snapshot_wrapper */
2252
2253 /**
2254  * Ensure that no verifier is run from the wrapper.
2255  */
2256 static int no_verify(ir_prog *prog, void *ctx)
2257 {
2258         (void)prog;
2259         (void)ctx;
2260         return 0;
2261 }
2262
2263 /**
2264  * Ensure that no dumper is run from the wrapper.
2265  */
2266 static void no_dump(ir_prog *prog, void *ctx, unsigned idx)
2267 {
2268         (void)prog;
2269         (void)ctx;
2270         (void)idx;
2271 }
2272
2273 /* create an ir_pog pass */
2274 ir_prog_pass_t *stat_dump_snapshot_pass(
2275         const char *name, const char *fname, const char *phase)
2276 {
2277         pass_t *pass = XMALLOCZ(pass_t);
2278
2279         def_prog_pass_constructor(
2280                 &pass->pass, name ? name : "stat_snapshot", stat_dump_snapshot_wrapper);
2281         pass->fname = fname;
2282         pass->phase = phase;
2283
2284         /* no dump/verify */
2285         pass->pass.dump_irprog   = no_dump;
2286         pass->pass.verify_irprog = no_verify;
2287
2288         return &pass->pass;
2289 }  /* stat_dump_snapshot_pass */
2290
/** The hook entries for the Firm statistics module, one slot per hook type. */
static hook_entry_t stat_hooks[hook_last];
2293
/* Initialize the statistics module: allocate the status, register all
 * hooks and create the hash tables and pseudo opcodes.  Does nothing
 * unless FIRMSTAT_ENABLED is set in enable_options. */
void firm_init_stat(unsigned enable_options)
{
#define X(a)  a, sizeof(a)-1
#define HOOK(h, fkt) \
	stat_hooks[h].hook._##h = fkt; register_hook(h, &stat_hooks[h])
	/* pseudo opcodes are numbered downwards from (unsigned)-1 via --num,
	 * so they never collide with the real (upward counted) opcodes */
	unsigned num = 0;

	if (! (enable_options & FIRMSTAT_ENABLED))
		return;

	status = XMALLOCZ(stat_info_t);

	/* enable statistics */
	status->stat_options = enable_options & FIRMSTAT_ENABLED ? enable_options : 0;

	/* register all hooks */
	HOOK(hook_new_ir_op,                          stat_new_ir_op);
	HOOK(hook_free_ir_op,                         stat_free_ir_op);
	HOOK(hook_new_node,                           stat_new_node);
	HOOK(hook_turn_into_id,                       stat_turn_into_id);
	HOOK(hook_normalize,                          stat_normalize);
	HOOK(hook_new_graph,                          stat_new_graph);
	HOOK(hook_free_graph,                         stat_free_graph);
	HOOK(hook_irg_walk,                           stat_irg_walk);
	HOOK(hook_irg_walk_blkwise,                   stat_irg_walk_blkwise);
	HOOK(hook_irg_block_walk,                     stat_irg_block_walk);
	HOOK(hook_merge_nodes,                        stat_merge_nodes);
	HOOK(hook_reassociate,                        stat_reassociate);
	HOOK(hook_lower,                              stat_lower);
	HOOK(hook_inline,                             stat_inline);
	HOOK(hook_tail_rec,                           stat_tail_rec);
	HOOK(hook_strength_red,                       stat_strength_red);
	HOOK(hook_dead_node_elim,                     stat_dead_node_elim);
	HOOK(hook_if_conversion,                      stat_if_conversion);
	HOOK(hook_func_call,                          stat_func_call);
	HOOK(hook_arch_dep_replace_mul_with_shifts,   stat_arch_dep_replace_mul_with_shifts);
	HOOK(hook_arch_dep_replace_division_by_const, stat_arch_dep_replace_division_by_const);

	/* the obstacks backing the counter and backend-data entries */
	obstack_init(&status->cnts);
	obstack_init(&status->be_data);

	/* create the hash-tables */
	status->irg_hash   = new_pset(graph_cmp, 8);
	status->ir_op_hash = new_pset(opcode_cmp_2, 1);

	/* create the wait queue */
	status->wait_q     = new_pdeq();

	if (enable_options & FIRMSTAT_COUNT_STRONG_OP) {
		/* build the pseudo-ops */

		_op_Phi0.code    = --num;
		_op_Phi0.name    = new_id_from_chars(X("Phi0"));

		_op_PhiM.code    = --num;
		_op_PhiM.name    = new_id_from_chars(X("PhiM"));

		_op_ProjM.code   = --num;
		_op_ProjM.name   = new_id_from_chars(X("ProjM"));

		_op_MulC.code    = --num;
		_op_MulC.name    = new_id_from_chars(X("MulC"));

		_op_DivC.code    = --num;
		_op_DivC.name    = new_id_from_chars(X("DivC"));

		_op_ModC.code    = --num;
		_op_ModC.name    = new_id_from_chars(X("ModC"));

		status->op_Phi0    = &_op_Phi0;
		status->op_PhiM    = &_op_PhiM;
		status->op_ProjM   = &_op_ProjM;
		status->op_MulC    = &_op_MulC;
		status->op_DivC    = &_op_DivC;
		status->op_ModC    = &_op_ModC;
	} else {
		/* NULL pointers disable the "strong op" counting */
		status->op_Phi0    = NULL;
		status->op_PhiM    = NULL;
		status->op_ProjM   = NULL;
		status->op_MulC    = NULL;
		status->op_DivC    = NULL;
		status->op_ModC    = NULL;
	}  /* if */

	/* for Florian: count the Sel depth */
	if (enable_options & FIRMSTAT_COUNT_SELS) {
		_op_SelSel.code    = --num;
		_op_SelSel.name    = new_id_from_chars(X("Sel(Sel)"));

		_op_SelSelSel.code = --num;
		_op_SelSelSel.name = new_id_from_chars(X("Sel(Sel(Sel))"));

		status->op_SelSel    = &_op_SelSel;
		status->op_SelSelSel = &_op_SelSelSel;
	} else {
		status->op_SelSel    = NULL;
		status->op_SelSelSel = NULL;
	}  /* if */

	/* register the dumper */
	stat_register_dumper(&simple_dumper);

	if (enable_options & FIRMSTAT_CSV_OUTPUT)
		stat_register_dumper(&csv_dumper);

	/* initialize the pattern hash */
	stat_init_pattern_history(enable_options & FIRMSTAT_PATTERN_ENABLED);

	/* initialize the Const options */
	if (enable_options & FIRMSTAT_COUNT_CONSTS)
		stat_init_const_cnt(status);

	/* distribution table for parameter counts */
	status->dist_param_cnt = stat_new_int_distrib_tbl();

	clear_optimization_counter();

#undef HOOK
#undef X
}  /* firm_init_stat */
2415
2416 /**
2417  * Frees all dumper structures.
2418  */
2419 static void stat_term_dumper(void)
2420 {
2421         dumper_t *dumper, *next_dumper;
2422
2423         for (dumper = status->dumper; dumper; /* iteration done in loop body */ ) {
2424                 if (dumper->func_map)
2425                         del_pset(dumper->func_map);
2426
2427                 next_dumper = dumper->next;
2428                 free(dumper);
2429                 dumper = next_dumper;
2430         }  /* for */
2431 }  /* stat_term_dumper */
2432
2433
2434 /* Terminates the statistics module, frees all memory. */
2435 void stat_term(void)
2436 {
2437         if (status != (stat_info_t *)&status_disable) {
2438                 obstack_free(&status->be_data, NULL);
2439                 obstack_free(&status->cnts, NULL);
2440
2441                 stat_term_dumper();
2442
2443                 xfree(status);
2444                 status = (stat_info_t *)&status_disable;
2445         }
2446 }  /* stat_term */
2447
2448 /* returns 1 if statistics were initialized, 0 otherwise */
2449 int stat_is_active(void)
2450 {
2451         return status != (stat_info_t *)&status_disable;
2452 }  /* stat_is_active */