Use foreach_pset().
[libfirm] / ir / stat / firmstat.c
1 /*
2  * Copyright (C) 1995-2010 University of Karlsruhe.  All right reserved.
3  *
4  * This file is part of libFirm.
5  *
6  * This file may be distributed and/or modified under the terms of the
7  * GNU General Public License version 2 as published by the Free Software
8  * Foundation and appearing in the file LICENSE.GPL included in the
9  * packaging of this file.
10  *
11  * Licensees holding valid libFirm Professional Edition licenses may use
12  * this file in accordance with the libFirm Commercial License.
13  * Agreement provided with the Software.
14  *
15  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17  * PURPOSE.
18  */
19
20 /**
21  * @file
22  * @brief   Statistics for Firm.
23  * @author  Michael Beck
24  */
25 #include "config.h"
26
27 #include <stdio.h>
28 #include <stdlib.h>
29 #include <string.h>
30
31 #include "irouts.h"
32 #include "irdump.h"
33 #include "hashptr.h"
34 #include "firmstat_t.h"
35 #include "irpass_t.h"
36 #include "pattern.h"
37 #include "dags.h"
38 #include "stat_dmp.h"
39 #include "xmalloc.h"
40 #include "irhooks.h"
41 #include "util.h"
42 #include "ircons.h"
43
44 /*
45  * need this to be static:
46  * Special pseudo Opcodes that we need to count some interesting cases
47  */
48
49 /**
50  * The Phi0, a node that is created during SSA construction
51  */
52 static ir_op _op_Phi0;
53
54 /** The PhiM, just to count memory Phi's. */
55 static ir_op _op_PhiM;
56
57 /** The Mul by Const node. */
58 static ir_op _op_MulC;
59
60 /** The Div by Const node. */
61 static ir_op _op_DivC;
62
63 /** The Div by Const node. */
64 static ir_op _op_ModC;
65
66 /** The memory Proj node. */
67 static ir_op _op_ProjM;
68
69 /** A Sel of a Sel */
70 static ir_op _op_SelSel;
71
72 /** A Sel of a Sel of a Sel */
73 static ir_op _op_SelSelSel;
74
75 /* ---------------------------------------------------------------------------------- */
76
/** Marks the begin of a statistic (hook) function. */
#define STAT_ENTER    ++status->recursive

/** Marks the end of a statistic (hook) functions. */
#define STAT_LEAVE    --status->recursive

/** Allows to enter a statistic function only when we are not already in a hook. */
#define STAT_ENTER_SINGLE    do { if (status->recursive > 0) return; ++status->recursive; } while (0)

/**
 * global status
 *
 * NOTE(review): status initially aliases a single zeroed word, so option
 * checks read as "disabled" before initialization — presumably the checked
 * members live at offset 0 of stat_info_t; confirm against firmstat_t.h.
 */
static const unsigned status_disable = 0;
static stat_info_t *status = (stat_info_t *)&status_disable;
91
92 /**
93  * Compare two elements of the opcode hash.
94  */
95 static int opcode_cmp(const void *elt, const void *key)
96 {
97         const node_entry_t *e1 = (const node_entry_t*)elt;
98         const node_entry_t *e2 = (const node_entry_t*)key;
99
100         return e1->op->code - e2->op->code;
101 }  /* opcode_cmp */
102
103 /**
104  * Compare two elements of the graph hash.
105  */
106 static int graph_cmp(const void *elt, const void *key)
107 {
108         const graph_entry_t *e1 = (const graph_entry_t*)elt;
109         const graph_entry_t *e2 = (const graph_entry_t*)key;
110
111         return e1->irg != e2->irg;
112 }  /* graph_cmp */
113
114 /**
115  * Compare two elements of the optimization hash.
116  */
117 static int opt_cmp(const void *elt, const void *key)
118 {
119         const opt_entry_t *e1 = (const opt_entry_t*)elt;
120         const opt_entry_t *e2 = (const opt_entry_t*)key;
121
122         return e1->op->code != e2->op->code;
123 }  /* opt_cmp */
124
125 /**
126  * Compare two elements of the block hash.
127  */
128 static int block_cmp(const void *elt, const void *key)
129 {
130         const block_entry_t *e1 = (const block_entry_t*)elt;
131         const block_entry_t *e2 = (const block_entry_t*)key;
132
133         /* it's enough to compare the block number */
134         return e1->block_nr != e2->block_nr;
135 }  /* block_cmp */
136
137 /**
138  * Compare two elements of the be_block hash.
139  */
140 static int be_block_cmp(const void *elt, const void *key)
141 {
142         const be_block_entry_t *e1 = (const be_block_entry_t*)elt;
143         const be_block_entry_t *e2 = (const be_block_entry_t*)key;
144
145         return e1->block_nr != e2->block_nr;
146 }  /* be_block_cmp */
147
148 /**
149  * Compare two elements of reg pressure hash.
150  */
151 static int reg_pressure_cmp(const void *elt, const void *key)
152 {
153         const reg_pressure_entry_t *e1 = (const reg_pressure_entry_t*)elt;
154         const reg_pressure_entry_t *e2 = (const reg_pressure_entry_t*)key;
155
156         return e1->class_name != e2->class_name;
157 }  /* reg_pressure_cmp */
158
159 /**
160  * Compare two elements of the perm_stat hash.
161  */
162 static int perm_stat_cmp(const void *elt, const void *key)
163 {
164         const perm_stat_entry_t *e1 = (const perm_stat_entry_t*)elt;
165         const perm_stat_entry_t *e2 = (const perm_stat_entry_t*)key;
166
167         return e1->perm != e2->perm;
168 }  /* perm_stat_cmp */
169
170 /**
171  * Compare two elements of the perm_class hash.
172  */
173 static int perm_class_cmp(const void *elt, const void *key)
174 {
175         const perm_class_entry_t *e1 = (const perm_class_entry_t*)elt;
176         const perm_class_entry_t *e2 = (const perm_class_entry_t*)key;
177
178         return e1->class_name != e2->class_name;
179 }  /* perm_class_cmp */
180
181 /**
182  * Compare two elements of the ir_op hash.
183  */
184 static int opcode_cmp_2(const void *elt, const void *key)
185 {
186         const ir_op *e1 = (const ir_op*)elt;
187         const ir_op *e2 = (const ir_op*)key;
188
189         return e1->code != e2->code;
190 }  /* opcode_cmp_2 */
191
192 /**
193  * Compare two elements of the address_mark set.
194  */
195 static int address_mark_cmp(const void *elt, const void *key, size_t size)
196 {
197         const address_mark_entry_t *e1 = (const address_mark_entry_t*)elt;
198         const address_mark_entry_t *e2 = (const address_mark_entry_t*)key;
199         (void) size;
200
201         /* compare only the nodes, the rest is used as data container */
202         return e1->node != e2->node;
203 }  /* address_mark_cmp */
204
205 /**
206  * Clear all counter in a node_entry_t.
207  */
208 static void opcode_clear_entry(node_entry_t *elem)
209 {
210         cnt_clr(&elem->cnt_alive);
211         cnt_clr(&elem->new_node);
212         cnt_clr(&elem->into_Id);
213         cnt_clr(&elem->normalized);
214 }  /* opcode_clear_entry */
215
216 /**
217  * Returns the associates node_entry_t for an ir_op (and allocates
218  * one if not yet available).
219  *
220  * @param op    the IR operation
221  * @param hmap  a hash map containing ir_op* -> node_entry_t*
222  */
223 static node_entry_t *opcode_get_entry(const ir_op *op, hmap_node_entry_t *hmap)
224 {
225         node_entry_t key;
226         node_entry_t *elem;
227
228         key.op = op;
229
230         elem = (node_entry_t*)pset_find(hmap, &key, op->code);
231         if (elem)
232                 return elem;
233
234         elem = OALLOCZ(&status->cnts, node_entry_t);
235
236         /* clear counter */
237         opcode_clear_entry(elem);
238
239         elem->op = op;
240
241         return (node_entry_t*)pset_insert(hmap, elem, op->code);
242 }  /* opcode_get_entry */
243
244 /**
245  * Returns the associates ir_op for an opcode
246  *
247  * @param code  the IR opcode
248  * @param hmap  the hash map containing opcode -> ir_op*
249  */
250 static ir_op *opcode_find_entry(ir_opcode code, hmap_ir_op *hmap)
251 {
252         ir_op key;
253
254         key.code = code;
255         return (ir_op*)pset_find(hmap, &key, code);
256 }  /* opcode_find_entry */
257
258 /**
259  * Clears all counter in a graph_entry_t.
260  *
261  * @param elem  the graph entry
262  * @param all   if non-zero, clears all counters, else leave accumulated ones
263  */
264 static void graph_clear_entry(graph_entry_t *elem, int all)
265 {
266         int i;
267
268         /* clear accumulated / non-accumulated counter */
269         for (i = all ? 0 : _gcnt_non_acc; i < _gcnt_last; ++i) {
270                 cnt_clr(&elem->cnt[i]);
271         }  /* for */
272
273         if (elem->block_hash) {
274                 del_pset(elem->block_hash);
275                 elem->block_hash = NULL;
276         }  /* if */
277
278         obstack_free(&elem->recalc_cnts, NULL);
279         obstack_init(&elem->recalc_cnts);
280 }  /* graph_clear_entry */
281
282 /**
283  * Returns the associated graph_entry_t for an IR graph.
284  *
285  * @param irg   the IR graph, NULL for the global counter
286  * @param hmap  the hash map containing ir_graph* -> graph_entry_t*
287  */
288 static graph_entry_t *graph_get_entry(ir_graph *irg, hmap_graph_entry_t *hmap)
289 {
290         graph_entry_t key;
291         graph_entry_t *elem;
292         size_t i;
293
294         key.irg = irg;
295
296         elem = (graph_entry_t*)pset_find(hmap, &key, hash_ptr(irg));
297
298         if (elem) {
299                 /* create hash map backend block information */
300                 if (! elem->be_block_hash)
301                         elem->be_block_hash = new_pset(be_block_cmp, 5);
302
303                 return elem;
304         }  /* if */
305
306         /* allocate a new one */
307         elem = OALLOCZ(&status->cnts, graph_entry_t);
308         obstack_init(&elem->recalc_cnts);
309
310         /* clear counter */
311         graph_clear_entry(elem, 1);
312
313         /* new hash table for opcodes here  */
314         elem->opcode_hash   = new_pset(opcode_cmp, 5);
315         elem->address_mark  = new_set(address_mark_cmp, 5);
316         elem->irg           = irg;
317
318         /* these hash tables are created on demand */
319         elem->block_hash = NULL;
320
321         for (i = 0; i != ARRAY_SIZE(elem->opt_hash); ++i)
322                 elem->opt_hash[i] = new_pset(opt_cmp, 4);
323
324         return (graph_entry_t*)pset_insert(hmap, elem, hash_ptr(irg));
325 }  /* graph_get_entry */
326
327 /**
328  * Clear all counter in an opt_entry_t.
329  */
330 static void opt_clear_entry(opt_entry_t *elem)
331 {
332         cnt_clr(&elem->count);
333 }  /* opt_clear_entry */
334
335 /**
336  * Returns the associated opt_entry_t for an IR operation.
337  *
338  * @param op    the IR operation
339  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
340  */
341 static opt_entry_t *opt_get_entry(const ir_op *op, hmap_opt_entry_t *hmap)
342 {
343         opt_entry_t key;
344         opt_entry_t *elem;
345
346         key.op = op;
347
348         elem = (opt_entry_t*)pset_find(hmap, &key, op->code);
349         if (elem)
350                 return elem;
351
352         elem = OALLOCZ(&status->cnts, opt_entry_t);
353
354         /* clear new counter */
355         opt_clear_entry(elem);
356
357         elem->op = op;
358
359         return (opt_entry_t*)pset_insert(hmap, elem, op->code);
360 }  /* opt_get_entry */
361
362 /**
363  * clears all counter in a block_entry_t
364  */
365 static void block_clear_entry(block_entry_t *elem)
366 {
367         int i;
368
369         for (i = 0; i < _bcnt_last; ++i)
370                 cnt_clr(&elem->cnt[i]);
371 }  /* block_clear_entry */
372
373 /**
374  * Returns the associated block_entry_t for an block.
375  *
376  * @param block_nr  an IR  block number
377  * @param hmap      a hash map containing long -> block_entry_t
378  */
379 static block_entry_t *block_get_entry(struct obstack *obst, long block_nr, hmap_block_entry_t *hmap)
380 {
381         block_entry_t key;
382         block_entry_t *elem;
383
384         key.block_nr = block_nr;
385
386         elem = (block_entry_t*)pset_find(hmap, &key, block_nr);
387         if (elem)
388                 return elem;
389
390         elem = OALLOCZ(obst, block_entry_t);
391
392         /* clear new counter */
393         block_clear_entry(elem);
394
395         elem->block_nr = block_nr;
396
397         return (block_entry_t*)pset_insert(hmap, elem, block_nr);
398 }  /* block_get_entry */
399
400 /**
401  * Clear all sets in be_block_entry_t.
402  */
403 static void be_block_clear_entry(be_block_entry_t *elem)
404 {
405         if (elem->reg_pressure)
406                 del_pset(elem->reg_pressure);
407
408         if (elem->sched_ready)
409                 stat_delete_distrib_tbl(elem->sched_ready);
410
411         if (elem->perm_class_stat)
412                 del_pset(elem->perm_class_stat);
413
414         elem->reg_pressure    = new_pset(reg_pressure_cmp, 5);
415         elem->sched_ready     = stat_new_int_distrib_tbl();
416         elem->perm_class_stat = new_pset(perm_class_cmp, 5);
417 }  /* be_block_clear_entry */
418
419 /**
420  * Returns the associated be_block_entry_t for an block.
421  *
422  * @param block_nr  an IR  block number
423  * @param hmap      a hash map containing long -> be_block_entry_t
424  */
425 static be_block_entry_t *be_block_get_entry(struct obstack *obst, long block_nr, hmap_be_block_entry_t *hmap)
426 {
427         be_block_entry_t key;
428         be_block_entry_t *elem;
429
430         key.block_nr = block_nr;
431
432         elem = (be_block_entry_t*)pset_find(hmap, &key, block_nr);
433         if (elem)
434                 return elem;
435
436         elem = OALLOCZ(obst, be_block_entry_t);
437
438         /* clear new counter */
439         be_block_clear_entry(elem);
440
441         elem->block_nr = block_nr;
442
443         return (be_block_entry_t*)pset_insert(hmap, elem, block_nr);
444 }  /* be_block_get_entry */
445
446 /**
447  * clears all sets in perm_class_entry_t
448  */
449 static void perm_class_clear_entry(perm_class_entry_t *elem)
450 {
451         if (elem->perm_stat)
452                 del_pset(elem->perm_stat);
453
454         elem->perm_stat = new_pset(perm_stat_cmp, 5);
455 }  /* perm_class_clear_entry */
456
457 /**
458  * Returns the associated perm_class entry for a register class.
459  *
460  * @param class_name  the register class name
461  * @param hmap        a hash map containing class_name -> perm_class_entry_t
462  */
463 static perm_class_entry_t *perm_class_get_entry(struct obstack *obst, const char *class_name,
464                                                 hmap_perm_class_entry_t *hmap)
465 {
466         perm_class_entry_t key;
467         perm_class_entry_t *elem;
468
469         key.class_name = class_name;
470
471         elem = (perm_class_entry_t*)pset_find(hmap, &key, hash_ptr(class_name));
472         if (elem)
473                 return elem;
474
475         elem = OALLOCZ(obst, perm_class_entry_t);
476
477         /* clear new counter */
478         perm_class_clear_entry(elem);
479
480         elem->class_name = class_name;
481
482         return (perm_class_entry_t*)pset_insert(hmap, elem, hash_ptr(class_name));
483 }  /* perm_class_get_entry */
484
485 /**
486  * clears all sets in perm_stat_entry_t
487  */
488 static void perm_stat_clear_entry(perm_stat_entry_t *elem)
489 {
490         if (elem->chains)
491                 stat_delete_distrib_tbl(elem->chains);
492
493         if (elem->cycles)
494                 stat_delete_distrib_tbl(elem->cycles);
495
496         elem->chains = stat_new_int_distrib_tbl();
497         elem->cycles = stat_new_int_distrib_tbl();
498 }  /* perm_stat_clear_entry */
499
500 /**
501  * Returns the associated perm_stat entry for a perm.
502  *
503  * @param perm      the perm node
504  * @param hmap      a hash map containing perm -> perm_stat_entry_t
505  */
506 static perm_stat_entry_t *perm_stat_get_entry(struct obstack *obst, ir_node *perm, hmap_perm_stat_entry_t *hmap)
507 {
508         perm_stat_entry_t key;
509         perm_stat_entry_t *elem;
510
511         key.perm = perm;
512
513         elem = (perm_stat_entry_t*)pset_find(hmap, &key, hash_ptr(perm));
514         if (elem)
515                 return elem;
516
517         elem = OALLOCZ(obst, perm_stat_entry_t);
518
519         /* clear new counter */
520         perm_stat_clear_entry(elem);
521
522         elem->perm = perm;
523
524         return (perm_stat_entry_t*)pset_insert(hmap, elem, hash_ptr(perm));
525 }  /* perm_stat_get_entry */
526
527 /**
528  * Clear optimizations counter,
529  */
530 static void clear_optimization_counter(void)
531 {
532         int i;
533         for (i = 0; i < FS_OPT_MAX; ++i)
534                 cnt_clr(&status->num_opts[i]);
535 }
536
537 /**
538  * Returns the ir_op for an IR-node,
539  * handles special cases and return pseudo op codes.
540  *
541  * @param none  an IR node
542  */
543 static ir_op *stat_get_irn_op(ir_node *node)
544 {
545         ir_op *op = get_irn_op(node);
546         unsigned opc = op->code;
547
548         switch (opc) {
549         case iro_Phi:
550                 if (get_irn_arity(node) == 0) {
551                         /* special case, a Phi0 node, count on extra counter */
552                         op = status->op_Phi0 ? status->op_Phi0 : op;
553                 } else if (get_irn_mode(node) == mode_M) {
554                         /* special case, a Memory Phi node, count on extra counter */
555                         op = status->op_PhiM ? status->op_PhiM : op;
556                 }  /* if */
557                 break;
558         case iro_Proj:
559                 if (get_irn_mode(node) == mode_M) {
560                         /* special case, a Memory Proj node, count on extra counter */
561                         op = status->op_ProjM ? status->op_ProjM : op;
562                 }  /* if */
563                 break;
564         case iro_Mul:
565                 if (is_Const(get_Mul_left(node)) || is_Const(get_Mul_right(node))) {
566                         /* special case, a Multiply by a const, count on extra counter */
567                         op = status->op_MulC ? status->op_MulC : op;
568                 }  /* if */
569                 break;
570         case iro_Div:
571                 if (is_Const(get_Div_right(node))) {
572                         /* special case, a division by a const, count on extra counter */
573                         op = status->op_DivC ? status->op_DivC : op;
574                 }  /* if */
575                 break;
576         case iro_Mod:
577                 if (is_Const(get_Mod_right(node))) {
578                         /* special case, a module by a const, count on extra counter */
579                         op = status->op_ModC ? status->op_ModC : op;
580                 }  /* if */
581                 break;
582         case iro_Sel:
583                 if (is_Sel(get_Sel_ptr(node))) {
584                         /* special case, a Sel of a Sel, count on extra counter */
585                         op = status->op_SelSel ? status->op_SelSel : op;
586                         if (is_Sel(get_Sel_ptr(get_Sel_ptr(node)))) {
587                                 /* special case, a Sel of a Sel of a Sel, count on extra counter */
588                                 op = status->op_SelSelSel ? status->op_SelSelSel : op;
589                         }  /* if */
590                 }  /* if */
591                 break;
592         default:
593                 break;
594         }  /* switch */
595
596         return op;
597 }  /* stat_get_irn_op */
598
599 /**
600  * update the block counter
601  */
602 static void undate_block_info(ir_node *node, graph_entry_t *graph)
603 {
604         ir_op *op = get_irn_op(node);
605         ir_node *block;
606         block_entry_t *b_entry;
607         int i, arity;
608
609         /* check for block */
610         if (op == op_Block) {
611                 arity = get_irn_arity(node);
612                 b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(node), graph->block_hash);
613                 /* mark start end block to allow to filter them out */
614                 if (node == get_irg_start_block(graph->irg))
615                         b_entry->is_start = 1;
616                 else if (node == get_irg_end_block(graph->irg))
617                         b_entry->is_end = 1;
618
619                 /* count all incoming edges */
620                 for (i = 0; i < arity; ++i) {
621                         ir_node *pred = get_irn_n(node, i);
622                         ir_node *other_block = get_nodes_block(pred);
623                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
624
625                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
626                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
627                 }  /* for */
628                 return;
629         }  /* if */
630
631         block   = get_nodes_block(node);
632         b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(block), graph->block_hash);
633
634         if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
635                 /* count data Phi per block */
636                 cnt_inc(&b_entry->cnt[bcnt_phi_data]);
637         }  /* if */
638
639         /* we have a new node in our block */
640         cnt_inc(&b_entry->cnt[bcnt_nodes]);
641
642         /* don't count keep-alive edges */
643         if (is_End(node))
644                 return;
645
646         arity = get_irn_arity(node);
647
648         for (i = 0; i < arity; ++i) {
649                 ir_node *pred = get_irn_n(node, i);
650                 ir_node *other_block;
651
652                 other_block = get_nodes_block(pred);
653
654                 if (other_block == block)
655                         cnt_inc(&b_entry->cnt[bcnt_edges]); /* a in block edge */
656                 else {
657                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
658
659                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
660                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
661                 }  /* if */
662         }  /* for */
663 }  /* undate_block_info */
664
665 /**
666  * Calculates how many arguments of the call are const, updates
667  * param distribution.
668  */
669 static void analyse_params_of_Call(graph_entry_t *graph, ir_node *call)
670 {
671         int i, num_const_args = 0, num_local_adr = 0;
672         int n = get_Call_n_params(call);
673
674         for (i = 0; i < n; ++i) {
675                 ir_node *param = get_Call_param(call, i);
676
677                 if (is_irn_constlike(param))
678                         ++num_const_args;
679                 else if (is_Sel(param)) {
680                         ir_node *base = param;
681
682                         do {
683                                 base = get_Sel_ptr(base);
684                         } while (is_Sel(base));
685
686                         if (base == get_irg_frame(current_ir_graph))
687                                 ++num_local_adr;
688                 }
689
690         }  /* for */
691
692         if (num_const_args > 0)
693                 cnt_inc(&graph->cnt[gcnt_call_with_cnst_arg]);
694         if (num_const_args == n)
695                 cnt_inc(&graph->cnt[gcnt_call_with_all_cnst_arg]);
696         if (num_local_adr > 0)
697                 cnt_inc(&graph->cnt[gcnt_call_with_local_adr]);
698
699         stat_inc_int_distrib_tbl(status->dist_param_cnt, n);
700 }  /* analyse_params_of_Call */
701
702 /**
703  * Update info on calls.
704  *
705  * @param call   The call
706  * @param graph  The graph entry containing the call
707  */
708 static void stat_update_call(ir_node *call, graph_entry_t *graph)
709 {
710         ir_node   *block = get_nodes_block(call);
711         ir_node   *ptr = get_Call_ptr(call);
712         ir_entity *ent = NULL;
713         ir_graph  *callee = NULL;
714
715         /*
716          * If the block is bad, the whole subgraph will collapse later
717          * so do not count this call.
718          * This happens in dead code.
719          */
720         if (is_Bad(block))
721                 return;
722
723         cnt_inc(&graph->cnt[gcnt_all_calls]);
724
725         /* found a call, this function is not a leaf */
726         graph->is_leaf = 0;
727
728         if (is_SymConst(ptr)) {
729                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
730                         /* ok, we seems to know the entity */
731                         ent = get_SymConst_entity(ptr);
732                         callee = get_entity_irg(ent);
733
734                         /* it is recursive, if it calls at least once */
735                         if (callee == graph->irg)
736                                 graph->is_recursive = 1;
737                         if (callee == NULL)
738                                 cnt_inc(&graph->cnt[gcnt_external_calls]);
739                 }  /* if */
740         } else {
741                 /* indirect call, be could not predict */
742                 cnt_inc(&graph->cnt[gcnt_indirect_calls]);
743
744                 /* NOT a leaf call */
745                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
746         }  /* if */
747
748         /* check, if it's a chain-call: Then, the call-block
749          * must dominate the end block. */
750         {
751                 ir_node *curr = get_irg_end_block(graph->irg);
752                 int depth = get_Block_dom_depth(block);
753
754                 for (; curr != block && get_Block_dom_depth(curr) > depth;) {
755                         curr = get_Block_idom(curr);
756
757                         if (! curr || !is_Block(curr))
758                                 break;
759                 }  /* for */
760
761                 if (curr != block)
762                         graph->is_chain_call = 0;
763         }
764
765         /* check, if the callee is a leaf */
766         if (callee) {
767                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
768
769                 if (called->is_analyzed) {
770                         if (! called->is_leaf)
771                                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
772                 }  /* if */
773         }  /* if */
774
775         analyse_params_of_Call(graph, call);
776 }  /* stat_update_call */
777
778 /**
779  * Update info on calls for graphs on the wait queue.
780  */
781 static void stat_update_call_2(ir_node *call, graph_entry_t *graph)
782 {
783         ir_node   *block = get_nodes_block(call);
784         ir_node   *ptr = get_Call_ptr(call);
785         ir_entity *ent = NULL;
786         ir_graph  *callee = NULL;
787
788         /*
789          * If the block is bad, the whole subgraph will collapse later
790          * so do not count this call.
791          * This happens in dead code.
792          */
793         if (is_Bad(block))
794                 return;
795
796         if (is_SymConst(ptr)) {
797                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
798                         /* ok, we seems to know the entity */
799                         ent = get_SymConst_entity(ptr);
800                         callee = get_entity_irg(ent);
801                 }  /* if */
802         }  /* if */
803
804         /* check, if the callee is a leaf */
805         if (callee) {
806                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
807
808                 assert(called->is_analyzed);
809
810                 if (! called->is_leaf)
811                         graph->is_leaf_call = LCS_NON_LEAF_CALL;
812         } else
813                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
814 }  /* stat_update_call_2 */
815
816 /**
817  * Find the base address and entity of an Sel node.
818  *
819  * @param sel  the node
820  *
821  * @return the base address.
822  */
823 static ir_node *find_base_adr(ir_node *sel)
824 {
825         ir_node *ptr = get_Sel_ptr(sel);
826
827         while (is_Sel(ptr)) {
828                 sel = ptr;
829                 ptr = get_Sel_ptr(sel);
830         }
831         return ptr;
832 }  /* find_base_adr */
833
834 /**
835  * Update info on Load/Store address statistics.
836  */
837 static void stat_update_address(ir_node *node, graph_entry_t *graph)
838 {
839         unsigned opc = get_irn_opcode(node);
840         ir_node *base;
841         ir_graph *irg;
842
843         switch (opc) {
844         case iro_SymConst:
845                 /* a global address */
846                 cnt_inc(&graph->cnt[gcnt_global_adr]);
847                 break;
848         case iro_Sel:
849                 base = find_base_adr(node);
850                 irg = current_ir_graph;
851                 if (base == get_irg_frame(irg)) {
852                         /* a local Variable. */
853                         cnt_inc(&graph->cnt[gcnt_local_adr]);
854                 } else {
855                         /* Pointer access */
856                         if (is_Proj(base) && skip_Proj(get_Proj_pred(base)) == get_irg_start(irg)) {
857                                 /* pointer access through parameter, check for THIS */
858                                 ir_entity *ent = get_irg_entity(irg);
859
860                                 if (ent != NULL) {
861                                         ir_type *ent_tp = get_entity_type(ent);
862
863                                         if (get_method_calling_convention(ent_tp) & cc_this_call) {
864                                                 if (get_Proj_proj(base) == 0) {
865                                                         /* THIS pointer */
866                                                         cnt_inc(&graph->cnt[gcnt_this_adr]);
867                                                         goto end_parameter;
868                                                 }  /* if */
869                                         }  /* if */
870                                 }  /* if */
871                                 /* other parameter */
872                                 cnt_inc(&graph->cnt[gcnt_param_adr]);
873 end_parameter: ;
874                         } else {
875                                 /* unknown Pointer access */
876                                 cnt_inc(&graph->cnt[gcnt_other_adr]);
877                         }  /* if */
878                 }  /* if */
879         default:
880                 break;
881         }  /* switch */
882 }  /* stat_update_address */
883
884 /**
885  * Walker for reachable nodes count.
886  */
887 static void update_node_stat(ir_node *node, void *env)
888 {
889         graph_entry_t *graph = (graph_entry_t*)env;
890         node_entry_t *entry;
891
892         ir_op *op = stat_get_irn_op(node);
893         int i, arity = get_irn_arity(node);
894
895         entry = opcode_get_entry(op, graph->opcode_hash);
896
897         cnt_inc(&entry->cnt_alive);
898         cnt_add_i(&graph->cnt[gcnt_edges], arity);
899
900         /* count block edges */
901         undate_block_info(node, graph);
902
903         /* handle statistics for special node types */
904
905         switch (op->code) {
906         case iro_Call:
907                 /* check for properties that depends on calls like recursion/leaf/indirect call */
908                 stat_update_call(node, graph);
909                 break;
910         case iro_Load:
911                 /* check address properties */
912                 stat_update_address(get_Load_ptr(node), graph);
913                 break;
914         case iro_Store:
915                 /* check address properties */
916                 stat_update_address(get_Store_ptr(node), graph);
917                 break;
918         case iro_Phi:
919                 /* check for non-strict Phi nodes */
920                 for (i = arity - 1; i >= 0; --i) {
921                         ir_node *pred = get_Phi_pred(node, i);
922                         if (is_Unknown(pred)) {
923                                 /* found an Unknown predecessor, graph is not strict */
924                                 graph->is_strict = 0;
925                                 break;
926                         }
927                 }
928         default:
929                 break;
930         }  /* switch */
931
932         /* we want to count the constant IN nodes, not the CSE'ed constant's itself */
933         if (status->stat_options & FIRMSTAT_COUNT_CONSTS) {
934                 int i;
935
936                 for (i = get_irn_arity(node) - 1; i >= 0; --i) {
937                         ir_node *pred = get_irn_n(node, i);
938
939                         if (is_Const(pred)) {
940                                 /* check properties of constants */
941                                 stat_update_const(status, pred, graph);
942                         }  /* if */
943                 }  /* for */
944         }  /* if */
945 }  /* update_node_stat */
946
947 /**
948  * Walker for reachable nodes count for graphs on the wait_q.
949  */
950 static void update_node_stat_2(ir_node *node, void *env)
951 {
952         graph_entry_t *graph = (graph_entry_t*)env;
953
954         /* check for properties that depends on calls like recursion/leaf/indirect call */
955         if (is_Call(node))
956                 stat_update_call_2(node, graph);
957 }  /* update_node_stat_2 */
958
959 /**
960  * Get the current address mark.
961  */
962 static unsigned get_adr_mark(graph_entry_t *graph, ir_node *node)
963 {
964         address_mark_entry_t const val = { node, 0 };
965         address_mark_entry_t *value = set_find(address_mark_entry_t, graph->address_mark, &val, sizeof(val), hash_ptr(node));
966
967         return value ? value->mark : 0;
968 }  /* get_adr_mark */
969
970 /**
971  * Set the current address mark.
972  */
973 static void set_adr_mark(graph_entry_t *graph, ir_node *node, unsigned val)
974 {
975         address_mark_entry_t const value = { node, val };
976         set_insert(address_mark_entry_t, graph->address_mark, &value, sizeof(value), hash_ptr(node));
977 }  /* set_adr_mark */
978
979 #undef DUMP_ADR_MODE
980
981 #ifdef DUMP_ADR_MODE
/**
 * a vcg attribute hook: Color a node with a different color if
 * it's identified as a part of an address expression or at least referenced
 * by an address expression.
 *
 * @param F      the output file
 * @param node   the node to be colored
 * @param local  if non-NULL, this local copy is colored instead of node
 *
 * @return non-zero if a color attribute was written, 0 to use the default
 */
static int stat_adr_mark_hook(FILE *F, ir_node *node, ir_node *local)
{
	ir_node *n           = local ? local : node;
	ir_graph *irg        = get_irn_irg(n);
	graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
	unsigned mark        = get_adr_mark(graph, n);

	/* pure address computation */
	if (mark & MARK_ADDRESS_CALC)
		fprintf(F, "color: purple");
	/* referenced by address calculations only */
	else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
		fprintf(F, "color: pink");
	/* referenced by both address and non-address users */
	else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
		fprintf(F, "color: lightblue");
	else
		return 0;

	/* I know the color! */
	return 1;
}  /* stat_adr_mark_hook */
1006 #endif /* DUMP_ADR_MODE */
1007
1008 /**
1009  * Return the "operational" mode of a Firm node.
1010  */
1011 static ir_mode *get_irn_op_mode(ir_node *node)
1012 {
1013         switch (get_irn_opcode(node)) {
1014         case iro_Load:
1015                 return get_Load_mode(node);
1016         case iro_Store:
1017                 return get_irn_mode(get_Store_value(node));
1018         case iro_Div:
1019                 return get_irn_mode(get_Div_left(node));
1020         case iro_Mod:
1021                 return get_irn_mode(get_Mod_left(node));
1022         case iro_Cmp:
1023                 /* Cmp is no address calculation, or is it? */
1024         default:
1025                 return get_irn_mode(node);
1026         }  /* switch */
1027 }  /* get_irn_op_mode */
1028
/**
 * Post-walker that marks every node that is an address calculation.
 *
 * Users of a node must be visited first. We ensure this by
 * calling it in the post of an outs walk. This should work even in cycles,
 * while the normal pre-walk will not.
 *
 * @param node  the visited IR node
 * @param env   the graph_entry_t of the graph being walked
 */
static void mark_address_calc(ir_node *node, void *env)
{
	graph_entry_t *graph = (graph_entry_t*)env;
	ir_mode *mode = get_irn_op_mode(node);
	int i, n;
	unsigned mark_preds = MARK_REF_NON_ADR;

	/* non-data nodes never take part in an address computation */
	if (! mode_is_data(mode))
		return;

	if (mode_is_reference(mode)) {
		/* a reference is calculated here, we are sure */
		set_adr_mark(graph, node, MARK_ADDRESS_CALC);

		mark_preds = MARK_REF_ADR;
	} else {
		unsigned mark = get_adr_mark(graph, node);

		if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR) {
			/*
			 * this node has no reference mode, but is only
			 * referenced by address calculations
			 */
			mark_preds = MARK_REF_ADR;
		}  /* if */
	}  /* if */

	/* mark all predecessors: their marks accumulate over all users,
	   which is why users must be processed first (outs post-walk) */
	for (i = 0, n = get_irn_arity(node); i < n; ++i) {
		ir_node *pred = get_irn_n(node, i);

		mode = get_irn_op_mode(pred);
		if (! mode_is_data(mode))
			continue;

		set_adr_mark(graph, pred, get_adr_mark(graph, pred) | mark_preds);
	}  /* for */
}  /* mark_address_calc */
1074
1075 /**
1076  * Post-walker that marks every node that is an address calculation.
1077  *
1078  * Users of a node must be visited first. We ensure this by
1079  * calling it in the post of an outs walk. This should work even in cycles,
1080  * while the normal pre-walk will not.
1081  */
1082 static void count_adr_ops(ir_node *node, void *env)
1083 {
1084         graph_entry_t *graph = (graph_entry_t*)env;
1085         unsigned mark        = get_adr_mark(graph, node);
1086
1087         if (mark & MARK_ADDRESS_CALC)
1088                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1089         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1090                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1091         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1092                 cnt_inc(&graph->cnt[gcnt_all_adr_ops]);
1093 }  /* count_adr_ops */
1094
/**
 * Called for every graph when the graph is either deleted or stat_dump_snapshot()
 * is called, must recalculate all statistic info.
 *
 * @param global    The global entry
 * @param graph     The current entry
 */
static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph)
{
	node_entry_t *entry;
	int i;

	/* clear first the alive counter in the graph */
	foreach_pset(graph->opcode_hash, node_entry_t*, entry) {
		cnt_clr(&entry->cnt_alive);
	}  /* foreach_pset */

	/* set pessimistic values */
	graph->is_leaf       = 1;
	graph->is_leaf_call  = LCS_UNKNOWN;
	graph->is_recursive  = 0;
	graph->is_chain_call = 1;
	graph->is_strict     = 1;

	/* create new block counter */
	/* NOTE(review): a previously created block_hash does not appear to be
	   freed here -- verify ownership elsewhere */
	graph->block_hash = new_pset(block_cmp, 5);

	/* we need dominator info */
	if (graph->irg != get_const_code_irg()) {
		assure_doms(graph->irg);
	}  /* if */

	/* count the nodes in the graph */
	irg_walk_graph(graph->irg, update_node_stat, NULL, graph);

#if 0
	/* Uncomment this code if chain-call means call exact one. */
	entry = opcode_get_entry(op_Call, graph->opcode_hash);

	/* check if we have more than 1 call */
	if (cnt_gt(entry->cnt_alive, 1))
		graph->is_chain_call = 0;
#endif

	/* recursive functions are never chain calls, leafs don't have calls */
	if (graph->is_recursive || graph->is_leaf)
		graph->is_chain_call = 0;

	/* assume we walk every graph only ONCE, we could sum here the global count */
	foreach_pset(graph->opcode_hash, node_entry_t*, entry) {
		node_entry_t *g_entry = opcode_get_entry(entry->op, global->opcode_hash);

		/* update the node counter */
		cnt_add(&g_entry->cnt_alive, &entry->cnt_alive);
	}  /* foreach_pset */

	/* count the number of address calculation */
	if (graph->irg != get_const_code_irg()) {
		ir_graph *rem = current_ir_graph;

		assure_irg_outs(graph->irg);

		/* Must be done an the outs graph */
		current_ir_graph = graph->irg;
		irg_out_walk(get_irg_start(graph->irg), NULL, mark_address_calc, graph);
		current_ir_graph = rem;

#ifdef DUMP_ADR_MODE
		/* register the vcg hook and dump the graph for test */
		set_dump_node_vcgattr_hook(stat_adr_mark_hook);
		dump_ir_block_graph(graph->irg, "-adr");
		set_dump_node_vcgattr_hook(NULL);
#endif /* DUMP_ADR_MODE */

		irg_walk_graph(graph->irg, NULL, count_adr_ops, graph);
	}  /* if */

	/* count the DAG's */
	if (status->stat_options & FIRMSTAT_COUNT_DAG)
		count_dags_in_graph(global, graph);

	/* calculate the patterns of this graph */
	stat_calc_pattern_history(graph->irg);

	/* leaf function did not call others */
	if (graph->is_leaf)
		graph->is_leaf_call = LCS_NON_LEAF_CALL;
	else if (graph->is_leaf_call == LCS_UNKNOWN) {
		/* we still don't know if this graph calls leaf-functions, so enqueue */
		pdeq_putl(status->wait_q, graph);
	}  /* if */

	/* we have analyzed this graph */
	graph->is_analyzed = 1;

	/* accumulate all counter's */
	for (i = 0; i < _gcnt_last; ++i)
		cnt_add(&global->cnt[i], &graph->cnt[i]);
}  /* update_graph_stat */
1194
1195 /**
1196  * Called for every graph that was on the wait_q in stat_dump_snapshot()
1197  * must finish all statistic info calculations.
1198  *
1199  * @param global    The global entry
1200  * @param graph     The current entry
1201  */
1202 static void update_graph_stat_2(graph_entry_t *global, graph_entry_t *graph)
1203 {
1204         (void) global;
1205         if (graph->is_deleted) {
1206                 /* deleted, ignore */
1207                 return;
1208         }
1209
1210         if (graph->irg) {
1211                 /* count the nodes in the graph */
1212                 irg_walk_graph(graph->irg, update_node_stat_2, NULL, graph);
1213
1214                 if (graph->is_leaf_call == LCS_UNKNOWN)
1215                         graph->is_leaf_call = LCS_LEAF_CALL;
1216         }  /* if */
1217 }  /* update_graph_stat_2 */
1218
1219 /**
1220  * Register a dumper.
1221  */
1222 static void stat_register_dumper(const dumper_t *dumper)
1223 {
1224         dumper_t *p = XMALLOC(dumper_t);
1225         *p = *dumper;
1226
1227         p->next        = status->dumper;
1228         p->status      = status;
1229         status->dumper = p;
1230
1231         /* FIXME: memory leak */
1232 }  /* stat_register_dumper */
1233
1234 /**
1235  * Dumps the statistics of an IR graph.
1236  */
1237 static void stat_dump_graph(graph_entry_t *entry)
1238 {
1239         dumper_t *dumper;
1240
1241         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1242                 if (dumper->dump_graph)
1243                         dumper->dump_graph(dumper, entry);
1244         }  /* for */
1245 }  /* stat_dump_graph */
1246
1247 /**
1248  * Calls all registered dumper functions.
1249  */
1250 static void stat_dump_registered(graph_entry_t *entry)
1251 {
1252         dumper_t *dumper;
1253
1254         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1255                 if (dumper->func_map) {
1256                         dump_graph_FUNC func;
1257
1258                         foreach_pset(dumper->func_map, dump_graph_FUNC, func)
1259                                 func(dumper, entry);
1260                 }  /* if */
1261         }  /* for */
1262 }  /* stat_dump_registered */
1263
1264 /**
1265  * Dumps a constant table.
1266  */
1267 static void stat_dump_consts(const constant_info_t *tbl)
1268 {
1269         dumper_t *dumper;
1270
1271         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1272                 if (dumper->dump_const_tbl)
1273                         dumper->dump_const_tbl(dumper, tbl);
1274         }  /* for */
1275 }  /* stat_dump_consts */
1276
1277 /**
1278  * Dumps the parameter distribution
1279  */
1280 static void stat_dump_param_tbl(const distrib_tbl_t *tbl, graph_entry_t *global)
1281 {
1282         dumper_t *dumper;
1283
1284         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1285                 if (dumper->dump_param_tbl)
1286                         dumper->dump_param_tbl(dumper, tbl, global);
1287         }  /* for */
1288 }  /* stat_dump_param_tbl */
1289
1290 /**
1291  * Dumps the optimization counter
1292  */
1293 static void stat_dump_opt_cnt(const counter_t *tbl, unsigned len)
1294 {
1295         dumper_t *dumper;
1296
1297         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1298                 if (dumper->dump_opt_cnt)
1299                         dumper->dump_opt_cnt(dumper, tbl, len);
1300         }  /* for */
1301 }  /* stat_dump_opt_cnt */
1302
1303 /**
1304  * Initialize the dumper.
1305  */
1306 static void stat_dump_init(const char *name)
1307 {
1308         dumper_t *dumper;
1309
1310         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1311                 if (dumper->init)
1312                         dumper->init(dumper, name);
1313         }  /* for */
1314 }  /* stat_dump_init */
1315
1316 /**
1317  * Finish the dumper.
1318  */
1319 static void stat_dump_finish(void)
1320 {
1321         dumper_t *dumper;
1322
1323         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1324                 if (dumper->finish)
1325                         dumper->finish(dumper);
1326         }  /* for */
1327 }  /* stat_dump_finish */
1328
1329 /**
1330  * Register an additional function for all dumper.
1331  */
1332 void stat_register_dumper_func(dump_graph_FUNC func)
1333 {
1334         dumper_t *dumper;
1335
1336         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1337                 if (! dumper->func_map)
1338                         dumper->func_map = pset_new_ptr(3);
1339                 pset_insert_ptr(dumper->func_map, (void*)func);
1340         }  /* for */
1341 }  /* stat_register_dumper_func */
1342
1343 /* ---------------------------------------------------------------------- */
1344
/*
 * Helper: get an ir_op from an opcode.
 *
 * Looks the op up in the global ir_op hash that is filled by the
 * stat_new_ir_op() hook; returns NULL semantics are those of
 * opcode_find_entry().
 */
ir_op *stat_get_op_from_opcode(unsigned code)
{
	return opcode_find_entry((ir_opcode)code, status->ir_op_hash);
}  /* stat_get_op_from_opcode */
1352
/**
 * Hook: A new IR op is registered.
 *
 * @param ctx  the hook context
 * @param op   the new IR opcode that was created.
 */
static void stat_new_ir_op(void *ctx, ir_op *op)
{
	(void) ctx;
	/* statistics may be disabled completely */
	if (! status->stat_options)
		return;

	STAT_ENTER;
	{
		graph_entry_t *graph = graph_get_entry(NULL, status->irg_hash);

		/* execute for side effect :-) */
		(void)opcode_get_entry(op, graph->opcode_hash);

		/* remember the op so it can later be found by its opcode */
		pset_insert(status->ir_op_hash, op, op->code);
	}
	STAT_LEAVE;
}  /* stat_new_ir_op */
1376
/**
 * Hook: An IR op is freed.
 *
 * Currently a placeholder: no per-op data is removed here.
 *
 * @param ctx  the hook context
 * @param op   the IR opcode that is freed
 */
static void stat_free_ir_op(void *ctx, ir_op *op)
{
	(void) ctx;
	(void) op;
	if (! status->stat_options)
		return;

	STAT_ENTER;
	{
		/* intentionally empty */
	}
	STAT_LEAVE;
}  /* stat_free_ir_op */
1395
/**
 * Hook: A new node is created.
 *
 * Increments the new_node counter of the node's opcode, both in the
 * global pseudo-graph entry and in the current graph's entry.
 *
 * @param ctx   the hook context
 * @param irg   the IR graph on which the node is created
 * @param node  the new IR node that was created
 */
static void stat_new_node(void *ctx, ir_graph *irg, ir_node *node)
{
	(void) ctx;
	(void) irg;
	if (! status->stat_options)
		return;

	/* do NOT count during dead node elimination */
	if (status->in_dead_node_elim)
		return;

	STAT_ENTER;
	{
		node_entry_t *entry;
		graph_entry_t *graph;
		ir_op *op = stat_get_irn_op(node);

		/* increase global value */
		graph = graph_get_entry(NULL, status->irg_hash);
		entry = opcode_get_entry(op, graph->opcode_hash);
		cnt_inc(&entry->new_node);

		/* increase local value */
		graph = graph_get_entry(current_ir_graph, status->irg_hash);
		entry = opcode_get_entry(op, graph->opcode_hash);
		cnt_inc(&entry->new_node);
	}
	STAT_LEAVE;
}  /* stat_new_node */
1432
/**
 * Hook: A node is changed into a Id node
 *
 * Increments the into_Id counter of the node's opcode, both globally
 * and for the current graph.
 *
 * @param ctx   the hook context
 * @param node  the IR node that will be turned into an ID
 */
static void stat_turn_into_id(void *ctx, ir_node *node)
{
	(void) ctx;
	if (! status->stat_options)
		return;

	STAT_ENTER;
	{
		node_entry_t *entry;
		graph_entry_t *graph;
		ir_op *op = stat_get_irn_op(node);

		/* increase global value */
		graph = graph_get_entry(NULL, status->irg_hash);
		entry = opcode_get_entry(op, graph->opcode_hash);
		cnt_inc(&entry->into_Id);

		/* increase local value */
		graph = graph_get_entry(current_ir_graph, status->irg_hash);
		entry = opcode_get_entry(op, graph->opcode_hash);
		cnt_inc(&entry->into_Id);
	}
	STAT_LEAVE;
}  /* stat_turn_into_id */
1463
/**
 * Hook: A node is normalized
 *
 * Increments the normalized counter of the node's opcode, both globally
 * and for the current graph.
 *
 * @param ctx   the hook context
 * @param node  the IR node that was normalized
 */
static void stat_normalize(void *ctx, ir_node *node)
{
	(void) ctx;
	if (! status->stat_options)
		return;

	STAT_ENTER;
	{
		node_entry_t *entry;
		graph_entry_t *graph;
		ir_op *op = stat_get_irn_op(node);

		/* increase global value */
		graph = graph_get_entry(NULL, status->irg_hash);
		entry = opcode_get_entry(op, graph->opcode_hash);
		cnt_inc(&entry->normalized);

		/* increase local value */
		graph = graph_get_entry(current_ir_graph, status->irg_hash);
		entry = opcode_get_entry(op, graph->opcode_hash);
		cnt_inc(&entry->normalized);
	}
	STAT_LEAVE;
}  /* stat_normalize */
1494
/**
 * Hook: A new graph was created
 *
 * Creates (or resets) the statistics entry for the graph and records
 * its entity.
 *
 * @param ctx  the hook context
 * @param irg  the new IR graph that was created
 * @param ent  the entity of this graph
 */
static void stat_new_graph(void *ctx, ir_graph *irg, ir_entity *ent)
{
	(void) ctx;
	if (! status->stat_options)
		return;

	STAT_ENTER;
	{
		/* execute for side effect :-) */
		graph_entry_t * graph = graph_get_entry(irg, status->irg_hash);

		/* reset all properties; they are recomputed by update_graph_stat() */
		graph->ent           = ent;
		graph->is_deleted    = 0;
		graph->is_leaf       = 0;
		graph->is_leaf_call  = 0;
		graph->is_recursive  = 0;
		graph->is_chain_call = 0;
		graph->is_strict     = 1;
		graph->is_analyzed   = 0;
	}
	STAT_LEAVE;
}  /* stat_new_graph */
1524
/**
 * Hook: A graph will be deleted
 *
 * @param ctx  the hook context
 * @param irg  the IR graph that will be deleted
 *
 * Note that we still hold the information for this graph
 * in our hash maps, only a flag is set which prevents this
 * information from being changed, it's "frozen" from now.
 */
static void stat_free_graph(void *ctx, ir_graph *irg)
{
	(void) ctx;
	if (! status->stat_options)
		return;

	STAT_ENTER;
	{
		graph_entry_t *graph  = graph_get_entry(irg, status->irg_hash);
		graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);

		/* freeze the entry: later hooks must not modify it */
		graph->is_deleted = 1;

		if (status->stat_options & FIRMSTAT_COUNT_DELETED) {
			/* count the nodes of the graph yet, it will be destroyed later */
			update_graph_stat(global, graph);
		}  /* if */
	}
	STAT_LEAVE;
}  /* stat_free_graph */
1555
/**
 * Hook: A walk over a graph is initiated. Do not count walks from statistic code.
 *
 * @param ctx  the hook context
 * @param irg  the IR graph that will be walked
 * @param pre  the pre walker
 * @param post the post walker
 */
static void stat_irg_walk(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
{
	(void) ctx;
	(void) pre;
	(void) post;
	if (! status->stat_options)
		return;

	/* STAT_ENTER_SINGLE: do not recurse when the statistic code itself walks */
	STAT_ENTER_SINGLE;
	{
		graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);

		cnt_inc(&graph->cnt[gcnt_acc_walked]);
	}
	STAT_LEAVE;
}  /* stat_irg_walk */
1580
/**
 * Hook: A walk over a graph in block-wise order is initiated. Do not count walks from statistic code.
 *
 * Simply delegates to stat_irg_walk().
 *
 * @param ctx  the hook context
 * @param irg  the IR graph that will be walked
 * @param pre  the pre walker
 * @param post the post walker
 */
static void stat_irg_walk_blkwise(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
{
	/* for now, do NOT differentiate between blockwise and normal */
	stat_irg_walk(ctx, irg, pre, post);
}  /* stat_irg_walk_blkwise */
1594
/**
 * Hook: A walk over the graph's blocks is initiated. Do not count walks from statistic code.
 *
 * @param ctx  the hook context
 * @param irg  the IR graph that will be walked
 * @param node the IR node
 * @param pre  the pre walker
 * @param post the post walker
 */
static void stat_irg_block_walk(void *ctx, ir_graph *irg, ir_node *node, generic_func *pre, generic_func *post)
{
	(void) ctx;
	(void) node;
	(void) pre;
	(void) post;
	if (! status->stat_options)
		return;

	/* STAT_ENTER_SINGLE: do not recurse when the statistic code itself walks */
	STAT_ENTER_SINGLE;
	{
		graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);

		cnt_inc(&graph->cnt[gcnt_acc_walked_blocks]);
	}
	STAT_LEAVE;
}  /* stat_irg_block_walk */
1621
1622 /**
1623  * Called for every node that is removed due to an optimization.
1624  *
1625  * @param n     the IR node that will be removed
1626  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
1627  * @param kind  the optimization kind
1628  */
1629 static void removed_due_opt(ir_node *n, hmap_opt_entry_t *hmap, hook_opt_kind kind)
1630 {
1631         opt_entry_t *entry;
1632         ir_op *op = stat_get_irn_op(n);
1633
1634         /* ignore CSE for Constants */
1635         if (kind == HOOK_OPT_CSE && (is_Const(n) || is_SymConst(n)))
1636                 return;
1637
1638         /* increase global value */
1639         entry = opt_get_entry(op, hmap);
1640         cnt_inc(&entry->count);
1641 }  /* removed_due_opt */
1642
/**
 * Hook: Some nodes were optimized into some others due to an optimization.
 *
 * Every node that occurs in the old array but not in the new one is
 * counted as removed by the given optimization kind.
 *
 * @param ctx              the hook context
 * @param new_node_array   nodes that exist after the optimization
 * @param new_num_entries  number of entries in new_node_array
 * @param old_node_array   nodes that existed before the optimization
 * @param old_num_entries  number of entries in old_node_array
 * @param opt              the optimization kind
 */
static void stat_merge_nodes(
    void *ctx,
    ir_node **new_node_array, int new_num_entries,
    ir_node **old_node_array, int old_num_entries,
    hook_opt_kind opt)
{
	(void) ctx;
	if (! status->stat_options)
		return;

	STAT_ENTER;
	{
		int i, j;
		graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);

		cnt_inc(&status->num_opts[opt]);
		/* while reassociation runs, attribute everything to it */
		if (status->reassoc_run)
			opt = HOOK_OPT_REASSOC;

		for (i = 0; i < old_num_entries; ++i) {
			/* nodes might be in new and old, so if we found a node
			   in both sets, this one  is NOT removed */
			for (j = 0; j < new_num_entries; ++j) {
				if (old_node_array[i] == new_node_array[j])
					break;
			}  /* for */
			if (j >= new_num_entries) {
				int xopt = opt;

				/* sometimes we did not detect, that it is replaced by a Const */
				if (opt == HOOK_OPT_CONFIRM && new_num_entries == 1) {
					ir_op *op = get_irn_op(new_node_array[0]);

					if (op == op_Const || op == op_SymConst)
						xopt = HOOK_OPT_CONFIRM_C;
				}  /* if */

				removed_due_opt(old_node_array[i], graph->opt_hash[xopt], (hook_opt_kind)xopt);
			}  /* if */
		}  /* for */
	}
	STAT_LEAVE;
}  /* stat_merge_nodes */
1691
/**
 * Hook: Reassociation is started/stopped.
 *
 * While the flag is set, stat_merge_nodes() books every optimization
 * as HOOK_OPT_REASSOC.
 *
 * @param ctx   the hook context
 * @param flag  if non-zero, reassociation is started else stopped
 */
static void stat_reassociate(void *ctx, int flag)
{
	(void) ctx;
	if (! status->stat_options)
		return;

	STAT_ENTER;
	{
		status->reassoc_run = flag;
	}
	STAT_LEAVE;
}  /* stat_reassociate */
1710
/**
 * Hook: A node was lowered into other nodes
 *
 * Books the node as removed by the HOOK_LOWERED "optimization".
 *
 * @param ctx  the hook context
 * @param node the IR node that will be lowered
 */
static void stat_lower(void *ctx, ir_node *node)
{
	(void) ctx;
	if (! status->stat_options)
		return;

	STAT_ENTER;
	{
		graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);

		removed_due_opt(node, graph->opt_hash[HOOK_LOWERED], HOOK_LOWERED);
	}
	STAT_LEAVE;
}  /* stat_lower */
1731
/**
 * Hook: A graph was inlined.
 *
 * Counts the inlining on both sides: the caller got a body inlined,
 * the callee was inlined somewhere.
 *
 * @param ctx  the hook context
 * @param call the IR call that will re changed into the body of
 *             the called IR graph
 * @param called_irg  the IR graph representing the called routine
 */
static void stat_inline(void *ctx, ir_node *call, ir_graph *called_irg)
{
	(void) ctx;
	if (! status->stat_options)
		return;

	STAT_ENTER;
	{
		ir_graph *irg = get_irn_irg(call);
		graph_entry_t *i_graph = graph_get_entry(called_irg, status->irg_hash);
		graph_entry_t *graph   = graph_get_entry(irg, status->irg_hash);

		cnt_inc(&graph->cnt[gcnt_acc_got_inlined]);
		cnt_inc(&i_graph->cnt[gcnt_acc_was_inlined]);
	}
	STAT_LEAVE;
}  /* stat_inline */
1757
/**
 * Hook: A graph with tail-recursions was optimized.
 *
 * @param ctx      the hook context
 * @param irg      the IR graph that was optimized
 * @param n_calls  number of tail-recursive calls that were transformed
 */
static void stat_tail_rec(void *ctx, ir_graph *irg, int n_calls)
{
	(void) ctx;
	if (! status->stat_options)
		return;

	STAT_ENTER;
	{
		graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);

		graph->num_tail_recursion += n_calls;
	}
	STAT_LEAVE;
}  /* stat_tail_rec */
1777
/**
 * Strength reduction was performed on an iteration variable.
 *
 * @param ctx     the hook context
 * @param irg     the IR graph on which strength reduction was performed
 * @param strong  the node that was reduced
 */
static void stat_strength_red(void *ctx, ir_graph *irg, ir_node *strong)
{
	(void) ctx;
	if (! status->stat_options)
		return;

	STAT_ENTER;
	{
		graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
		cnt_inc(&graph->cnt[gcnt_acc_strength_red]);

		removed_due_opt(strong, graph->opt_hash[HOOK_OPT_STRENGTH_RED], HOOK_OPT_STRENGTH_RED);
	}
	STAT_LEAVE;
}  /* stat_strength_red */
1798
/**
 * Hook: Start/Stop the dead node elimination.
 *
 * Sets the flag that suppresses node counting in stat_new_node()
 * while dead node elimination copies the graph.
 *
 * @param ctx    the hook context
 * @param irg    the IR graph (unused)
 * @param start  non-zero when elimination starts, 0 when it stops
 */
static void stat_dead_node_elim(void *ctx, ir_graph *irg, int start)
{
	(void) ctx;
	(void) irg;
	if (! status->stat_options)
		return;

	status->in_dead_node_elim = (start != 0);
}  /* stat_dead_node_elim */
1813
/**
 * Hook: if-conversion was tried.
 *
 * @param context  the hook context
 * @param irg      the IR graph on which if-conversion was tried
 * @param phi      the Phi node involved (unused)
 * @param pos      the predecessor position (unused)
 * @param mux      the created Mux node (unused)
 * @param reason   the if_result_t outcome, used as counter offset
 */
static void stat_if_conversion(void *context, ir_graph *irg, ir_node *phi,
                               int pos, ir_node *mux, if_result_t reason)
{
	(void) context;
	(void) phi;
	(void) pos;
	(void) mux;
	if (! status->stat_options)
		return;

	STAT_ENTER;
	{
		graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);

		/* one counter per possible if-conversion result */
		cnt_inc(&graph->cnt[gcnt_if_conv + reason]);
	}
	STAT_LEAVE;
}  /* stat_if_conversion */
1835
1836 /**
1837  * Hook: real function call was optimized.
1838  */
1839 static void stat_func_call(void *context, ir_graph *irg, ir_node *call)
1840 {
1841         (void) context;
1842         (void) call;
1843         if (! status->stat_options)
1844                 return;
1845
1846         STAT_ENTER;
1847         {
1848                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1849
1850                 cnt_inc(&graph->cnt[gcnt_acc_real_func_call]);
1851         }
1852         STAT_LEAVE;
1853 }  /* stat_func_call */
1854
1855 /**
1856  * Hook: A multiply was replaced by a series of Shifts/Adds/Subs.
1857  *
1858  * @param ctx  the hook context
1859  */
1860 static void stat_arch_dep_replace_mul_with_shifts(void *ctx, ir_node *mul)
1861 {
1862         (void) ctx;
1863         if (! status->stat_options)
1864                 return;
1865
1866         STAT_ENTER;
1867         {
1868                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1869                 removed_due_opt(mul, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1870         }
1871         STAT_LEAVE;
1872 }  /* stat_arch_dep_replace_mul_with_shifts */
1873
1874 /**
1875  * Hook: A division by const was replaced.
1876  *
1877  * @param ctx   the hook context
1878  * @param node  the division node that will be optimized
1879  */
1880 static void stat_arch_dep_replace_division_by_const(void *ctx, ir_node *node)
1881 {
1882         (void) ctx;
1883         if (! status->stat_options)
1884                 return;
1885
1886         STAT_ENTER;
1887         {
1888                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1889                 removed_due_opt(node, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1890         }
1891         STAT_LEAVE;
1892 }  /* stat_arch_dep_replace_division_by_const */
1893
1894 /*
1895  * Update the register pressure of a block.
1896  *
1897  * @param irg        the irg containing the block
1898  * @param block      the block for which the reg pressure should be set
1899  * @param pressure   the pressure
1900  * @param class_name the name of the register class
1901  */
1902 void stat_be_block_regpressure(ir_graph *irg, ir_node *block, int pressure, const char *class_name)
1903 {
1904         if (! status->stat_options)
1905                 return;
1906
1907         STAT_ENTER;
1908         {
1909                 graph_entry_t        *graph = graph_get_entry(irg, status->irg_hash);
1910                 be_block_entry_t     *block_ent;
1911                 reg_pressure_entry_t *rp_ent;
1912
1913                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
1914                 rp_ent    = OALLOCZ(&status->be_data, reg_pressure_entry_t);
1915
1916                 rp_ent->class_name = class_name;
1917                 rp_ent->pressure   = pressure;
1918
1919                 pset_insert(block_ent->reg_pressure, rp_ent, hash_ptr(class_name));
1920         }
1921         STAT_LEAVE;
1922 }  /* stat_be_block_regpressure */
1923
1924 /**
1925  * Update the distribution of ready nodes of a block
1926  *
1927  * @param irg        the irg containing the block
1928  * @param block      the block for which the reg pressure should be set
1929  * @param num_ready  the number of ready nodes
1930  */
1931 void stat_be_block_sched_ready(ir_graph *irg, ir_node *block, int num_ready)
1932 {
1933         if (! status->stat_options)
1934                 return;
1935
1936         STAT_ENTER;
1937         {
1938                 graph_entry_t    *graph = graph_get_entry(irg, status->irg_hash);
1939                 be_block_entry_t *block_ent;
1940
1941                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
1942
1943                 /* increase the counter of corresponding number of ready nodes */
1944                 stat_inc_int_distrib_tbl(block_ent->sched_ready, num_ready);
1945         }
1946         STAT_LEAVE;
1947 }  /* stat_be_block_sched_ready */
1948
1949 /**
1950  * Update the permutation statistic of a block.
1951  *
1952  * @param class_name the name of the register class
1953  * @param n_regs     number of registers in the register class
1954  * @param perm       the perm node
1955  * @param block      the block containing the perm
1956  * @param size       the size of the perm
1957  * @param real_size  number of pairs with different registers
1958  */
1959 void stat_be_block_stat_perm(const char *class_name, int n_regs, ir_node *perm, ir_node *block,
1960                              int size, int real_size)
1961 {
1962         if (! status->stat_options)
1963                 return;
1964
1965         STAT_ENTER;
1966         {
1967                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
1968                 be_block_entry_t   *block_ent;
1969                 perm_class_entry_t *pc_ent;
1970                 perm_stat_entry_t  *ps_ent;
1971
1972                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
1973                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
1974                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
1975
1976                 pc_ent->n_regs = n_regs;
1977
1978                 /* update information */
1979                 ps_ent->size      = size;
1980                 ps_ent->real_size = real_size;
1981         }
1982         STAT_LEAVE;
1983 }  /* stat_be_block_stat_perm */
1984
1985 /**
1986  * Update the permutation statistic of a single perm.
1987  *
1988  * @param class_name the name of the register class
1989  * @param perm       the perm node
1990  * @param block      the block containing the perm
1991  * @param is_chain   1 if chain, 0 if cycle
1992  * @param size       length of the cycle/chain
1993  * @param n_ops      the number of ops representing this cycle/chain after lowering
1994  */
1995 void stat_be_block_stat_permcycle(const char *class_name, ir_node *perm, ir_node *block,
1996                                   int is_chain, int size, int n_ops)
1997 {
1998         if (! status->stat_options)
1999                 return;
2000
2001         STAT_ENTER;
2002         {
2003                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2004                 be_block_entry_t   *block_ent;
2005                 perm_class_entry_t *pc_ent;
2006                 perm_stat_entry_t  *ps_ent;
2007
2008                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2009                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2010                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2011
2012                 if (is_chain) {
2013                         ps_ent->n_copies += n_ops;
2014                         stat_inc_int_distrib_tbl(ps_ent->chains, size);
2015                 } else {
2016                         ps_ent->n_exchg += n_ops;
2017                         stat_inc_int_distrib_tbl(ps_ent->cycles, size);
2018                 }  /* if */
2019         }
2020         STAT_LEAVE;
2021 }  /* stat_be_block_stat_permcycle */
2022
2023 /* Dumps a statistics snapshot. */
2024 void stat_dump_snapshot(const char *name, const char *phase)
2025 {
2026         char fname[2048];
2027         const char *p;
2028         size_t l;
2029
2030         if (! status->stat_options)
2031                 return;
2032
2033         STAT_ENTER;
2034         {
2035                 graph_entry_t *entry;
2036                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
2037
2038                 /*
2039                  * The constant counter is only global, so we clear it here.
2040                  * Note that it does NOT contain the constants in DELETED
2041                  * graphs due to this.
2042                  */
2043                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2044                         stat_const_clear(status);
2045
2046                 /* build the name */
2047                 p = strrchr(name, '/');
2048 #ifdef _WIN32
2049                 {
2050                         const char *q;
2051
2052                         q = strrchr(name, '\\');
2053
2054                         /* NULL might be not the smallest pointer */
2055                         if (q && (!p || q > p))
2056                                 p = q;
2057                 }
2058 #endif /* _WIN32 */
2059                 if (p) {
2060                         ++p;
2061                         l = p - name;
2062
2063                         if (l > (int) (sizeof(fname) - 1))
2064                                 l = sizeof(fname) - 1;
2065
2066                         memcpy(fname, name, l);
2067                         fname[l] = '\0';
2068                 } else {
2069                         fname[0] = '\0';
2070                         p = name;
2071                 }  /* if */
2072                 strncat(fname, "firmstat-", sizeof(fname)-1);
2073                 strncat(fname, phase, sizeof(fname)-1);
2074                 strncat(fname, "-", sizeof(fname)-1);
2075                 strncat(fname, p, sizeof(fname)-1);
2076
2077                 stat_dump_init(fname);
2078
2079                 /* calculate the graph statistics */
2080                 foreach_pset(status->irg_hash, graph_entry_t*, entry) {
2081                         if (entry->irg == NULL) {
2082                                 /* special entry for the global count */
2083                                 continue;
2084                         }  /* if */
2085                         if (! entry->is_deleted) {
2086                                 /* the graph is still alive, count the nodes on it */
2087                                 update_graph_stat(global, entry);
2088                         }  /* if */
2089                 }  /* for */
2090
2091                 /* some calculations are dependent, we pushed them on the wait_q */
2092                 while (! pdeq_empty(status->wait_q)) {
2093                         entry = (graph_entry_t*)pdeq_getr(status->wait_q);
2094
2095                         update_graph_stat_2(global, entry);
2096                 }  /* while */
2097
2098                 /* dump per graph */
2099                 foreach_pset(status->irg_hash, graph_entry_t*, entry) {
2100                         if (entry->irg == NULL) {
2101                                 /* special entry for the global count */
2102                                 continue;
2103                         }  /* if */
2104
2105                         if (! entry->is_deleted || status->stat_options & FIRMSTAT_COUNT_DELETED) {
2106                                 stat_dump_graph(entry);
2107                                 stat_dump_registered(entry);
2108                         }  /* if */
2109
2110                         if (! entry->is_deleted) {
2111                                 /* clear the counter that are not accumulated */
2112                                 graph_clear_entry(entry, 0);
2113                         }  /* if */
2114                 }  /* for */
2115
2116                 /* dump global */
2117                 stat_dump_graph(global);
2118
2119                 /* dump the const info */
2120                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2121                         stat_dump_consts(&status->const_info);
2122
2123                 /* dump the parameter distribution */
2124                 stat_dump_param_tbl(status->dist_param_cnt, global);
2125
2126                 /* dump the optimization counter and clear them */
2127                 stat_dump_opt_cnt(status->num_opts, ARRAY_SIZE(status->num_opts));
2128                 clear_optimization_counter();
2129
2130                 stat_dump_finish();
2131
2132                 stat_finish_pattern_history(fname);
2133
2134                 /* clear the global counters here */
2135                 {
2136                         node_entry_t *entry;
2137
2138                         foreach_pset(global->opcode_hash, node_entry_t*, entry) {
2139                                 opcode_clear_entry(entry);
2140                         }  /* for */
2141                         /* clear all global counter */
2142                         graph_clear_entry(global, /*all=*/1);
2143                 }
2144         }
2145         STAT_LEAVE;
2146 }  /* stat_dump_snapshot */
2147
/** Context for running stat_dump_snapshot() as an ir_prog pass. */
typedef struct pass_t {
	ir_prog_pass_t pass;   /**< the generic pass data; must stay the first member so the struct can be used as a pass */
	const char     *fname; /**< file name argument forwarded to stat_dump_snapshot() */
	const char     *phase; /**< phase name argument forwarded to stat_dump_snapshot() */
} pass_t;
2153
2154 /**
2155  * Wrapper to run stat_dump_snapshot() as a ir_prog wrapper.
2156  */
2157 static int stat_dump_snapshot_wrapper(ir_prog *irp, void *context)
2158 {
2159         pass_t *pass = (pass_t*)context;
2160
2161         (void)irp;
2162         stat_dump_snapshot(pass->fname, pass->phase);
2163         return 0;
2164 }  /* stat_dump_snapshot_wrapper */
2165
2166 /**
2167  * Ensure that no verifier is run from the wrapper.
2168  */
2169 static int no_verify(ir_prog *prog, void *ctx)
2170 {
2171         (void)prog;
2172         (void)ctx;
2173         return 0;
2174 }
2175
2176 /**
2177  * Ensure that no dumper is run from the wrapper.
2178  */
2179 static void no_dump(ir_prog *prog, void *ctx, unsigned idx)
2180 {
2181         (void)prog;
2182         (void)ctx;
2183         (void)idx;
2184 }
2185
2186 /* create an ir_pog pass */
2187 ir_prog_pass_t *stat_dump_snapshot_pass(
2188         const char *name, const char *fname, const char *phase)
2189 {
2190         pass_t *pass = XMALLOCZ(pass_t);
2191
2192         def_prog_pass_constructor(
2193                 &pass->pass, name ? name : "stat_snapshot", stat_dump_snapshot_wrapper);
2194         pass->fname = fname;
2195         pass->phase = phase;
2196
2197         /* no dump/verify */
2198         pass->pass.dump_irprog   = no_dump;
2199         pass->pass.verify_irprog = no_verify;
2200
2201         return &pass->pass;
2202 }  /* stat_dump_snapshot_pass */
2203
2204 /** the hook entries for the Firm statistics module */
2205 static hook_entry_t stat_hooks[hook_last];
2206
/* Initialize the statistics module; does nothing unless FIRMSTAT_ENABLED is set. */
void firm_init_stat(unsigned enable_options)
{
/* X expands a string literal into (pointer, length) for new_id_from_chars() */
#define X(a)  a, sizeof(a)-1
/* HOOK stores fkt into the hook union member for h and registers the entry */
#define HOOK(h, fkt) \
	stat_hooks[h].hook._##h = fkt; register_hook(h, &stat_hooks[h])
	/* counts down from 0; pseudo-ops get negative (wrapped) codes so they
	 * never collide with real opcodes */
	unsigned num = 0;

	if (! (enable_options & FIRMSTAT_ENABLED))
		return;

	status = XMALLOCZ(stat_info_t);

	/* enable statistics; the conditional is redundant here because
	 * FIRMSTAT_ENABLED was already checked above, so this always stores
	 * enable_options */
	status->stat_options = enable_options & FIRMSTAT_ENABLED ? enable_options : 0;

	/* register all hooks */
	HOOK(hook_new_ir_op,                          stat_new_ir_op);
	HOOK(hook_free_ir_op,                         stat_free_ir_op);
	HOOK(hook_new_node,                           stat_new_node);
	HOOK(hook_turn_into_id,                       stat_turn_into_id);
	HOOK(hook_normalize,                          stat_normalize);
	HOOK(hook_new_graph,                          stat_new_graph);
	HOOK(hook_free_graph,                         stat_free_graph);
	HOOK(hook_irg_walk,                           stat_irg_walk);
	HOOK(hook_irg_walk_blkwise,                   stat_irg_walk_blkwise);
	HOOK(hook_irg_block_walk,                     stat_irg_block_walk);
	HOOK(hook_merge_nodes,                        stat_merge_nodes);
	HOOK(hook_reassociate,                        stat_reassociate);
	HOOK(hook_lower,                              stat_lower);
	HOOK(hook_inline,                             stat_inline);
	HOOK(hook_tail_rec,                           stat_tail_rec);
	HOOK(hook_strength_red,                       stat_strength_red);
	HOOK(hook_dead_node_elim,                     stat_dead_node_elim);
	HOOK(hook_if_conversion,                      stat_if_conversion);
	HOOK(hook_func_call,                          stat_func_call);
	HOOK(hook_arch_dep_replace_mul_with_shifts,   stat_arch_dep_replace_mul_with_shifts);
	HOOK(hook_arch_dep_replace_division_by_const, stat_arch_dep_replace_division_by_const);

	/* obstacks backing the counter and backend-data allocations */
	obstack_init(&status->cnts);
	obstack_init(&status->be_data);

	/* create the hash-tables */
	status->irg_hash   = new_pset(graph_cmp, 8);
	status->ir_op_hash = new_pset(opcode_cmp_2, 1);

	/* create the wait queue */
	status->wait_q     = new_pdeq();

	if (enable_options & FIRMSTAT_COUNT_STRONG_OP) {
		/* build the pseudo-ops */

		_op_Phi0.code    = --num;
		_op_Phi0.name    = new_id_from_chars(X("Phi0"));

		_op_PhiM.code    = --num;
		_op_PhiM.name    = new_id_from_chars(X("PhiM"));

		_op_ProjM.code   = --num;
		_op_ProjM.name   = new_id_from_chars(X("ProjM"));

		_op_MulC.code    = --num;
		_op_MulC.name    = new_id_from_chars(X("MulC"));

		_op_DivC.code    = --num;
		_op_DivC.name    = new_id_from_chars(X("DivC"));

		_op_ModC.code    = --num;
		_op_ModC.name    = new_id_from_chars(X("ModC"));

		status->op_Phi0    = &_op_Phi0;
		status->op_PhiM    = &_op_PhiM;
		status->op_ProjM   = &_op_ProjM;
		status->op_MulC    = &_op_MulC;
		status->op_DivC    = &_op_DivC;
		status->op_ModC    = &_op_ModC;
	} else {
		/* strong-op counting disabled: leave the pseudo-ops unset */
		status->op_Phi0    = NULL;
		status->op_PhiM    = NULL;
		status->op_ProjM   = NULL;
		status->op_MulC    = NULL;
		status->op_DivC    = NULL;
		status->op_ModC    = NULL;
	}  /* if */

	/* for Florian: count the Sel depth */
	if (enable_options & FIRMSTAT_COUNT_SELS) {
		_op_SelSel.code    = --num;
		_op_SelSel.name    = new_id_from_chars(X("Sel(Sel)"));

		_op_SelSelSel.code = --num;
		_op_SelSelSel.name = new_id_from_chars(X("Sel(Sel(Sel))"));

		status->op_SelSel    = &_op_SelSel;
		status->op_SelSelSel = &_op_SelSelSel;
	} else {
		status->op_SelSel    = NULL;
		status->op_SelSelSel = NULL;
	}  /* if */

	/* register the dumper; the simple dumper is always active */
	stat_register_dumper(&simple_dumper);

	if (enable_options & FIRMSTAT_CSV_OUTPUT)
		stat_register_dumper(&csv_dumper);

	/* initialize the pattern hash */
	stat_init_pattern_history(enable_options & FIRMSTAT_PATTERN_ENABLED);

	/* initialize the Const options */
	if (enable_options & FIRMSTAT_COUNT_CONSTS)
		stat_init_const_cnt(status);

	/* distribution table for parameter counts */
	status->dist_param_cnt = stat_new_int_distrib_tbl();

	clear_optimization_counter();

#undef HOOK
#undef X
}  /* firm_init_stat */
2328
2329 /**
2330  * Frees all dumper structures.
2331  */
2332 static void stat_term_dumper(void)
2333 {
2334         dumper_t *dumper, *next_dumper;
2335
2336         for (dumper = status->dumper; dumper; /* iteration done in loop body */ ) {
2337                 if (dumper->func_map)
2338                         del_pset(dumper->func_map);
2339
2340                 next_dumper = dumper->next;
2341                 free(dumper);
2342                 dumper = next_dumper;
2343         }  /* for */
2344 }  /* stat_term_dumper */
2345
2346
2347 /* Terminates the statistics module, frees all memory. */
2348 void stat_term(void)
2349 {
2350         if (status != (stat_info_t *)&status_disable) {
2351                 obstack_free(&status->be_data, NULL);
2352                 obstack_free(&status->cnts, NULL);
2353
2354                 stat_term_dumper();
2355
2356                 xfree(status);
2357                 status = (stat_info_t *)&status_disable;
2358         }
2359 }  /* stat_term */
2360
2361 /* returns 1 if statistics were initialized, 0 otherwise */
2362 int stat_is_active(void)
2363 {
2364         return status != (stat_info_t *)&status_disable;
2365 }  /* stat_is_active */