1 /*
2  * Copyright (C) 1995-2010 University of Karlsruhe.  All rights reserved.
3  *
4  * This file is part of libFirm.
5  *
6  * This file may be distributed and/or modified under the terms of the
7  * GNU General Public License version 2 as published by the Free Software
8  * Foundation and appearing in the file LICENSE.GPL included in the
9  * packaging of this file.
10  *
11  * Licensees holding valid libFirm Professional Edition licenses may use
12  * this file in accordance with the libFirm Commercial License.
13  * Agreement provided with the Software.
14  *
15  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17  * PURPOSE.
18  */
19
20 /**
21  * @file
22  * @brief   Statistics for Firm.
23  * @author  Michael Beck
24  */
25 #include "config.h"
26
27 #include <stdio.h>
28 #include <stdlib.h>
29 #include <string.h>
30
31 #include "irouts.h"
32 #include "irdump.h"
33 #include "hashptr.h"
34 #include "firmstat_t.h"
35 #include "irpass_t.h"
36 #include "pattern.h"
37 #include "dags.h"
38 #include "stat_dmp.h"
39 #include "xmalloc.h"
40 #include "irhooks.h"
41 #include "util.h"
42
43 /*
44  * These must be static (file-local):
45  * special pseudo opcodes used to count some interesting cases.
46  */
47
48 /**
49  * The Phi0, a node that is created during SSA construction
50  */
51 static ir_op _op_Phi0;
52
53 /** The PhiM, just to count memory Phis. */
54 static ir_op _op_PhiM;
55
56 /** The Mul by Const node. */
57 static ir_op _op_MulC;
58
59 /** The Div by Const node. */
60 static ir_op _op_DivC;
61
62 /** The Mod by Const node. */
63 static ir_op _op_ModC;
64
65 /** The memory Proj node. */
66 static ir_op _op_ProjM;
67
68 /** A Sel of a Sel */
69 static ir_op _op_SelSel;
70
71 /** A Sel of a Sel of a Sel */
72 static ir_op _op_SelSelSel;
73
74 /* ---------------------------------------------------------------------------------- */
75
76 /** Marks the beginning of a statistic (hook) function. */
77 #define STAT_ENTER    ++status->recursive
78
79 /** Marks the end of a statistic (hook) function. */
80 #define STAT_LEAVE    --status->recursive
81
82 /** Allows entering a statistic function only when we are not already in a hook. */
83 #define STAT_ENTER_SINGLE    do { if (status->recursive > 0) return; ++status->recursive; } while (0)
84
85 /**
86  * Global status. Until the statistics module is initialized, this points at a constant zero, so the hooks see statistics as disabled and return early.
87  */
88 static const unsigned status_disable = 0;
89 static stat_info_t *status = (stat_info_t *)&status_disable;
90
91 /**
92  * Compare two elements of the opcode hash.
93  */
94 static int opcode_cmp(const void *elt, const void *key)
95 {
96         const node_entry_t *e1 = (const node_entry_t*)elt;
97         const node_entry_t *e2 = (const node_entry_t*)key;
98
99         return e1->op->code - e2->op->code;
100 }  /* opcode_cmp */
101
102 /**
103  * Compare two elements of the graph hash.
104  */
105 static int graph_cmp(const void *elt, const void *key)
106 {
107         const graph_entry_t *e1 = (const graph_entry_t*)elt;
108         const graph_entry_t *e2 = (const graph_entry_t*)key;
109
110         return e1->irg != e2->irg;
111 }  /* graph_cmp */
112
113 /**
114  * Compare two elements of the optimization hash.
115  */
116 static int opt_cmp(const void *elt, const void *key)
117 {
118         const opt_entry_t *e1 = (const opt_entry_t*)elt;
119         const opt_entry_t *e2 = (const opt_entry_t*)key;
120
121         return e1->op->code != e2->op->code;
122 }  /* opt_cmp */
123
124 /**
125  * Compare two elements of the block/extbb hash.
126  */
127 static int block_cmp(const void *elt, const void *key)
128 {
129         const block_entry_t *e1 = (const block_entry_t*)elt;
130         const block_entry_t *e2 = (const block_entry_t*)key;
131
132         /* it's enough to compare the block number */
133         return e1->block_nr != e2->block_nr;
134 }  /* block_cmp */
135
136 /**
137  * Compare two elements of the be_block hash.
138  */
139 static int be_block_cmp(const void *elt, const void *key)
140 {
141         const be_block_entry_t *e1 = (const be_block_entry_t*)elt;
142         const be_block_entry_t *e2 = (const be_block_entry_t*)key;
143
144         return e1->block_nr != e2->block_nr;
145 }  /* be_block_cmp */
146
147 /**
148  * Compare two elements of reg pressure hash.
149  */
150 static int reg_pressure_cmp(const void *elt, const void *key)
151 {
152         const reg_pressure_entry_t *e1 = (const reg_pressure_entry_t*)elt;
153         const reg_pressure_entry_t *e2 = (const reg_pressure_entry_t*)key;
154
155         return e1->class_name != e2->class_name;
156 }  /* reg_pressure_cmp */
157
158 /**
159  * Compare two elements of the perm_stat hash.
160  */
161 static int perm_stat_cmp(const void *elt, const void *key)
162 {
163         const perm_stat_entry_t *e1 = (const perm_stat_entry_t*)elt;
164         const perm_stat_entry_t *e2 = (const perm_stat_entry_t*)key;
165
166         return e1->perm != e2->perm;
167 }  /* perm_stat_cmp */
168
169 /**
170  * Compare two elements of the perm_class hash.
171  */
172 static int perm_class_cmp(const void *elt, const void *key)
173 {
174         const perm_class_entry_t *e1 = (const perm_class_entry_t*)elt;
175         const perm_class_entry_t *e2 = (const perm_class_entry_t*)key;
176
177         return e1->class_name != e2->class_name;
178 }  /* perm_class_cmp */
179
180 /**
181  * Compare two elements of the ir_op hash.
182  */
183 static int opcode_cmp_2(const void *elt, const void *key)
184 {
185         const ir_op *e1 = (const ir_op*)elt;
186         const ir_op *e2 = (const ir_op*)key;
187
188         return e1->code != e2->code;
189 }  /* opcode_cmp_2 */
190
191 /**
192  * Compare two elements of the address_mark set.
193  */
194 static int address_mark_cmp(const void *elt, const void *key, size_t size)
195 {
196         const address_mark_entry_t *e1 = (const address_mark_entry_t*)elt;
197         const address_mark_entry_t *e2 = (const address_mark_entry_t*)key;
198         (void) size;
199
200         /* compare only the nodes, the rest is used as data container */
201         return e1->node != e2->node;
202 }  /* address_mark_cmp */
203
204 /**
205  * Clears all counters in a node_entry_t.
206  */
207 static void opcode_clear_entry(node_entry_t *elem)
208 {
209         cnt_clr(&elem->cnt_alive);
210         cnt_clr(&elem->new_node);
211         cnt_clr(&elem->into_Id);
212         cnt_clr(&elem->normalized);
213 }  /* opcode_clear_entry */
214
215 /**
216  * Returns the associated node_entry_t for an ir_op (and allocates
217  * one if not yet available).
218  *
219  * @param op    the IR operation
220  * @param hmap  a hash map containing ir_op* -> node_entry_t*
221  */
222 static node_entry_t *opcode_get_entry(const ir_op *op, hmap_node_entry_t *hmap)
223 {
224         node_entry_t key;
225         node_entry_t *elem;
226
227         key.op = op;
228
229         elem = (node_entry_t*)pset_find(hmap, &key, op->code);
230         if (elem)
231                 return elem;
232
233         elem = OALLOCZ(&status->cnts, node_entry_t);
234
235         /* clear counter */
236         opcode_clear_entry(elem);
237
238         elem->op = op;
239
240         return (node_entry_t*)pset_insert(hmap, elem, op->code);
241 }  /* opcode_get_entry */
242
243 /**
244  * Returns the associated ir_op for an opcode.
245  *
246  * @param code  the IR opcode
247  * @param hmap  the hash map containing opcode -> ir_op*
248  */
249 static ir_op *opcode_find_entry(ir_opcode code, hmap_ir_op *hmap)
250 {
251         ir_op key;
252
253         key.code = code;
254         return (ir_op*)pset_find(hmap, &key, code);
255 }  /* opcode_find_entry */
256
257 /**
258  * Clears all counters in a graph_entry_t.
259  *
260  * @param elem  the graph entry
261  * @param all   if non-zero, clears all counters, else leaves the accumulated ones
262  */
263 static void graph_clear_entry(graph_entry_t *elem, int all)
264 {
265         int i;
266
267         /* clear accumulated / non-accumulated counter */
268         for (i = all ? 0 : _gcnt_non_acc; i < _gcnt_last; ++i) {
269                 cnt_clr(&elem->cnt[i]);
270         }  /* for */
271
272         if (elem->block_hash) {
273                 del_pset(elem->block_hash);
274                 elem->block_hash = NULL;
275         }  /* if */
276
277         if (elem->extbb_hash) {
278                 del_pset(elem->extbb_hash);
279                 elem->extbb_hash = NULL;
280         }  /* if */
281
282         obstack_free(&elem->recalc_cnts, NULL);
283         obstack_init(&elem->recalc_cnts);
284 }  /* graph_clear_entry */
285
286 /**
287  * Returns the associated graph_entry_t for an IR graph.
288  *
289  * @param irg   the IR graph, NULL for the global counter
290  * @param hmap  the hash map containing ir_graph* -> graph_entry_t*
291  */
292 static graph_entry_t *graph_get_entry(ir_graph *irg, hmap_graph_entry_t *hmap)
293 {
294         graph_entry_t key;
295         graph_entry_t *elem;
296         size_t i;
297
298         key.irg = irg;
299
300         elem = (graph_entry_t*)pset_find(hmap, &key, HASH_PTR(irg));
301
302         if (elem) {
303                 /* create the backend block information hash map on demand */
304                 if (! elem->be_block_hash)
305                         elem->be_block_hash = new_pset(be_block_cmp, 5);
306
307                 return elem;
308         }  /* if */
309
310         /* allocate a new one */
311         elem = OALLOCZ(&status->cnts, graph_entry_t);
312         obstack_init(&elem->recalc_cnts);
313
314         /* clear counter */
315         graph_clear_entry(elem, 1);
316
317         /* new hash table for opcodes here  */
318         elem->opcode_hash   = new_pset(opcode_cmp, 5);
319         elem->address_mark  = new_set(address_mark_cmp, 5);
320         elem->irg           = irg;
321
322         /* these hash tables are created on demand */
323         elem->block_hash = NULL;
324         elem->extbb_hash = NULL;
325
326         for (i = 0; i < sizeof(elem->opt_hash)/sizeof(elem->opt_hash[0]); ++i)
327                 elem->opt_hash[i] = new_pset(opt_cmp, 4);
328
329         return (graph_entry_t*)pset_insert(hmap, elem, HASH_PTR(irg));
330 }  /* graph_get_entry */
331
332 /**
333  * Clears all counters in an opt_entry_t.
334  */
335 static void opt_clear_entry(opt_entry_t *elem)
336 {
337         cnt_clr(&elem->count);
338 }  /* opt_clear_entry */
339
340 /**
341  * Returns the associated opt_entry_t for an IR operation.
342  *
343  * @param op    the IR operation
344  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
345  */
346 static opt_entry_t *opt_get_entry(const ir_op *op, hmap_opt_entry_t *hmap)
347 {
348         opt_entry_t key;
349         opt_entry_t *elem;
350
351         key.op = op;
352
353         elem = (opt_entry_t*)pset_find(hmap, &key, op->code);
354         if (elem)
355                 return elem;
356
357         elem = OALLOCZ(&status->cnts, opt_entry_t);
358
359         /* clear new counter */
360         opt_clear_entry(elem);
361
362         elem->op = op;
363
364         return (opt_entry_t*)pset_insert(hmap, elem, op->code);
365 }  /* opt_get_entry */
366
367 /**
368  * Clears all counters in a block_entry_t.
369  */
370 static void block_clear_entry(block_entry_t *elem)
371 {
372         int i;
373
374         for (i = 0; i < _bcnt_last; ++i)
375                 cnt_clr(&elem->cnt[i]);
376 }  /* block_clear_entry */
377
378 /**
379  * Returns the associated block_entry_t for a block.
380  *
381  * @param block_nr  an IR block number
382  * @param hmap      a hash map containing long -> block_entry_t
383  */
384 static block_entry_t *block_get_entry(struct obstack *obst, long block_nr, hmap_block_entry_t *hmap)
385 {
386         block_entry_t key;
387         block_entry_t *elem;
388
389         key.block_nr = block_nr;
390
391         elem = (block_entry_t*)pset_find(hmap, &key, block_nr);
392         if (elem)
393                 return elem;
394
395         elem = OALLOCZ(obst, block_entry_t);
396
397         /* clear new counter */
398         block_clear_entry(elem);
399
400         elem->block_nr = block_nr;
401
402         return (block_entry_t*)pset_insert(hmap, elem, block_nr);
403 }  /* block_get_entry */
404
405 /**
406  * Clear all sets in be_block_entry_t.
407  */
408 static void be_block_clear_entry(be_block_entry_t *elem)
409 {
410         if (elem->reg_pressure)
411                 del_pset(elem->reg_pressure);
412
413         if (elem->sched_ready)
414                 stat_delete_distrib_tbl(elem->sched_ready);
415
416         if (elem->perm_class_stat)
417                 del_pset(elem->perm_class_stat);
418
419         elem->reg_pressure    = new_pset(reg_pressure_cmp, 5);
420         elem->sched_ready     = stat_new_int_distrib_tbl();
421         elem->perm_class_stat = new_pset(perm_class_cmp, 5);
422 }  /* be_block_clear_entry */
423
424 /**
425  * Returns the associated be_block_entry_t for a block.
426  *
427  * @param block_nr  an IR block number
428  * @param hmap      a hash map containing long -> be_block_entry_t
429  */
430 static be_block_entry_t *be_block_get_entry(struct obstack *obst, long block_nr, hmap_be_block_entry_t *hmap)
431 {
432         be_block_entry_t key;
433         be_block_entry_t *elem;
434
435         key.block_nr = block_nr;
436
437         elem = (be_block_entry_t*)pset_find(hmap, &key, block_nr);
438         if (elem)
439                 return elem;
440
441         elem = OALLOCZ(obst, be_block_entry_t);
442
443         /* clear new counter */
444         be_block_clear_entry(elem);
445
446         elem->block_nr = block_nr;
447
448         return (be_block_entry_t*)pset_insert(hmap, elem, block_nr);
449 }  /* be_block_get_entry */
450
451 /**
452  * clears all sets in perm_class_entry_t
453  */
454 static void perm_class_clear_entry(perm_class_entry_t *elem)
455 {
456         if (elem->perm_stat)
457                 del_pset(elem->perm_stat);
458
459         elem->perm_stat = new_pset(perm_stat_cmp, 5);
460 }  /* perm_class_clear_entry */
461
462 /**
463  * Returns the associated perm_class entry for a register class.
464  *
465  * @param class_name  the register class name
466  * @param hmap        a hash map containing class_name -> perm_class_entry_t
467  */
468 static perm_class_entry_t *perm_class_get_entry(struct obstack *obst, const char *class_name,
469                                                 hmap_perm_class_entry_t *hmap)
470 {
471         perm_class_entry_t key;
472         perm_class_entry_t *elem;
473
474         key.class_name = class_name;
475
476         elem = (perm_class_entry_t*)pset_find(hmap, &key, HASH_PTR(class_name));
477         if (elem)
478                 return elem;
479
480         elem = OALLOCZ(obst, perm_class_entry_t);
481
482         /* clear new counter */
483         perm_class_clear_entry(elem);
484
485         elem->class_name = class_name;
486
487         return (perm_class_entry_t*)pset_insert(hmap, elem, HASH_PTR(class_name));
488 }  /* perm_class_get_entry */
489
490 /**
491  * clears all sets in perm_stat_entry_t
492  */
493 static void perm_stat_clear_entry(perm_stat_entry_t *elem)
494 {
495         if (elem->chains)
496                 stat_delete_distrib_tbl(elem->chains);
497
498         if (elem->cycles)
499                 stat_delete_distrib_tbl(elem->cycles);
500
501         elem->chains = stat_new_int_distrib_tbl();
502         elem->cycles = stat_new_int_distrib_tbl();
503 }  /* perm_stat_clear_entry */
504
505 /**
506  * Returns the associated perm_stat entry for a perm.
507  *
508  * @param perm      the perm node
509  * @param hmap      a hash map containing perm -> perm_stat_entry_t
510  */
511 static perm_stat_entry_t *perm_stat_get_entry(struct obstack *obst, ir_node *perm, hmap_perm_stat_entry_t *hmap)
512 {
513         perm_stat_entry_t key;
514         perm_stat_entry_t *elem;
515
516         key.perm = perm;
517
518         elem = (perm_stat_entry_t*)pset_find(hmap, &key, HASH_PTR(perm));
519         if (elem)
520                 return elem;
521
522         elem = OALLOCZ(obst, perm_stat_entry_t);
523
524         /* clear new counter */
525         perm_stat_clear_entry(elem);
526
527         elem->perm = perm;
528
529         return (perm_stat_entry_t*)pset_insert(hmap, elem, HASH_PTR(perm));
530 }  /* perm_stat_get_entry */
531
532 /**
533  * Clears the optimization counters.
534  */
535 static void clear_optimization_counter(void)
536 {
537         int i;
538         for (i = 0; i < FS_OPT_MAX; ++i)
539                 cnt_clr(&status->num_opts[i]);
540 }
541
542 /**
543  * Returns the ir_op for an IR node,
544  * handles special cases and returns pseudo opcodes.
545  *
546  * @param node  an IR node
547  */
548 static ir_op *stat_get_irn_op(ir_node *node)
549 {
550         ir_op *op = get_irn_op(node);
551         unsigned opc = op->code;
552
553         switch (opc) {
554         case iro_Phi:
555                 if (get_irn_arity(node) == 0) {
556                         /* special case, a Phi0 node, count on extra counter */
557                         op = status->op_Phi0 ? status->op_Phi0 : op;
558                 } else if (get_irn_mode(node) == mode_M) {
559                         /* special case, a Memory Phi node, count on extra counter */
560                         op = status->op_PhiM ? status->op_PhiM : op;
561                 }  /* if */
562                 break;
563         case iro_Proj:
564                 if (get_irn_mode(node) == mode_M) {
565                         /* special case, a Memory Proj node, count on extra counter */
566                         op = status->op_ProjM ? status->op_ProjM : op;
567                 }  /* if */
568                 break;
569         case iro_Mul:
570                 if (is_Const(get_Mul_left(node)) || is_Const(get_Mul_right(node))) {
571                         /* special case, a Multiply by a const, count on extra counter */
572                         op = status->op_MulC ? status->op_MulC : op;
573                 }  /* if */
574                 break;
575         case iro_Div:
576                 if (is_Const(get_Div_right(node))) {
577                         /* special case, a division by a const, count on extra counter */
578                         op = status->op_DivC ? status->op_DivC : op;
579                 }  /* if */
580                 break;
581         case iro_Mod:
582                 if (is_Const(get_Mod_right(node))) {
583                         /* special case, a Mod by a const, count on extra counter */
584                         op = status->op_ModC ? status->op_ModC : op;
585                 }  /* if */
586                 break;
587         case iro_Sel:
588                 if (is_Sel(get_Sel_ptr(node))) {
589                         /* special case, a Sel of a Sel, count on extra counter */
590                         op = status->op_SelSel ? status->op_SelSel : op;
591                         if (is_Sel(get_Sel_ptr(get_Sel_ptr(node)))) {
592                                 /* special case, a Sel of a Sel of a Sel, count on extra counter */
593                                 op = status->op_SelSelSel ? status->op_SelSelSel : op;
594                         }  /* if */
595                 }  /* if */
596                 break;
597         default:
598                 break;
599         }  /* switch */
600
601         return op;
602 }  /* stat_get_irn_op */
603
604 /**
605  * Updates the block counters.
606  */
607 static void update_block_info(ir_node *node, graph_entry_t *graph)
608 {
609         ir_op *op = get_irn_op(node);
610         ir_node *block;
611         block_entry_t *b_entry;
612         int i, arity;
613
614         /* check for block */
615         if (op == op_Block) {
616                 arity = get_irn_arity(node);
617                 b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(node), graph->block_hash);
618                 /* mark the start and end blocks so they can be filtered out */
619                 if (node == get_irg_start_block(graph->irg))
620                         b_entry->is_start = 1;
621                 else if (node == get_irg_end_block(graph->irg))
622                         b_entry->is_end = 1;
623
624                 /* count all incoming control flow edges; each predecessor of a Block lives in a predecessor block */
625                 for (i = 0; i < arity; ++i) {
626                         ir_node *pred = get_irn_n(node, i);
627                         ir_node *other_block = get_nodes_block(pred);
628                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
629
630                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
631                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
632                 }  /* for */
633                 return;
634         }  /* if */
635
636         block   = get_nodes_block(node);
637         b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(block), graph->block_hash);
638
639         if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
640                 /* count data Phi per block */
641                 cnt_inc(&b_entry->cnt[bcnt_phi_data]);
642         }  /* if */
643
644         /* we have a new node in our block */
645         cnt_inc(&b_entry->cnt[bcnt_nodes]);
646
647         /* don't count keep-alive edges */
648         if (is_End(node))
649                 return;
650
651         arity = get_irn_arity(node);
652
653         for (i = 0; i < arity; ++i) {
654                 ir_node *pred = get_irn_n(node, i);
655                 ir_node *other_block;
656
657                 other_block = get_nodes_block(pred);
658
659                 if (other_block == block)
660                         cnt_inc(&b_entry->cnt[bcnt_edges]); /* an intra-block edge */
661                 else {
662                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
663
664                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
665                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
666                 }  /* if */
667         }  /* for */
668 }  /* update_block_info */
669
670 /**
671  * Update the extended block counter.
672  */
673 static void update_extbb_info(ir_node *node, graph_entry_t *graph)
674 {
675         ir_op *op = get_irn_op(node);
676         ir_extblk *extbb;
677         extbb_entry_t *eb_entry;
678         int i, arity;
679
680         /* check for block */
681         if (op == op_Block) {
682                 extbb = get_nodes_extbb(node);
683                 arity = get_irn_arity(node);
684                 eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);
685
686                 /* count all incoming edges */
687                 for (i = 0; i < arity; ++i) {
688                         ir_node *pred = get_irn_n(node, i);
689                         ir_extblk *other_extbb = get_nodes_extbb(pred);
690
691                         if (extbb != other_extbb) {
692                                 extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);
693
694                                 cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
695                                 cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
696                         }  /* if */
697                 }  /* for */
698                 return;
699         }  /* if */
700
701         extbb    = get_nodes_extbb(node);
702         eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);
703
704         if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
705                 /* count data Phi per extbb */
706                 cnt_inc(&eb_entry->cnt[bcnt_phi_data]);
707         }  /* if */
708
709         /* we have a new node in our block */
710         cnt_inc(&eb_entry->cnt[bcnt_nodes]);
711
712         /* don't count keep-alive edges */
713         if (is_End(node))
714                 return;
715
716         arity = get_irn_arity(node);
717
718         for (i = 0; i < arity; ++i) {
719                 ir_node *pred = get_irn_n(node, i);
720                 ir_extblk *other_extbb = get_nodes_extbb(pred);
721
722                 if (other_extbb == extbb)
723                         cnt_inc(&eb_entry->cnt[bcnt_edges]);    /* an intra-extbb edge */
724                 else {
725                         extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);
726
727                         cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
728                         cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
729                 }  /* if */
730         }  /* for */
731 }  /* update_extbb_info */
732
733 /**
734  * Calculates how many arguments of the call are constant and updates
735  * the parameter distribution.
736  */
737 static void analyse_params_of_Call(graph_entry_t *graph, ir_node *call)
738 {
739         int i, num_const_args = 0, num_local_adr = 0;
740         int n = get_Call_n_params(call);
741
742         for (i = 0; i < n; ++i) {
743                 ir_node *param = get_Call_param(call, i);
744
745                 if (is_irn_constlike(param))
746                         ++num_const_args;
747                 else if (is_Sel(param)) {
748                         ir_node *base = param;
749
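                        /* strip the Sel chain: if the base is the graph's frame,
                         * the argument is the address of a local variable */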
750                         do {
751                                 base = get_Sel_ptr(base);
752                         } while (is_Sel(base));
753
754                         if (base == get_irg_frame(current_ir_graph))
755                                 ++num_local_adr;
756                 }
757
758         }  /* for */
759
760         if (num_const_args > 0)
761                 cnt_inc(&graph->cnt[gcnt_call_with_cnst_arg]);
762         if (num_const_args == n)
763                 cnt_inc(&graph->cnt[gcnt_call_with_all_cnst_arg]);
764         if (num_local_adr > 0)
765                 cnt_inc(&graph->cnt[gcnt_call_with_local_adr]);
766
767         stat_inc_int_distrib_tbl(status->dist_param_cnt, n);
768 }  /* analyse_params_of_Call */
769
770 /**
771  * Update info on calls.
772  *
773  * @param call   The call
774  * @param graph  The graph entry containing the call
775  */
776 static void stat_update_call(ir_node *call, graph_entry_t *graph)
777 {
778         ir_node   *block = get_nodes_block(call);
779         ir_node   *ptr = get_Call_ptr(call);
780         ir_entity *ent = NULL;
781         ir_graph  *callee = NULL;
782
783         /*
784          * If the block is bad, the whole subgraph will collapse later
785          * so do not count this call.
786          * This happens in dead code.
787          */
788         if (is_Bad(block))
789                 return;
790
791         cnt_inc(&graph->cnt[gcnt_all_calls]);
792
793         /* found a call, this function is not a leaf */
794         graph->is_leaf = 0;
795
796         if (is_SymConst(ptr)) {
797                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
798                         /* ok, we seem to know the entity */
799                         ent = get_SymConst_entity(ptr);
800                         callee = get_entity_irg(ent);
801
802                         /* it is recursive if it calls itself at least once */
803                         if (callee == graph->irg)
804                                 graph->is_recursive = 1;
805                         if (callee == NULL)
806                                 cnt_inc(&graph->cnt[gcnt_external_calls]);
807                 }  /* if */
808         } else {
809                 /* indirect call, we cannot predict the callee */
810                 cnt_inc(&graph->cnt[gcnt_indirect_calls]);
811
812                 /* NOT a leaf call */
813                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
814         }  /* if */
815
816         /* check if it's a chain-call: then the call-block
817          * must dominate the end block. */
818         {
819                 ir_node *curr = get_irg_end_block(graph->irg);
820                 int depth = get_Block_dom_depth(block);
821
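                /* walk up the dominator tree from the end block: if the call
                 * block is never reached, it does not dominate the end block */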
822                 for (; curr != block && get_Block_dom_depth(curr) > depth;) {
823                         curr = get_Block_idom(curr);
824
825                         if (! curr || !is_Block(curr))
826                                 break;
827                 }  /* for */
828
829                 if (curr != block)
830                         graph->is_chain_call = 0;
831         }
832
833         /* check if the callee is a leaf */
834         if (callee) {
835                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
836
837                 if (called->is_analyzed) {
838                         if (! called->is_leaf)
839                                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
840                 }  /* if */
841         }  /* if */
842
843         analyse_params_of_Call(graph, call);
844 }  /* stat_update_call */
845
846 /**
847  * Update info on calls for graphs on the wait queue.
848  */
849 static void stat_update_call_2(ir_node *call, graph_entry_t *graph)
850 {
851         ir_node   *block = get_nodes_block(call);
852         ir_node   *ptr = get_Call_ptr(call);
853         ir_entity *ent = NULL;
854         ir_graph  *callee = NULL;
855
856         /*
857          * If the block is bad, the whole subgraph will collapse later
858          * so do not count this call.
859          * This happens in dead code.
860          */
861         if (is_Bad(block))
862                 return;
863
864         if (is_SymConst(ptr)) {
865                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
866                         /* ok, we seem to know the entity */
867                         ent = get_SymConst_entity(ptr);
868                         callee = get_entity_irg(ent);
869                 }  /* if */
870         }  /* if */
871
872         /* check if the callee is a leaf */
873         if (callee) {
874                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
875
876                 assert(called->is_analyzed);
877
878                 if (! called->is_leaf)
879                         graph->is_leaf_call = LCS_NON_LEAF_CALL;
880         } else
881                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
882 }  /* stat_update_call_2 */
883
884 /**
885  * Finds the base address of a Sel node.
886  *
887  * @param sel  the node
888  *
889  * @return the base address.
890  */
891 static ir_node *find_base_adr(ir_node *sel)
892 {
893         ir_node *ptr = get_Sel_ptr(sel);
894
895         while (is_Sel(ptr)) {
896                 sel = ptr;
897                 ptr = get_Sel_ptr(sel);
898         }
899         return ptr;
900 }  /* find_base_adr */
901
902 /**
903  * Update info on Load/Store address statistics.
904  */
905 static void stat_update_address(ir_node *node, graph_entry_t *graph)
906 {
907         unsigned opc = get_irn_opcode(node);
908         ir_node *base;
909         ir_graph *irg;
910
911         switch (opc) {
912         case iro_SymConst:
913                 /* a global address */
914                 cnt_inc(&graph->cnt[gcnt_global_adr]);
915                 break;
916         case iro_Sel:
917                 base = find_base_adr(node);
918                 irg = current_ir_graph;
919                 if (base == get_irg_frame(irg)) {
920                         /* a local variable */
921                         cnt_inc(&graph->cnt[gcnt_local_adr]);
922                 } else {
923                         /* Pointer access */
924                         if (is_Proj(base) && skip_Proj(get_Proj_pred(base)) == get_irg_start(irg)) {
925                                 /* pointer access through parameter, check for THIS */
926                                 ir_entity *ent = get_irg_entity(irg);
927
928                                 if (ent != NULL) {
929                                         ir_type *ent_tp = get_entity_type(ent);
930
931                                         if (get_method_calling_convention(ent_tp) & cc_this_call) {
932                                                 if (get_Proj_proj(base) == 0) {
933                                                         /* THIS pointer */
934                                                         cnt_inc(&graph->cnt[gcnt_this_adr]);
935                                                         goto end_parameter;
936                                                 }  /* if */
937                                         }  /* if */
938                                 }  /* if */
939                                 /* other parameter */
940                                 cnt_inc(&graph->cnt[gcnt_param_adr]);
941 end_parameter: ;
942                         } else {
943                                 /* unknown Pointer access */
944                                 cnt_inc(&graph->cnt[gcnt_other_adr]);
945                         }  /* if */
946                 }  /* if */
947         default:
948                 break;
949         }  /* switch */
950 }  /* stat_update_address */
951
952 /**
953  * Walker for reachable nodes count.
954  */
955 static void update_node_stat(ir_node *node, void *env)
956 {
957         graph_entry_t *graph = (graph_entry_t*)env;
958         node_entry_t *entry;
959
960         ir_op *op = stat_get_irn_op(node);
961         int i, arity = get_irn_arity(node);
962
963         entry = opcode_get_entry(op, graph->opcode_hash);
964
965         cnt_inc(&entry->cnt_alive);
966         cnt_add_i(&graph->cnt[gcnt_edges], arity);
967
968         /* count block edges */
969         update_block_info(node, graph);
970
971         /* count extended block edges */
972         if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
973                 if (graph->irg != get_const_code_irg())
974                         update_extbb_info(node, graph);
975         }  /* if */
976
977         /* handle statistics for special node types */
978
979         switch (op->code) {
980         case iro_Call:
981                 /* check for properties that depend on calls, like recursion/leaf/indirect calls */
982                 stat_update_call(node, graph);
983                 break;
984         case iro_Load:
985                 /* check address properties */
986                 stat_update_address(get_Load_ptr(node), graph);
987                 break;
988         case iro_Store:
989                 /* check address properties */
990                 stat_update_address(get_Store_ptr(node), graph);
991                 break;
992         case iro_Phi:
993                 /* check for non-strict Phi nodes */
994                 for (i = arity - 1; i >= 0; --i) {
995                         ir_node *pred = get_Phi_pred(node, i);
996                         if (is_Unknown(pred)) {
997                                 /* found an Unknown predecessor, graph is not strict */
998                                 graph->is_strict = 0;
999                                 break;
1000                         }
1001                 }
1002         default:
1003                 break;
1004         }  /* switch */
1005
1006         /* we want to count the constant IN nodes, not the CSE'ed constants themselves */
1007         if (status->stat_options & FIRMSTAT_COUNT_CONSTS) {
1008                 int i;
1009
1010                 for (i = get_irn_arity(node) - 1; i >= 0; --i) {
1011                         ir_node *pred = get_irn_n(node, i);
1012
1013                         if (is_Const(pred)) {
1014                                 /* check properties of constants */
1015                                 stat_update_const(status, pred, graph);
1016                         }  /* if */
1017                 }  /* for */
1018         }  /* if */
1019 }  /* update_node_stat */
1020
1021 /**
1022  * Walker for reachable nodes count for graphs on the wait_q.
1023  */
1024 static void update_node_stat_2(ir_node *node, void *env)
1025 {
1026         graph_entry_t *graph = (graph_entry_t*)env;
1027
1028         /* check for properties that depend on calls, like recursion/leaf/indirect calls */
1029         if (is_Call(node))
1030                 stat_update_call_2(node, graph);
1031 }  /* update_node_stat_2 */
1032
1033 /**
1034  * Get the current address mark.
1035  */
1036 static unsigned get_adr_mark(graph_entry_t *graph, ir_node *node)
1037 {
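        /* entries are found by their node pointer only (see address_mark_cmp);
         * presumably the node is the first member of address_mark_entry_t, so a
         * pointer to the local node variable can serve as the search key */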
1038         address_mark_entry_t *value = (address_mark_entry_t*)set_find(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1039
1040         return value ? value->mark : 0;
1041 }  /* get_adr_mark */
1042
1043 /**
1044  * Set the current address mark.
1045  */
1046 static void set_adr_mark(graph_entry_t *graph, ir_node *node, unsigned val)
1047 {
1048         address_mark_entry_t *value = (address_mark_entry_t*)set_insert(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1049
1050         value->mark = val;
1051 }  /* set_adr_mark */
1052
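/* Define DUMP_ADR_MODE to register the vcg attribute hook below and to dump
 * every graph with its address-related nodes colored (see update_graph_stat()). */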
1053 #undef DUMP_ADR_MODE
1054
1055 #ifdef DUMP_ADR_MODE
1056 /**
1057  * a vcg attribute hook: Color a node with a different color if
1058  * it's identified as a part of an address expression or at least referenced
1059  * by an address expression.
1060  */
1061 static int stat_adr_mark_hook(FILE *F, ir_node *node, ir_node *local)
1062 {
1063         ir_node *n           = local ? local : node;
1064         ir_graph *irg        = get_irn_irg(n);
1065         graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1066         unsigned mark        = get_adr_mark(graph, n);
1067
1068         if (mark & MARK_ADDRESS_CALC)
1069                 fprintf(F, "color: purple");
1070         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1071                 fprintf(F, "color: pink");
1072         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1073                 fprintf(F, "color: lightblue");
1074         else
1075                 return 0;
1076
1077         /* I know the color! */
1078         return 1;
1079 }  /* stat_adr_mark_hook */
1080 #endif /* DUMP_ADR_MODE */
1081
1082 /**
1083  * Return the "operational" mode of a Firm node.
1084  */
1085 static ir_mode *get_irn_op_mode(ir_node *node)
1086 {
1087         switch (get_irn_opcode(node)) {
1088         case iro_Load:
1089                 return get_Load_mode(node);
1090         case iro_Store:
1091                 return get_irn_mode(get_Store_value(node));
1092         case iro_Div:
1093                 return get_irn_mode(get_Div_left(node));
1094         case iro_Mod:
1095                 return get_irn_mode(get_Mod_left(node));
1096         case iro_Cmp:
1097                 /* Cmp is no address calculation, or is it? */
1098         default:
1099                 return get_irn_mode(node);
1100         }  /* switch */
1101 }  /* get_irn_op_mode */
1102
1103 /**
1104  * Post-walker that marks every node that is an address calculation.
1105  *
1106  * Users of a node must be visited first. We ensure this by
1107  * calling it in the post of an outs walk. This should work even in cycles,
1108  * while the normal pre-walk will not.
1109  */
1110 static void mark_address_calc(ir_node *node, void *env)
1111 {
1112         graph_entry_t *graph = (graph_entry_t*)env;
1113         ir_mode *mode = get_irn_op_mode(node);
1114         int i, n;
1115         unsigned mark_preds = MARK_REF_NON_ADR;
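        /* mark_preds is upgraded to MARK_REF_ADR below if this node computes an
         * address itself or is referenced only by address calculations */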
1116
1117         if (! mode_is_data(mode))
1118                 return;
1119
1120         if (mode_is_reference(mode)) {
1121                 /* a reference is calculated here, we are sure */
1122                 set_adr_mark(graph, node, MARK_ADDRESS_CALC);
1123
1124                 mark_preds = MARK_REF_ADR;
1125         } else {
1126                 unsigned mark = get_adr_mark(graph, node);
1127
1128                 if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR) {
1129                         /*
1130                          * this node has no reference mode, but is only
1131                          * referenced by address calculations
1132                          */
1133                         mark_preds = MARK_REF_ADR;
1134                 }  /* if */
1135         }  /* if */
1136
1137         /* mark all predecessors */
1138         for (i = 0, n = get_irn_arity(node); i < n; ++i) {
1139                 ir_node *pred = get_irn_n(node, i);
1140
1141                 mode = get_irn_op_mode(pred);
1142                 if (! mode_is_data(mode))
1143                         continue;
1144
1145                 set_adr_mark(graph, pred, get_adr_mark(graph, pred) | mark_preds);
1146         }  /* for */
1147 }  /* mark_address_calc */
1148
1149 /**
1150  * Post-walker that counts the nodes that were marked as (part of) address
1151  * calculations by mark_address_calc().
1155  */
1156 static void count_adr_ops(ir_node *node, void *env)
1157 {
1158         graph_entry_t *graph = (graph_entry_t*)env;
1159         unsigned mark        = get_adr_mark(graph, node);
1160
1161         if (mark & MARK_ADDRESS_CALC)
1162                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1163         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1164                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1165         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1166                 cnt_inc(&graph->cnt[gcnt_all_adr_ops]);
1167 }  /* count_adr_ops */
1168
1169 /**
1170  * Called for every graph when the graph is either deleted or stat_dump_snapshot()
1171  * is called; recalculates all statistic info.
1172  *
1173  * @param global    The global entry
1174  * @param graph     The current entry
1175  */
1176 static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph)
1177 {
1178         node_entry_t *entry;
1179         int i;
1180
1181         /* first clear the alive counters in the graph */
1182         foreach_pset(graph->opcode_hash, node_entry_t*, entry) {
1183                 cnt_clr(&entry->cnt_alive);
1184         }  /* foreach_pset */
1185
1186         /* set initial values; they are corrected during the walk below */
1187         graph->is_leaf       = 1;
1188         graph->is_leaf_call  = LCS_UNKNOWN;
1189         graph->is_recursive  = 0;
1190         graph->is_chain_call = 1;
1191         graph->is_strict     = 1;
1192
1193         /* create new block counter */
1194         graph->block_hash = new_pset(block_cmp, 5);
1195
1196         /* we need dominator info */
1197         if (graph->irg != get_const_code_irg()) {
1198                 assure_doms(graph->irg);
1199
1200                 if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
1201                         /* we need extended basic blocks */
1202                         compute_extbb(graph->irg);
1203
1204                         /* create new extbb counter */
1205                         graph->extbb_hash = new_pset(block_cmp, 5);
1206                 }  /* if */
1207         }  /* if */
1208
1209         /* count the nodes in the graph */
1210         irg_walk_graph(graph->irg, update_node_stat, NULL, graph);
1211
1212 #if 0
1213         /* Uncomment this code if chain-call means exactly one call. */
1214         entry = opcode_get_entry(op_Call, graph->opcode_hash);
1215
1216         /* check if we have more than 1 call */
1217         if (cnt_gt(entry->cnt_alive, 1))
1218                 graph->is_chain_call = 0;
1219 #endif
1220
1221         /* recursive functions are never chain calls, leaf functions have no calls */
1222         if (graph->is_recursive || graph->is_leaf)
1223                 graph->is_chain_call = 0;
1224
1225         /* assume we walk every graph only ONCE, so we can sum up the global count here */
1226         foreach_pset(graph->opcode_hash, node_entry_t*, entry) {
1227                 node_entry_t *g_entry = opcode_get_entry(entry->op, global->opcode_hash);
1228
1229                 /* update the node counter */
1230                 cnt_add(&g_entry->cnt_alive, &entry->cnt_alive);
1231         }  /* foreach_pset */
1232
1233         /* count the number of address calculations */
1234         if (graph->irg != get_const_code_irg()) {
1235                 ir_graph *rem = current_ir_graph;
1236
1237                 assure_irg_outs(graph->irg);
1238
1239                 /* Must be done on the outs graph */
1240                 current_ir_graph = graph->irg;
1241                 irg_out_walk(get_irg_start(graph->irg), NULL, mark_address_calc, graph);
1242                 current_ir_graph = rem;
1243
1244 #ifdef DUMP_ADR_MODE
1245                 /* register the vcg hook and dump the graph for test */
1246                 set_dump_node_vcgattr_hook(stat_adr_mark_hook);
1247                 dump_ir_block_graph(graph->irg, "-adr");
1248                 set_dump_node_vcgattr_hook(NULL);
1249 #endif /* DUMP_ADR_MODE */
1250
1251                 irg_walk_graph(graph->irg, NULL, count_adr_ops, graph);
1252         }  /* if */
1253
1254         /* count the DAGs */
1255         if (status->stat_options & FIRMSTAT_COUNT_DAG)
1256                 count_dags_in_graph(global, graph);
1257
1258         /* calculate the patterns of this graph */
1259         stat_calc_pattern_history(graph->irg);
1260
1261         /* a leaf function does not call others */
1262         if (graph->is_leaf)
1263                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
1264         else if (graph->is_leaf_call == LCS_UNKNOWN) {
1265                 /* we still don't know if this graph calls leaf-functions, so enqueue */
1266                 pdeq_putl(status->wait_q, graph);
1267         }  /* if */
1268
1269         /* we have analyzed this graph */
1270         graph->is_analyzed = 1;
1271
1272         /* accumulate all counters */
1273         for (i = 0; i < _gcnt_last; ++i)
1274                 cnt_add(&global->cnt[i], &graph->cnt[i]);
1275 }  /* update_graph_stat */
1276
1277 /**
1278  * Called for every graph that was on the wait_q in stat_dump_snapshot()
1279  * to finish all statistic info calculations.
1280  *
1281  * @param global    The global entry
1282  * @param graph     The current entry
1283  */
1284 static void update_graph_stat_2(graph_entry_t *global, graph_entry_t *graph)
1285 {
1286         (void) global;
1287         if (graph->is_deleted) {
1288                 /* deleted, ignore */
1289                 return;
1290         }
1291
1292         if (graph->irg) {
1293                 /* count the nodes in the graph */
1294                 irg_walk_graph(graph->irg, update_node_stat_2, NULL, graph);
1295
1296                 if (graph->is_leaf_call == LCS_UNKNOWN)
1297                         graph->is_leaf_call = LCS_LEAF_CALL;
1298         }  /* if */
1299 }  /* update_graph_stat_2 */
1300
1301 /**
1302  * Register a dumper.
1303  */
1304 static void stat_register_dumper(const dumper_t *dumper)
1305 {
1306         dumper_t *p = XMALLOC(dumper_t);
1307         *p = *dumper;
1308
1309         p->next        = status->dumper;
1310         p->status      = status;
1311         status->dumper = p;
1312
1313         /* FIXME: memory leak */
1314 }  /* stat_register_dumper */
1315
1316 /**
1317  * Dumps the statistics of an IR graph.
1318  */
1319 static void stat_dump_graph(graph_entry_t *entry)
1320 {
1321         dumper_t *dumper;
1322
1323         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1324                 if (dumper->dump_graph)
1325                         dumper->dump_graph(dumper, entry);
1326         }  /* for */
1327 }  /* stat_dump_graph */
1328
1329 /**
1330  * Calls all registered dumper functions.
1331  */
1332 static void stat_dump_registered(graph_entry_t *entry)
1333 {
1334         dumper_t *dumper;
1335
1336         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1337                 if (dumper->func_map) {
1338                         dump_graph_FUNC func;
1339
1340                         foreach_pset(dumper->func_map, dump_graph_FUNC, func)
1341                                 func(dumper, entry);
1342                 }  /* if */
1343         }  /* for */
1344 }  /* stat_dump_registered */
1345
1346 /**
1347  * Dumps a constant table.
1348  */
1349 static void stat_dump_consts(const constant_info_t *tbl)
1350 {
1351         dumper_t *dumper;
1352
1353         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1354                 if (dumper->dump_const_tbl)
1355                         dumper->dump_const_tbl(dumper, tbl);
1356         }  /* for */
1357 }  /* stat_dump_consts */
1358
1359 /**
1360  * Dumps the parameter distribution
1361  */
1362 static void stat_dump_param_tbl(const distrib_tbl_t *tbl, graph_entry_t *global)
1363 {
1364         dumper_t *dumper;
1365
1366         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1367                 if (dumper->dump_param_tbl)
1368                         dumper->dump_param_tbl(dumper, tbl, global);
1369         }  /* for */
1370 }  /* stat_dump_param_tbl */
1371
1372 /**
1373  * Dumps the optimization counter
1374  */
1375 static void stat_dump_opt_cnt(const counter_t *tbl, unsigned len)
1376 {
1377         dumper_t *dumper;
1378
1379         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1380                 if (dumper->dump_opt_cnt)
1381                         dumper->dump_opt_cnt(dumper, tbl, len);
1382         }  /* for */
1383 }  /* stat_dump_opt_cnt */
1384
1385 /**
1386  * Initialize the dumper.
1387  */
1388 static void stat_dump_init(const char *name)
1389 {
1390         dumper_t *dumper;
1391
1392         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1393                 if (dumper->init)
1394                         dumper->init(dumper, name);
1395         }  /* for */
1396 }  /* stat_dump_init */
1397
1398 /**
1399  * Finish the dumper.
1400  */
1401 static void stat_dump_finish(void)
1402 {
1403         dumper_t *dumper;
1404
1405         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1406                 if (dumper->finish)
1407                         dumper->finish(dumper);
1408         }  /* for */
1409 }  /* stat_dump_finish */
1410
1411 /**
1412  * Registers an additional function for all dumpers.
1413  */
1414 void stat_register_dumper_func(dump_graph_FUNC func)
1415 {
1416         dumper_t *dumper;
1417
1418         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1419                 if (! dumper->func_map)
1420                         dumper->func_map = pset_new_ptr(3);
1421                 pset_insert_ptr(dumper->func_map, (void*)func);
1422         }  /* for */
1423 }  /* stat_register_dumper_func */
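/*
 * Usage sketch (illustrative only, not part of this file): an additional dump
 * function must match dump_graph_FUNC, i.e. take the dumper and the graph
 * entry. The hypothetical my_dump_func below just prints the graph number:
 *
 *     static void my_dump_func(dumper_t *dmp, graph_entry_t *entry)
 *     {
 *         (void)dmp;
 *         if (entry->irg != NULL)
 *             printf("graph %ld\n", get_irg_graph_nr(entry->irg));
 *     }
 *
 *     stat_register_dumper_func(my_dump_func);
 */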
1424
1425 /* ---------------------------------------------------------------------- */
1426
1427 /*
1428  * Helper: get an ir_op from an opcode.
1429  */
1430 ir_op *stat_get_op_from_opcode(unsigned code)
1431 {
1432         return opcode_find_entry(code, status->ir_op_hash);
1433 }  /* stat_get_op_from_opcode */
1434
1435 /**
1436  * Hook: A new IR op is registered.
1437  *
1438  * @param ctx  the hook context
1439  * @param op   the new IR opcode that was created.
1440  */
1441 static void stat_new_ir_op(void *ctx, ir_op *op)
1442 {
1443         (void) ctx;
1444         if (! status->stat_options)
1445                 return;
1446
1447         STAT_ENTER;
1448         {
1449                 graph_entry_t *graph = graph_get_entry(NULL, status->irg_hash);
1450
1451                 /* execute for side effect :-) */
1452                 (void)opcode_get_entry(op, graph->opcode_hash);
1453
1454                 pset_insert(status->ir_op_hash, op, op->code);
1455         }
1456         STAT_LEAVE;
1457 }  /* stat_new_ir_op */
1458
1459 /**
1460  * Hook: An IR op is freed.
1461  *
1462  * @param ctx  the hook context
1463  * @param op   the IR opcode that is freed
1464  */
1465 static void stat_free_ir_op(void *ctx, ir_op *op)
1466 {
1467         (void) ctx;
1468         (void) op;
1469         if (! status->stat_options)
1470                 return;
1471
1472         STAT_ENTER;
1473         {
1474         }
1475         STAT_LEAVE;
1476 }  /* stat_free_ir_op */
1477
1478 /**
1479  * Hook: A new node is created.
1480  *
1481  * @param ctx   the hook context
1482  * @param irg   the IR graph on which the node is created
1483  * @param node  the new IR node that was created
1484  */
1485 static void stat_new_node(void *ctx, ir_graph *irg, ir_node *node)
1486 {
1487         (void) ctx;
1488         (void) irg;
1489         if (! status->stat_options)
1490                 return;
1491
1492         /* do NOT count during dead node elimination */
1493         if (status->in_dead_node_elim)
1494                 return;
1495
1496         STAT_ENTER;
1497         {
1498                 node_entry_t *entry;
1499                 graph_entry_t *graph;
1500                 ir_op *op = stat_get_irn_op(node);
1501
1502                 /* increase global value */
1503                 graph = graph_get_entry(NULL, status->irg_hash);
1504                 entry = opcode_get_entry(op, graph->opcode_hash);
1505                 cnt_inc(&entry->new_node);
1506
1507                 /* increase local value */
1508                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1509                 entry = opcode_get_entry(op, graph->opcode_hash);
1510                 cnt_inc(&entry->new_node);
1511         }
1512         STAT_LEAVE;
1513 }  /* stat_new_node */
1514
1515 /**
1516  * Hook: A node is changed into an Id node
1517  *
1518  * @param ctx   the hook context
1519  * @param node  the IR node that will be turned into an ID
1520  */
1521 static void stat_turn_into_id(void *ctx, ir_node *node)
1522 {
1523         (void) ctx;
1524         if (! status->stat_options)
1525                 return;
1526
1527         STAT_ENTER;
1528         {
1529                 node_entry_t *entry;
1530                 graph_entry_t *graph;
1531                 ir_op *op = stat_get_irn_op(node);
1532
1533                 /* increase global value */
1534                 graph = graph_get_entry(NULL, status->irg_hash);
1535                 entry = opcode_get_entry(op, graph->opcode_hash);
1536                 cnt_inc(&entry->into_Id);
1537
1538                 /* increase local value */
1539                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1540                 entry = opcode_get_entry(op, graph->opcode_hash);
1541                 cnt_inc(&entry->into_Id);
1542         }
1543         STAT_LEAVE;
1544 }  /* stat_turn_into_id */
1545
1546 /**
1547  * Hook: A node is normalized
1548  *
1549  * @param ctx   the hook context
1550  * @param node  the IR node that was normalized
1551  */
1552 static void stat_normalize(void *ctx, ir_node *node)
1553 {
1554         (void) ctx;
1555         if (! status->stat_options)
1556                 return;
1557
1558         STAT_ENTER;
1559         {
1560                 node_entry_t *entry;
1561                 graph_entry_t *graph;
1562                 ir_op *op = stat_get_irn_op(node);
1563
1564                 /* increase global value */
1565                 graph = graph_get_entry(NULL, status->irg_hash);
1566                 entry = opcode_get_entry(op, graph->opcode_hash);
1567                 cnt_inc(&entry->normalized);
1568
1569                 /* increase local value */
1570                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1571                 entry = opcode_get_entry(op, graph->opcode_hash);
1572                 cnt_inc(&entry->normalized);
1573         }
1574         STAT_LEAVE;
1575 }  /* stat_normalize */
1576
1577 /**
1578  * Hook: A new graph was created
1579  *
1580  * @param ctx  the hook context
1581  * @param irg  the new IR graph that was created
1582  * @param ent  the entity of this graph
1583  */
1584 static void stat_new_graph(void *ctx, ir_graph *irg, ir_entity *ent)
1585 {
1586         (void) ctx;
1587         if (! status->stat_options)
1588                 return;
1589
1590         STAT_ENTER;
1591         {
1592                 /* execute for side effect :-) */
1593                 graph_entry_t * graph = graph_get_entry(irg, status->irg_hash);
1594
1595                 graph->ent           = ent;
1596                 graph->is_deleted    = 0;
1597                 graph->is_leaf       = 0;
1598                 graph->is_leaf_call  = 0;
1599                 graph->is_recursive  = 0;
1600                 graph->is_chain_call = 0;
1601                 graph->is_strict     = 1;
1602                 graph->is_analyzed   = 0;
1603         }
1604         STAT_LEAVE;
1605 }  /* stat_new_graph */
1606
1607 /**
1608  * Hook: A graph will be deleted
1609  *
1610  * @param ctx  the hook context
1611  * @param irg  the IR graph that will be deleted
1612  *
1613  * Note that we still hold the information for this graph
1614  * in our hash maps; only a flag is set which prevents this
1615  * information from being changed. It is "frozen" from now on.
1616  */
1617 static void stat_free_graph(void *ctx, ir_graph *irg)
1618 {
1619         (void) ctx;
1620         if (! status->stat_options)
1621                 return;
1622
1623         STAT_ENTER;
1624         {
1625                 graph_entry_t *graph  = graph_get_entry(irg, status->irg_hash);
1626                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
1627
1628                 graph->is_deleted = 1;
1629
1630                 if (status->stat_options & FIRMSTAT_COUNT_DELETED) {
1631                         /* count the nodes of the graph now, as it will be destroyed later */
1632                         update_graph_stat(global, graph);
1633                 }  /* if */
1634         }
1635         STAT_LEAVE;
1636 }  /* stat_free_graph */
1637
1638 /**
1639  * Hook: A walk over a graph is initiated. Do not count walks from statistics code.
1640  *
1641  * @param ctx  the hook context
1642  * @param irg  the IR graph that will be walked
1643  * @param pre  the pre walker
1644  * @param post the post walker
1645  */
1646 static void stat_irg_walk(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1647 {
1648         (void) ctx;
1649         (void) pre;
1650         (void) post;
1651         if (! status->stat_options)
1652                 return;
1653
1654         STAT_ENTER_SINGLE;
1655         {
1656                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1657
1658                 cnt_inc(&graph->cnt[gcnt_acc_walked]);
1659         }
1660         STAT_LEAVE;
1661 }  /* stat_irg_walk */
1662
1663 /**
1664  * Hook: A walk over a graph in block-wise order is initiated. Do not count walks from statistics code.
1665  *
1666  * @param ctx  the hook context
1667  * @param irg  the IR graph that will be walked
1668  * @param pre  the pre walker
1669  * @param post the post walker
1670  */
1671 static void stat_irg_walk_blkwise(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1672 {
1673         /* for now, do NOT differentiate between blockwise and normal */
1674         stat_irg_walk(ctx, irg, pre, post);
1675 }  /* stat_irg_walk_blkwise */
1676
1677 /**
1678  * Hook: A walk over the graph's blocks is initiated. Do not count walks from statistics code.
1679  *
1680  * @param ctx  the hook context
1681  * @param irg  the IR graph that will be walked
1682  * @param node the node from which the block walk starts
1683  * @param pre  the pre walker
1684  * @param post the post walker
1685  */
1686 static void stat_irg_block_walk(void *ctx, ir_graph *irg, ir_node *node, generic_func *pre, generic_func *post)
1687 {
1688         (void) ctx;
1689         (void) node;
1690         (void) pre;
1691         (void) post;
1692         if (! status->stat_options)
1693                 return;
1694
1695         STAT_ENTER_SINGLE;
1696         {
1697                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1698
1699                 cnt_inc(&graph->cnt[gcnt_acc_walked_blocks]);
1700         }
1701         STAT_LEAVE;
1702 }  /* stat_irg_block_walk */
1703
1704 /**
1705  * Called for every node that is removed due to an optimization.
1706  *
1707  * @param n     the IR node that will be removed
1708  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
1709  * @param kind  the optimization kind
1710  */
1711 static void removed_due_opt(ir_node *n, hmap_opt_entry_t *hmap, hook_opt_kind kind)
1712 {
1713         opt_entry_t *entry;
1714         ir_op *op = stat_get_irn_op(n);
1715
1716         /* ignore CSE for Constants */
1717         if (kind == HOOK_OPT_CSE && (is_Const(n) || is_SymConst(n)))
1718                 return;
1719
1720         /* increase global value */
1721         entry = opt_get_entry(op, hmap);
1722         cnt_inc(&entry->count);
1723 }  /* removed_due_opt */
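/*
 * Note: callers select the hash map as graph->opt_hash[kind], i.e. every
 * graph entry keeps one per-opcode counter table for each optimization kind.
 */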
1724
1725 /**
1726  * Hook: Some nodes were optimized into some others due to an optimization.
1727  *
1728  * @param ctx  the hook context
1729  */
1730 static void stat_merge_nodes(
1731     void *ctx,
1732     ir_node **new_node_array, int new_num_entries,
1733     ir_node **old_node_array, int old_num_entries,
1734     hook_opt_kind opt)
1735 {
1736         (void) ctx;
1737         if (! status->stat_options)
1738                 return;
1739
1740         STAT_ENTER;
1741         {
1742                 int i, j;
1743                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1744
1745                 cnt_inc(&status->num_opts[opt]);
1746                 if (status->reassoc_run)
1747                         opt = HOOK_OPT_REASSOC;
1748
1749                 for (i = 0; i < old_num_entries; ++i) {
1750                         /* nodes might be in both the new and the old array; if we
1751                            find a node in both sets, it has NOT been removed */
1752                         for (j = 0; j < new_num_entries; ++j) {
1753                                 if (old_node_array[i] == new_node_array[j])
1754                                         break;
1755                         }  /* for */
1756                         if (j >= new_num_entries) {
1757                                 int xopt = opt;
1758
1759                                 /* sometimes we do not detect that a node was replaced by a Const */
1760                                 if (opt == HOOK_OPT_CONFIRM && new_num_entries == 1) {
1761                                         ir_op *op = get_irn_op(new_node_array[0]);
1762
1763                                         if (op == op_Const || op == op_SymConst)
1764                                                 xopt = HOOK_OPT_CONFIRM_C;
1765                                 }  /* if */
1766
1767                                 removed_due_opt(old_node_array[i], graph->opt_hash[xopt], (hook_opt_kind)xopt);
1768                         }  /* if */
1769                 }  /* for */
1770         }
1771         STAT_LEAVE;
1772 }  /* stat_merge_nodes */
1773
1774 /**
1775  * Hook: Reassociation is started/stopped.
1776  *
1777  * @param ctx   the hook context
1778  * @param flag  if non-zero, reassociation is started, otherwise it is stopped
1779  */
1780 static void stat_reassociate(void *ctx, int flag)
1781 {
1782         (void) ctx;
1783         if (! status->stat_options)
1784                 return;
1785
1786         STAT_ENTER;
1787         {
1788                 status->reassoc_run = flag;
1789         }
1790         STAT_LEAVE;
1791 }  /* stat_reassociate */
1792
1793 /**
1794  * Hook: A node is lowered into other nodes
1795  *
1796  * @param ctx  the hook context
1797  * @param node the IR node that will be lowered
1798  */
1799 static void stat_lower(void *ctx, ir_node *node)
1800 {
1801         (void) ctx;
1802         if (! status->stat_options)
1803                 return;
1804
1805         STAT_ENTER;
1806         {
1807                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1808
1809                 removed_due_opt(node, graph->opt_hash[HOOK_LOWERED], HOOK_LOWERED);
1810         }
1811         STAT_LEAVE;
1812 }  /* stat_lower */
1813
1814 /**
1815  * Hook: A graph was inlined.
1816  *
1817  * @param ctx  the hook context
1818  * @param call the IR call that will be changed into the body of
1819  *             the called IR graph
1820  * @param called_irg  the IR graph representing the called routine
1821  */
1822 static void stat_inline(void *ctx, ir_node *call, ir_graph *called_irg)
1823 {
1824         (void) ctx;
1825         if (! status->stat_options)
1826                 return;
1827
1828         STAT_ENTER;
1829         {
1830                 ir_graph *irg = get_irn_irg(call);
1831                 graph_entry_t *i_graph = graph_get_entry(called_irg, status->irg_hash);
1832                 graph_entry_t *graph   = graph_get_entry(irg, status->irg_hash);
1833
1834                 cnt_inc(&graph->cnt[gcnt_acc_got_inlined]);
1835                 cnt_inc(&i_graph->cnt[gcnt_acc_was_inlined]);
1836         }
1837         STAT_LEAVE;
1838 }  /* stat_inline */
1839
1840 /**
1841  * Hook: A graph with tail-recursions was optimized.
1842  *
1843  * @param ctx  the hook context
1844  */
1845 static void stat_tail_rec(void *ctx, ir_graph *irg, int n_calls)
1846 {
1847         (void) ctx;
1848         if (! status->stat_options)
1849                 return;
1850
1851         STAT_ENTER;
1852         {
1853                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1854
1855                 graph->num_tail_recursion += n_calls;
1856         }
1857         STAT_LEAVE;
1858 }  /* stat_tail_rec */
1859
1860 /**
1861  * Hook: Strength reduction was performed on an iteration variable.
1862  *
1863  * @param ctx  the hook context
1864  */
1865 static void stat_strength_red(void *ctx, ir_graph *irg, ir_node *strong)
1866 {
1867         (void) ctx;
1868         if (! status->stat_options)
1869                 return;
1870
1871         STAT_ENTER;
1872         {
1873                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1874                 cnt_inc(&graph->cnt[gcnt_acc_strength_red]);
1875
1876                 removed_due_opt(strong, graph->opt_hash[HOOK_OPT_STRENGTH_RED], HOOK_OPT_STRENGTH_RED);
1877         }
1878         STAT_LEAVE;
1879 }  /* stat_strength_red */
1880
1881 /**
1882  * Hook: Start/Stop the dead node elimination.
1883  *
1884  * @param ctx  the hook context
1885  */
1886 static void stat_dead_node_elim(void *ctx, ir_graph *irg, int start)
1887 {
1888         (void) ctx;
1889         (void) irg;
1890         if (! status->stat_options)
1891                 return;
1892
1893         status->in_dead_node_elim = (start != 0);
1894 }  /* stat_dead_node_elim */
1895
1896 /**
1897  * Hook: if-conversion was tried.
1898  */
1899 static void stat_if_conversion(void *context, ir_graph *irg, ir_node *phi,
1900                                int pos, ir_node *mux, if_result_t reason)
1901 {
1902         (void) context;
1903         (void) phi;
1904         (void) pos;
1905         (void) mux;
1906         if (! status->stat_options)
1907                 return;
1908
1909         STAT_ENTER;
1910         {
1911                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1912
1913                 cnt_inc(&graph->cnt[gcnt_if_conv + reason]);
1914         }
1915         STAT_LEAVE;
1916 }  /* stat_if_conversion */
1917
1918 /**
1919  * Hook: real function call was optimized.
1920  */
1921 static void stat_func_call(void *context, ir_graph *irg, ir_node *call)
1922 {
1923         (void) context;
1924         (void) call;
1925         if (! status->stat_options)
1926                 return;
1927
1928         STAT_ENTER;
1929         {
1930                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1931
1932                 cnt_inc(&graph->cnt[gcnt_acc_real_func_call]);
1933         }
1934         STAT_LEAVE;
1935 }  /* stat_func_call */
1936
1937 /**
1938  * Hook: A multiply was replaced by a series of Shifts/Adds/Subs.
1939  *
1940  * @param ctx  the hook context
1941  */
1942 static void stat_arch_dep_replace_mul_with_shifts(void *ctx, ir_node *mul)
1943 {
1944         (void) ctx;
1945         if (! status->stat_options)
1946                 return;
1947
1948         STAT_ENTER;
1949         {
1950                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1951                 removed_due_opt(mul, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1952         }
1953         STAT_LEAVE;
1954 }  /* stat_arch_dep_replace_mul_with_shifts */
1955
1956 /**
1957  * Hook: A division by const was replaced.
1958  *
1959  * @param ctx   the hook context
1960  * @param node  the division node that will be optimized
1961  */
1962 static void stat_arch_dep_replace_division_by_const(void *ctx, ir_node *node)
1963 {
1964         (void) ctx;
1965         if (! status->stat_options)
1966                 return;
1967
1968         STAT_ENTER;
1969         {
1970                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1971                 removed_due_opt(node, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1972         }
1973         STAT_LEAVE;
1974 }  /* stat_arch_dep_replace_division_by_const */
1975
1976 /**
1977  * Update the register pressure of a block.
1978  *
1979  * @param irg        the irg containing the block
1980  * @param block      the block for which the reg pressure should be set
1981  * @param pressure   the pressure
1982  * @param class_name the name of the register class
1983  */
1984 void stat_be_block_regpressure(ir_graph *irg, ir_node *block, int pressure, const char *class_name)
1985 {
1986         if (! status->stat_options)
1987                 return;
1988
1989         STAT_ENTER;
1990         {
1991                 graph_entry_t        *graph = graph_get_entry(irg, status->irg_hash);
1992                 be_block_entry_t     *block_ent;
1993                 reg_pressure_entry_t *rp_ent;
1994
1995                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
1996                 rp_ent    = OALLOCZ(&status->be_data, reg_pressure_entry_t);
1997
1998                 rp_ent->class_name = class_name;
1999                 rp_ent->pressure   = pressure;
2000
2001                 pset_insert(block_ent->reg_pressure, rp_ent, HASH_PTR(class_name));
2002         }
2003         STAT_LEAVE;
2004 }  /* stat_be_block_regpressure */
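/*
 * Illustrative caller-side sketch (the argument names are assumptions, not
 * part of this module): a backend register allocator could report the
 * pressure it computed per block and register class via
 *
 *     stat_be_block_regpressure(irg, block, pressure, cls_name);
 *
 * where cls_name is whatever register class name string the backend uses.
 */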
2005
2006 /**
2007  * Update the distribution of ready nodes of a block
2008  *
2009  * @param irg        the irg containing the block
2010  * @param block      the block for which the number of ready nodes is recorded
2011  * @param num_ready  the number of ready nodes
2012  */
2013 void stat_be_block_sched_ready(ir_graph *irg, ir_node *block, int num_ready)
2014 {
2015         if (! status->stat_options)
2016                 return;
2017
2018         STAT_ENTER;
2019         {
2020                 graph_entry_t    *graph = graph_get_entry(irg, status->irg_hash);
2021                 be_block_entry_t *block_ent;
2022
2023                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2024
2025                 /* increase the counter of corresponding number of ready nodes */
2026                 stat_inc_int_distrib_tbl(block_ent->sched_ready, num_ready);
2027         }
2028         STAT_LEAVE;
2029 }  /* stat_be_block_sched_ready */
2030
2031 /**
2032  * Update the permutation statistic of a perm node in a block.
2033  *
2034  * @param class_name the name of the register class
2035  * @param n_regs     number of registers in the register class
2036  * @param perm       the perm node
2037  * @param block      the block containing the perm
2038  * @param size       the size of the perm
2039  * @param real_size  number of pairs with different registers
2040  */
2041 void stat_be_block_stat_perm(const char *class_name, int n_regs, ir_node *perm, ir_node *block,
2042                              int size, int real_size)
2043 {
2044         if (! status->stat_options)
2045                 return;
2046
2047         STAT_ENTER;
2048         {
2049                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2050                 be_block_entry_t   *block_ent;
2051                 perm_class_entry_t *pc_ent;
2052                 perm_stat_entry_t  *ps_ent;
2053
2054                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2055                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2056                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2057
2058                 pc_ent->n_regs = n_regs;
2059
2060                 /* update information */
2061                 ps_ent->size      = size;
2062                 ps_ent->real_size = real_size;
2063         }
2064         STAT_LEAVE;
2065 }  /* stat_be_block_stat_perm */
2066
2067 /**
2068  * Update the permutation statistic of a single cycle/chain within a perm.
2069  *
2070  * @param class_name the name of the register class
2071  * @param perm       the perm node
2072  * @param block      the block containing the perm
2073  * @param is_chain   1 if chain, 0 if cycle
2074  * @param size       length of the cycle/chain
2075  * @param n_ops      the number of ops representing this cycle/chain after lowering
2076  */
2077 void stat_be_block_stat_permcycle(const char *class_name, ir_node *perm, ir_node *block,
2078                                   int is_chain, int size, int n_ops)
2079 {
2080         if (! status->stat_options)
2081                 return;
2082
2083         STAT_ENTER;
2084         {
2085                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2086                 be_block_entry_t   *block_ent;
2087                 perm_class_entry_t *pc_ent;
2088                 perm_stat_entry_t  *ps_ent;
2089
2090                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2091                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2092                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2093
2094                 if (is_chain) {
2095                         ps_ent->n_copies += n_ops;
2096                         stat_inc_int_distrib_tbl(ps_ent->chains, size);
2097                 } else {
2098                         ps_ent->n_exchg += n_ops;
2099                         stat_inc_int_distrib_tbl(ps_ent->cycles, size);
2100                 }  /* if */
2101         }
2102         STAT_LEAVE;
2103 }  /* stat_be_block_stat_permcycle */
2104
2105 /* Dumps a statistics snapshot. */
2106 void stat_dump_snapshot(const char *name, const char *phase)
2107 {
2108         char fname[2048];
2109         const char *p;
2110         size_t l;
2111
2112         if (! status->stat_options)
2113                 return;
2114
2115         STAT_ENTER;
2116         {
2117                 graph_entry_t *entry;
2118                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
2119
2120                 /*
2121                  * The constant counter is only global, so we clear it here.
2122                  * Note that, because of this, it does NOT contain the
2123                  * constants of DELETED graphs.
2124                  */
2125                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2126                         stat_const_clear(status);
2127
2128                 /* build the name */
2129                 p = strrchr(name, '/');
2130 #ifdef _WIN32
2131                 {
2132                         const char *q;
2133
2134                         q = strrchr(name, '\\');
2135
2136                         /* NULL might not be the smallest pointer */
2137                         if (q && (!p || q > p))
2138                                 p = q;
2139                 }
2140 #endif /* _WIN32 */
2141                 if (p) {
2142                         ++p;
2143                         l = p - name;
2144
2145                         if (l > sizeof(fname) - 1)
2146                                 l = sizeof(fname) - 1;
2147
2148                         memcpy(fname, name, l);
2149                         fname[l] = '\0';
2150                 } else {
2151                         fname[0] = '\0';
2152                         p = name;
2153                 }  /* if */
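                /* at this point fname holds the directory prefix of name (possibly
                   empty) and p points to its base name; the snapshot name is
                   <prefix>firmstat-<phase>-<basename> */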
2154                 strncat(fname, "firmstat-", sizeof(fname) - strlen(fname) - 1);
2155                 strncat(fname, phase, sizeof(fname) - strlen(fname) - 1);
2156                 strncat(fname, "-", sizeof(fname) - strlen(fname) - 1);
2157                 strncat(fname, p, sizeof(fname) - strlen(fname) - 1);
2158
2159                 stat_dump_init(fname);
2160
2161                 /* calculate the graph statistics */
2162                 for (entry = (graph_entry_t*)pset_first(status->irg_hash);
2163                       entry != NULL; entry = (graph_entry_t*)pset_next(status->irg_hash)) {
2164                         if (entry->irg == NULL) {
2165                                 /* special entry for the global count */
2166                                 continue;
2167                         }  /* if */
2168                         if (! entry->is_deleted) {
2169                                 /* the graph is still alive, count the nodes on it */
2170                                 update_graph_stat(global, entry);
2171                         }  /* if */
2172                 }  /* for */
2173
2174                 /* some calculations depend on other graphs; the affected entries were pushed onto the wait_q */
2175                 while (! pdeq_empty(status->wait_q)) {
2176                         entry = (graph_entry_t*)pdeq_getr(status->wait_q);
2177
2178                         update_graph_stat_2(global, entry);
2179                 }  /* while */
2180
2181                 /* dump per graph */
2182                 for (entry = (graph_entry_t*)pset_first(status->irg_hash);
2183                      entry != NULL; entry = (graph_entry_t*)pset_next(status->irg_hash)) {
2184                         if (entry->irg == NULL) {
2185                                 /* special entry for the global count */
2186                                 continue;
2187                         }  /* if */
2188
2189                         if (! entry->is_deleted || status->stat_options & FIRMSTAT_COUNT_DELETED) {
2190                                 stat_dump_graph(entry);
2191                                 stat_dump_registered(entry);
2192                         }  /* if */
2193
2194                         if (! entry->is_deleted) {
2195                                 /* clear the counters that are not accumulated */
2196                                 graph_clear_entry(entry, 0);
2197                         }  /* if */
2198                 }  /* for */
2199
2200                 /* dump global */
2201                 stat_dump_graph(global);
2202
2203                 /* dump the const info */
2204                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2205                         stat_dump_consts(&status->const_info);
2206
2207                 /* dump the parameter distribution */
2208                 stat_dump_param_tbl(status->dist_param_cnt, global);
2209
2210                 /* dump the optimization counter and clear them */
2211                 stat_dump_opt_cnt(status->num_opts, ARRAY_SIZE(status->num_opts));
2212                 clear_optimization_counter();
2213
2214                 stat_dump_finish();
2215
2216                 stat_finish_pattern_history(fname);
2217
2218                 /* clear the global counters here */
2219                 {
2220                         node_entry_t *entry;
2221
2222                         for (entry = (node_entry_t*)pset_first(global->opcode_hash);
2223                              entry != NULL; entry = (node_entry_t*)pset_next(global->opcode_hash)) {
2224                                 opcode_clear_entry(entry);
2225                         }  /* for */
2226                         /* clear all global counters */
2227                         graph_clear_entry(global, /*all=*/1);
2228                 }
2229         }
2230         STAT_LEAVE;
2231 }  /* stat_dump_snapshot */
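/*
 * Minimal usage sketch (assumes statistics were enabled via firm_init_stat(),
 * see below). Dumping a snapshot after some compiler phase, e.g.
 *
 *     stat_dump_snapshot("main.c", "lower");
 *
 * lets the registered dumpers write the accumulated counters to output based
 * on the name "firmstat-lower-main.c" (dumpers may append their own suffixes).
 */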
2232
2233 typedef struct pass_t {
2234         ir_prog_pass_t pass;
2235         const char     *fname;
2236         const char     *phase;
2237 } pass_t;
2238
2239 /**
2240  * Wrapper to run stat_dump_snapshot() as an ir_prog pass.
2241  */
2242 static int stat_dump_snapshot_wrapper(ir_prog *irp, void *context)
2243 {
2244         pass_t *pass = (pass_t*)context;
2245
2246         (void)irp;
2247         stat_dump_snapshot(pass->fname, pass->phase);
2248         return 0;
2249 }  /* stat_dump_snapshot_wrapper */
2250
2251 /**
2252  * Ensure that no verifier is run from the wrapper.
2253  */
2254 static int no_verify(ir_prog *prog, void *ctx)
2255 {
2256         (void)prog;
2257         (void)ctx;
2258         return 0;
2259 }
2260
2261 /**
2262  * Ensure that no dumper is run from the wrapper.
2263  */
2264 static void no_dump(ir_prog *prog, void *ctx, unsigned idx)
2265 {
2266         (void)prog;
2267         (void)ctx;
2268         (void)idx;
2269 }
2270
2271 /* create an ir_prog pass */
2272 ir_prog_pass_t *stat_dump_snapshot_pass(
2273         const char *name, const char *fname, const char *phase)
2274 {
2275         pass_t *pass = XMALLOCZ(pass_t);
2276
2277         def_prog_pass_constructor(
2278                 &pass->pass, name ? name : "stat_snapshot", stat_dump_snapshot_wrapper);
2279         pass->fname = fname;
2280         pass->phase = phase;
2281
2282         /* no dump/verify */
2283         pass->pass.dump_irprog   = no_dump;
2284         pass->pass.verify_irprog = no_verify;
2285
2286         return &pass->pass;
2287 }  /* stat_dump_snapshot_pass */
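/*
 * Rough usage sketch; how the pass is scheduled is left out here because it
 * depends on the ir_prog pass manager (see irpass_t.h), not on this module:
 *
 *     ir_prog_pass_t *snap = stat_dump_snapshot_pass(NULL, "main.c", "opt");
 *     // ...add snap to an ir_prog pass manager and run it; the wrapper then
 *     //    calls stat_dump_snapshot("main.c", "opt")...
 */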
2288
2289 /** the hook entries for the Firm statistics module */
2290 static hook_entry_t stat_hooks[hook_last];
2291
2292 /* initialize the statistics module. */
2293 void firm_init_stat(unsigned enable_options)
2294 {
2295 #define X(a)  a, sizeof(a)-1
2296 #define HOOK(h, fkt) \
2297         stat_hooks[h].hook._##h = fkt; register_hook(h, &stat_hooks[h])
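        /* HOOK(h, fkt) stores fkt in the matching hook union member and registers
         * the entry; e.g. HOOK(hook_new_node, stat_new_node) expands to
         *   stat_hooks[hook_new_node].hook._hook_new_node = stat_new_node;
         *   register_hook(hook_new_node, &stat_hooks[hook_new_node]);
         */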
2298         unsigned num = 0;
2299
2300         if (! (enable_options & FIRMSTAT_ENABLED))
2301                 return;
2302
2303         status = XMALLOCZ(stat_info_t);
2304
2305         /* enable statistics */
2306         status->stat_options = (enable_options & FIRMSTAT_ENABLED) ? enable_options : 0;
2307
2308         /* register all hooks */
2309         HOOK(hook_new_ir_op,                          stat_new_ir_op);
2310         HOOK(hook_free_ir_op,                         stat_free_ir_op);
2311         HOOK(hook_new_node,                           stat_new_node);
2312         HOOK(hook_turn_into_id,                       stat_turn_into_id);
2313         HOOK(hook_normalize,                          stat_normalize);
2314         HOOK(hook_new_graph,                          stat_new_graph);
2315         HOOK(hook_free_graph,                         stat_free_graph);
2316         HOOK(hook_irg_walk,                           stat_irg_walk);
2317         HOOK(hook_irg_walk_blkwise,                   stat_irg_walk_blkwise);
2318         HOOK(hook_irg_block_walk,                     stat_irg_block_walk);
2319         HOOK(hook_merge_nodes,                        stat_merge_nodes);
2320         HOOK(hook_reassociate,                        stat_reassociate);
2321         HOOK(hook_lower,                              stat_lower);
2322         HOOK(hook_inline,                             stat_inline);
2323         HOOK(hook_tail_rec,                           stat_tail_rec);
2324         HOOK(hook_strength_red,                       stat_strength_red);
2325         HOOK(hook_dead_node_elim,                     stat_dead_node_elim);
2326         HOOK(hook_if_conversion,                      stat_if_conversion);
2327         HOOK(hook_func_call,                          stat_func_call);
2328         HOOK(hook_arch_dep_replace_mul_with_shifts,   stat_arch_dep_replace_mul_with_shifts);
2329         HOOK(hook_arch_dep_replace_division_by_const, stat_arch_dep_replace_division_by_const);
2330
2331         obstack_init(&status->cnts);
2332         obstack_init(&status->be_data);
2333
2334         /* create the hash-tables */
2335         status->irg_hash   = new_pset(graph_cmp, 8);
2336         status->ir_op_hash = new_pset(opcode_cmp_2, 1);
2337
2338         /* create the wait queue */
2339         status->wait_q     = new_pdeq();
2340
2341         if (enable_options & FIRMSTAT_COUNT_STRONG_OP) {
2342                 /* build the pseudo-ops */
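                /* the pseudo opcodes get codes counting down from UINT_MAX (--num on
                   an unsigned that starts at 0), keeping them clear of the codes of
                   the real Firm opcodes */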
2343
2344                 _op_Phi0.code    = --num;
2345                 _op_Phi0.name    = new_id_from_chars(X("Phi0"));
2346
2347                 _op_PhiM.code    = --num;
2348                 _op_PhiM.name    = new_id_from_chars(X("PhiM"));
2349
2350                 _op_ProjM.code   = --num;
2351                 _op_ProjM.name   = new_id_from_chars(X("ProjM"));
2352
2353                 _op_MulC.code    = --num;
2354                 _op_MulC.name    = new_id_from_chars(X("MulC"));
2355
2356                 _op_DivC.code    = --num;
2357                 _op_DivC.name    = new_id_from_chars(X("DivC"));
2358
2359                 _op_ModC.code    = --num;
2360                 _op_ModC.name    = new_id_from_chars(X("ModC"));
2361
2362                 status->op_Phi0    = &_op_Phi0;
2363                 status->op_PhiM    = &_op_PhiM;
2364                 status->op_ProjM   = &_op_ProjM;
2365                 status->op_MulC    = &_op_MulC;
2366                 status->op_DivC    = &_op_DivC;
2367                 status->op_ModC    = &_op_ModC;
2368         } else {
2369                 status->op_Phi0    = NULL;
2370                 status->op_PhiM    = NULL;
2371                 status->op_ProjM   = NULL;
2372                 status->op_MulC    = NULL;
2373                 status->op_DivC    = NULL;
2374                 status->op_ModC    = NULL;
2375         }  /* if */
2376
2377         /* for Florian: count the Sel depth */
2378         if (enable_options & FIRMSTAT_COUNT_SELS) {
2379                 _op_SelSel.code    = --num;
2380                 _op_SelSel.name    = new_id_from_chars(X("Sel(Sel)"));
2381
2382                 _op_SelSelSel.code = --num;
2383                 _op_SelSelSel.name = new_id_from_chars(X("Sel(Sel(Sel))"));
2384
2385                 status->op_SelSel    = &_op_SelSel;
2386                 status->op_SelSelSel = &_op_SelSelSel;
2387         } else {
2388                 status->op_SelSel    = NULL;
2389                 status->op_SelSelSel = NULL;
2390         }  /* if */
2391
2392         /* register the dumper */
2393         stat_register_dumper(&simple_dumper);
2394
2395         if (enable_options & FIRMSTAT_CSV_OUTPUT)
2396                 stat_register_dumper(&csv_dumper);
2397
2398         /* initialize the pattern hash */
2399         stat_init_pattern_history(enable_options & FIRMSTAT_PATTERN_ENABLED);
2400
2401         /* initialize the Const options */
2402         if (enable_options & FIRMSTAT_COUNT_CONSTS)
2403                 stat_init_const_cnt(status);
2404
2405         /* distribution table for parameter counts */
2406         status->dist_param_cnt = stat_new_int_distrib_tbl();
2407
2408         clear_optimization_counter();
2409
2410 #undef HOOK
2411 #undef X
2412 }  /* firm_init_stat */
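/*
 * Typical lifecycle, as a rough sketch (the option set below is only an
 * illustration):
 *
 *     firm_init_stat(FIRMSTAT_ENABLED | FIRMSTAT_COUNT_CONSTS | FIRMSTAT_CSV_OUTPUT);
 *     // ...construct and optimize graphs; the registered hooks fire automatically...
 *     stat_dump_snapshot("main.c", "end");
 *     stat_term();
 */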
2413
2414 /**
2415  * Frees all dumper structures.
2416  */
2417 static void stat_term_dumper(void)
2418 {
2419         dumper_t *dumper, *next_dumper;
2420
2421         for (dumper = status->dumper; dumper; /* iteration done in loop body */ ) {
2422                 if (dumper->func_map)
2423                         del_pset(dumper->func_map);
2424
2425                 next_dumper = dumper->next;
2426                 free(dumper);
2427                 dumper = next_dumper;
2428         }  /* for */
2429 }  /* stat_term_dumper */
2430
2431
2432 /* Terminates the statistics module, frees all memory. */
2433 void stat_term(void)
2434 {
2435         if (status != (stat_info_t *)&status_disable) {
2436                 obstack_free(&status->be_data, NULL);
2437                 obstack_free(&status->cnts, NULL);
2438
2439                 stat_term_dumper();
2440
2441                 xfree(status);
2442                 status = (stat_info_t *)&status_disable;
2443         }
2444 }  /* stat_term */
2445
2446 /* returns 1 if statistics were initialized, 0 otherwise */
2447 int stat_is_active(void)
2448 {
2449         return status != (stat_info_t *)&status_disable;
2450 }  /* stat_is_active */