[libfirm] / ir / stat / firmstat.c
1 /*
2  * Copyright (C) 1995-2010 University of Karlsruhe.  All rights reserved.
3  *
4  * This file is part of libFirm.
5  *
6  * This file may be distributed and/or modified under the terms of the
7  * GNU General Public License version 2 as published by the Free Software
8  * Foundation and appearing in the file LICENSE.GPL included in the
9  * packaging of this file.
10  *
11  * Licensees holding valid libFirm Professional Edition licenses may use
12  * this file in accordance with the libFirm Commercial License.
13  * Agreement provided with the Software.
14  *
15  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17  * PURPOSE.
18  */
19
20 /**
21  * @file
22  * @brief   Statistics for Firm.
23  * @author  Michael Beck
24  * @version $Id$
25  */
26 #include "config.h"
27
28 #ifdef FIRM_STATISTICS
29
30 #include <stdio.h>
31 #include <stdlib.h>
32 #include <string.h>
33
34 #include "irouts.h"
35 #include "irdump.h"
36 #include "hashptr.h"
37 #include "firmstat_t.h"
38 #include "irpass_t.h"
39 #include "pattern.h"
40 #include "dags.h"
41 #include "stat_dmp.h"
42 #include "xmalloc.h"
43 #include "irhooks.h"
44 #include "util.h"
45
46 /*
47  * These need to be static:
48  * special pseudo opcodes that we need to count some interesting cases.
49  */
50
51 /**
52  * The Phi0, a node that is created during SSA construction
53  */
54 static ir_op _op_Phi0;
55
56 /** The PhiM, just to count memory Phis. */
57 static ir_op _op_PhiM;
58
59 /** The Mul by Const node. */
60 static ir_op _op_MulC;
61
62 /** The Div by Const node. */
63 static ir_op _op_DivC;
64
65 /** The Mod by Const node. */
66 static ir_op _op_ModC;
67
68 /** The Quot by Const node. */
69 static ir_op _op_QuotC;
70
71 /** The memory Proj node. */
72 static ir_op _op_ProjM;
73
74 /** A Sel of a Sel */
75 static ir_op _op_SelSel;
76
77 /** A Sel of a Sel of a Sel */
78 static ir_op _op_SelSelSel;
79
80 /* ---------------------------------------------------------------------------------- */
81
82 /** Marks the beginning of a statistic (hook) function. */
83 #define STAT_ENTER    ++status->recursive
84
85 /** Marks the end of a statistic (hook) function. */
86 #define STAT_LEAVE    --status->recursive
87
88 /** Allows entering a statistic function only when we are not already in a hook. */
89 #define STAT_ENTER_SINGLE    do { if (status->recursive > 0) return; ++status->recursive; } while (0)
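/*
 * Typical usage in the hook implementations below (see e.g. stat_new_node()):
 *
 *     STAT_ENTER;
 *     {
 *         ... collect the statistics ...
 *     }
 *     STAT_LEAVE;
 */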
90
91 /**
92  * global status
93  */
94 static const unsigned status_disable = 0;
95 static stat_info_t *status = (stat_info_t *)&status_disable;
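/*
 * Note: while the statistics module is not initialized, status points at the
 * read-only zero word above, so the hooks below, which all test
 * status->stat_options first, treat statistics as disabled.  (This presumably
 * relies on stat_options being located at the start of stat_info_t.)
 */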
96
97 /**
98  * Compare two elements of the opcode hash.
99  */
100 static int opcode_cmp(const void *elt, const void *key)
101 {
102         const node_entry_t *e1 = (const node_entry_t*)elt;
103         const node_entry_t *e2 = (const node_entry_t*)key;
104
105         return e1->op->code - e2->op->code;
106 }  /* opcode_cmp */
107
108 /**
109  * Compare two elements of the graph hash.
110  */
111 static int graph_cmp(const void *elt, const void *key)
112 {
113         const graph_entry_t *e1 = (const graph_entry_t*)elt;
114         const graph_entry_t *e2 = (const graph_entry_t*)key;
115
116         return e1->irg != e2->irg;
117 }  /* graph_cmp */
118
119 /**
120  * Compare two elements of the optimization hash.
121  */
122 static int opt_cmp(const void *elt, const void *key)
123 {
124         const opt_entry_t *e1 = (const opt_entry_t*)elt;
125         const opt_entry_t *e2 = (const opt_entry_t*)key;
126
127         return e1->op->code != e2->op->code;
128 }  /* opt_cmp */
129
130 /**
131  * Compare two elements of the block/extbb hash.
132  */
133 static int block_cmp(const void *elt, const void *key)
134 {
135         const block_entry_t *e1 = (const block_entry_t*)elt;
136         const block_entry_t *e2 = (const block_entry_t*)key;
137
138         /* it's enough to compare the block number */
139         return e1->block_nr != e2->block_nr;
140 }  /* block_cmp */
141
142 /**
143  * Compare two elements of the be_block hash.
144  */
145 static int be_block_cmp(const void *elt, const void *key)
146 {
147         const be_block_entry_t *e1 = (const be_block_entry_t*)elt;
148         const be_block_entry_t *e2 = (const be_block_entry_t*)key;
149
150         return e1->block_nr != e2->block_nr;
151 }  /* be_block_cmp */
152
153 /**
154  * Compare two elements of reg pressure hash.
155  */
156 static int reg_pressure_cmp(const void *elt, const void *key)
157 {
158         const reg_pressure_entry_t *e1 = (const reg_pressure_entry_t*)elt;
159         const reg_pressure_entry_t *e2 = (const reg_pressure_entry_t*)key;
160
161         return e1->class_name != e2->class_name;
162 }  /* reg_pressure_cmp */
163
164 /**
165  * Compare two elements of the perm_stat hash.
166  */
167 static int perm_stat_cmp(const void *elt, const void *key)
168 {
169         const perm_stat_entry_t *e1 = (const perm_stat_entry_t*)elt;
170         const perm_stat_entry_t *e2 = (const perm_stat_entry_t*)key;
171
172         return e1->perm != e2->perm;
173 }  /* perm_stat_cmp */
174
175 /**
176  * Compare two elements of the perm_class hash.
177  */
178 static int perm_class_cmp(const void *elt, const void *key)
179 {
180         const perm_class_entry_t *e1 = (const perm_class_entry_t*)elt;
181         const perm_class_entry_t *e2 = (const perm_class_entry_t*)key;
182
183         return e1->class_name != e2->class_name;
184 }  /* perm_class_cmp */
185
186 /**
187  * Compare two elements of the ir_op hash.
188  */
189 static int opcode_cmp_2(const void *elt, const void *key)
190 {
191         const ir_op *e1 = (const ir_op*)elt;
192         const ir_op *e2 = (const ir_op*)key;
193
194         return e1->code != e2->code;
195 }  /* opcode_cmp_2 */
196
197 /**
198  * Compare two elements of the address_mark set.
199  */
200 static int address_mark_cmp(const void *elt, const void *key, size_t size)
201 {
202         const address_mark_entry_t *e1 = (const address_mark_entry_t*)elt;
203         const address_mark_entry_t *e2 = (const address_mark_entry_t*)key;
204         (void) size;
205
206         /* compare only the nodes, the rest is used as data container */
207         return e1->node != e2->node;
208 }  /* address_mark_cmp */
209
210 /**
211  * Clears all counters in a node_entry_t.
212  */
213 static void opcode_clear_entry(node_entry_t *elem)
214 {
215         cnt_clr(&elem->cnt_alive);
216         cnt_clr(&elem->new_node);
217         cnt_clr(&elem->into_Id);
218         cnt_clr(&elem->normalized);
219 }  /* opcode_clear_entry */
220
221 /**
222  * Returns the associated node_entry_t for an ir_op (and allocates
223  * one if not yet available).
224  *
225  * @param op    the IR operation
226  * @param hmap  a hash map containing ir_op* -> node_entry_t*
227  */
228 static node_entry_t *opcode_get_entry(const ir_op *op, hmap_node_entry_t *hmap)
229 {
230         node_entry_t key;
231         node_entry_t *elem;
232
233         key.op = op;
234
235         elem = (node_entry_t*)pset_find(hmap, &key, op->code);
236         if (elem)
237                 return elem;
238
239         elem = OALLOCZ(&status->cnts, node_entry_t);
240
241         /* clear counter */
242         opcode_clear_entry(elem);
243
244         elem->op = op;
245
246         return (node_entry_t*)pset_insert(hmap, elem, op->code);
247 }  /* opcode_get_entry */
248
249 /**
250  * Returns the associated ir_op for an opcode
251  *
252  * @param code  the IR opcode
253  * @param hmap  the hash map containing opcode -> ir_op*
254  */
255 static ir_op *opcode_find_entry(ir_opcode code, hmap_ir_op *hmap)
256 {
257         ir_op key;
258
259         key.code = code;
260         return (ir_op*)pset_find(hmap, &key, code);
261 }  /* opcode_find_entry */
262
263 /**
264  * Clears all counters in a graph_entry_t.
265  *
266  * @param elem  the graph entry
267  * @param all   if non-zero, clears all counters, otherwise leaves the accumulated ones
268  */
269 static void graph_clear_entry(graph_entry_t *elem, int all)
270 {
271         int i;
272
273         /* clear accumulated / non-accumulated counter */
274         for (i = all ? 0 : _gcnt_non_acc; i < _gcnt_last; ++i) {
275                 cnt_clr(&elem->cnt[i]);
276         }  /* for */
277
278         if (elem->block_hash) {
279                 del_pset(elem->block_hash);
280                 elem->block_hash = NULL;
281         }  /* if */
282
283         if (elem->extbb_hash) {
284                 del_pset(elem->extbb_hash);
285                 elem->extbb_hash = NULL;
286         }  /* if */
287
288         obstack_free(&elem->recalc_cnts, NULL);
289         obstack_init(&elem->recalc_cnts);
290 }  /* graph_clear_entry */
291
292 /**
293  * Returns the associated graph_entry_t for an IR graph.
294  *
295  * @param irg   the IR graph, NULL for the global counter
296  * @param hmap  the hash map containing ir_graph* -> graph_entry_t*
297  */
298 static graph_entry_t *graph_get_entry(ir_graph *irg, hmap_graph_entry_t *hmap)
299 {
300         graph_entry_t key;
301         graph_entry_t *elem;
302         size_t i;
303
304         key.irg = irg;
305
306         elem = (graph_entry_t*)pset_find(hmap, &key, HASH_PTR(irg));
307
308         if (elem) {
309                 /* create hash map backend block information */
310                 if (! elem->be_block_hash)
311                         elem->be_block_hash = new_pset(be_block_cmp, 5);
312
313                 return elem;
314         }  /* if */
315
316         /* allocate a new one */
317         elem = OALLOCZ(&status->cnts, graph_entry_t);
318         obstack_init(&elem->recalc_cnts);
319
320         /* clear counter */
321         graph_clear_entry(elem, 1);
322
323         /* new hash table for opcodes here  */
324         elem->opcode_hash   = new_pset(opcode_cmp, 5);
325         elem->address_mark  = new_set(address_mark_cmp, 5);
326         elem->irg           = irg;
327
328         /* these hash tables are created on demand */
329         elem->block_hash = NULL;
330         elem->extbb_hash = NULL;
331
332         for (i = 0; i < sizeof(elem->opt_hash)/sizeof(elem->opt_hash[0]); ++i)
333                 elem->opt_hash[i] = new_pset(opt_cmp, 4);
334
335         return (graph_entry_t*)pset_insert(hmap, elem, HASH_PTR(irg));
336 }  /* graph_get_entry */
337
338 /**
339  * Clears all counters in an opt_entry_t.
340  */
341 static void opt_clear_entry(opt_entry_t *elem)
342 {
343         cnt_clr(&elem->count);
344 }  /* opt_clear_entry */
345
346 /**
347  * Returns the associated opt_entry_t for an IR operation.
348  *
349  * @param op    the IR operation
350  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
351  */
352 static opt_entry_t *opt_get_entry(const ir_op *op, hmap_opt_entry_t *hmap)
353 {
354         opt_entry_t key;
355         opt_entry_t *elem;
356
357         key.op = op;
358
359         elem = (opt_entry_t*)pset_find(hmap, &key, op->code);
360         if (elem)
361                 return elem;
362
363         elem = OALLOCZ(&status->cnts, opt_entry_t);
364
365         /* clear new counter */
366         opt_clear_entry(elem);
367
368         elem->op = op;
369
370         return (opt_entry_t*)pset_insert(hmap, elem, op->code);
371 }  /* opt_get_entry */
372
373 /**
374  * Clears all counters in a block_entry_t.
375  */
376 static void block_clear_entry(block_entry_t *elem)
377 {
378         int i;
379
380         for (i = 0; i < _bcnt_last; ++i)
381                 cnt_clr(&elem->cnt[i]);
382 }  /* block_clear_entry */
383
384 /**
385  * Returns the associated block_entry_t for a block.
386  *
387  * @param block_nr  an IR block number
388  * @param hmap      a hash map containing long -> block_entry_t
389  */
390 static block_entry_t *block_get_entry(struct obstack *obst, long block_nr, hmap_block_entry_t *hmap)
391 {
392         block_entry_t key;
393         block_entry_t *elem;
394
395         key.block_nr = block_nr;
396
397         elem = (block_entry_t*)pset_find(hmap, &key, block_nr);
398         if (elem)
399                 return elem;
400
401         elem = OALLOCZ(obst, block_entry_t);
402
403         /* clear new counter */
404         block_clear_entry(elem);
405
406         elem->block_nr = block_nr;
407
408         return (block_entry_t*)pset_insert(hmap, elem, block_nr);
409 }  /* block_get_entry */
410
411 /**
412  * Clear all sets in be_block_entry_t.
413  */
414 static void be_block_clear_entry(be_block_entry_t *elem)
415 {
416         if (elem->reg_pressure)
417                 del_pset(elem->reg_pressure);
418
419         if (elem->sched_ready)
420                 stat_delete_distrib_tbl(elem->sched_ready);
421
422         if (elem->perm_class_stat)
423                 del_pset(elem->perm_class_stat);
424
425         elem->reg_pressure    = new_pset(reg_pressure_cmp, 5);
426         elem->sched_ready     = stat_new_int_distrib_tbl();
427         elem->perm_class_stat = new_pset(perm_class_cmp, 5);
428 }  /* be_block_clear_entry */
429
430 /**
431  * Returns the associated be_block_entry_t for a block.
432  *
433  * @param block_nr  an IR block number
434  * @param hmap      a hash map containing long -> be_block_entry_t
435  */
436 static be_block_entry_t *be_block_get_entry(struct obstack *obst, long block_nr, hmap_be_block_entry_t *hmap)
437 {
438         be_block_entry_t key;
439         be_block_entry_t *elem;
440
441         key.block_nr = block_nr;
442
443         elem = (be_block_entry_t*)pset_find(hmap, &key, block_nr);
444         if (elem)
445                 return elem;
446
447         elem = OALLOCZ(obst, be_block_entry_t);
448
449         /* clear new counter */
450         be_block_clear_entry(elem);
451
452         elem->block_nr = block_nr;
453
454         return (be_block_entry_t*)pset_insert(hmap, elem, block_nr);
455 }  /* be_block_get_entry */
456
457 /**
458  * clears all sets in perm_class_entry_t
459  */
460 static void perm_class_clear_entry(perm_class_entry_t *elem)
461 {
462         if (elem->perm_stat)
463                 del_pset(elem->perm_stat);
464
465         elem->perm_stat = new_pset(perm_stat_cmp, 5);
466 }  /* perm_class_clear_entry */
467
468 /**
469  * Returns the associated perm_class entry for a register class.
470  *
471  * @param class_name  the register class name
472  * @param hmap        a hash map containing class_name -> perm_class_entry_t
473  */
474 static perm_class_entry_t *perm_class_get_entry(struct obstack *obst, const char *class_name,
475                                                 hmap_perm_class_entry_t *hmap)
476 {
477         perm_class_entry_t key;
478         perm_class_entry_t *elem;
479
480         key.class_name = class_name;
481
482         elem = (perm_class_entry_t*)pset_find(hmap, &key, HASH_PTR(class_name));
483         if (elem)
484                 return elem;
485
486         elem = OALLOCZ(obst, perm_class_entry_t);
487
488         /* clear new counter */
489         perm_class_clear_entry(elem);
490
491         elem->class_name = class_name;
492
493         return (perm_class_entry_t*)pset_insert(hmap, elem, HASH_PTR(class_name));
494 }  /* perm_class_get_entry */
495
496 /**
497  * clears all sets in perm_stat_entry_t
498  */
499 static void perm_stat_clear_entry(perm_stat_entry_t *elem)
500 {
501         if (elem->chains)
502                 stat_delete_distrib_tbl(elem->chains);
503
504         if (elem->cycles)
505                 stat_delete_distrib_tbl(elem->cycles);
506
507         elem->chains = stat_new_int_distrib_tbl();
508         elem->cycles = stat_new_int_distrib_tbl();
509 }  /* perm_stat_clear_entry */
510
511 /**
512  * Returns the associated perm_stat entry for a perm.
513  *
514  * @param perm      the perm node
515  * @param hmap      a hash map containing perm -> perm_stat_entry_t
516  */
517 static perm_stat_entry_t *perm_stat_get_entry(struct obstack *obst, ir_node *perm, hmap_perm_stat_entry_t *hmap)
518 {
519         perm_stat_entry_t key;
520         perm_stat_entry_t *elem;
521
522         key.perm = perm;
523
524         elem = (perm_stat_entry_t*)pset_find(hmap, &key, HASH_PTR(perm));
525         if (elem)
526                 return elem;
527
528         elem = OALLOCZ(obst, perm_stat_entry_t);
529
530         /* clear new counter */
531         perm_stat_clear_entry(elem);
532
533         elem->perm = perm;
534
535         return (perm_stat_entry_t*)pset_insert(hmap, elem, HASH_PTR(perm));
536 }  /* perm_stat_get_entry */
537
538 /**
539  * Clears the optimization counters.
540  */
541 static void clear_optimization_counter(void)
542 {
543         int i;
544         for (i = 0; i < FS_OPT_MAX; ++i)
545                 cnt_clr(&status->num_opts[i]);
546 }
547
548 /**
549  * Returns the ir_op for an IR node,
550  * handles special cases and returns pseudo opcodes.
551  *
552  * @param node  an IR node
553  */
554 static ir_op *stat_get_irn_op(ir_node *node)
555 {
556         ir_op *op = get_irn_op(node);
557         unsigned opc = op->code;
558
559         switch (opc) {
560         case iro_Phi:
561                 if (get_irn_arity(node) == 0) {
562                         /* special case, a Phi0 node, count on extra counter */
563                         op = status->op_Phi0 ? status->op_Phi0 : op;
564                 } else if (get_irn_mode(node) == mode_M) {
565                         /* special case, a Memory Phi node, count on extra counter */
566                         op = status->op_PhiM ? status->op_PhiM : op;
567                 }  /* if */
568                 break;
569         case iro_Proj:
570                 if (get_irn_mode(node) == mode_M) {
571                         /* special case, a Memory Proj node, count on extra counter */
572                         op = status->op_ProjM ? status->op_ProjM : op;
573                 }  /* if */
574                 break;
575         case iro_Mul:
576                 if (is_Const(get_Mul_left(node)) || is_Const(get_Mul_right(node))) {
577                         /* special case, a Multiply by a const, count on extra counter */
578                         op = status->op_MulC ? status->op_MulC : op;
579                 }  /* if */
580                 break;
581         case iro_Div:
582                 if (is_Const(get_Div_right(node))) {
583                         /* special case, a division by a const, count on extra counter */
584                         op = status->op_DivC ? status->op_DivC : op;
585                 }  /* if */
586                 break;
587         case iro_Mod:
588                 if (is_Const(get_Mod_right(node))) {
589                         /* special case, a modulo by a const, count on extra counter */
590                         op = status->op_ModC ? status->op_ModC : op;
591                 }  /* if */
592                 break;
593         case iro_Quot:
594                 if (is_Const(get_Quot_right(node))) {
595                         /* special case, a floating point division by a const, count on extra counter */
596                         op = status->op_QuotC ? status->op_QuotC : op;
597                 }  /* if */
598                 break;
599         case iro_Sel:
600                 if (is_Sel(get_Sel_ptr(node))) {
601                         /* special case, a Sel of a Sel, count on extra counter */
602                         op = status->op_SelSel ? status->op_SelSel : op;
603                         if (is_Sel(get_Sel_ptr(get_Sel_ptr(node)))) {
604                                 /* special case, a Sel of a Sel of a Sel, count on extra counter */
605                                 op = status->op_SelSelSel ? status->op_SelSelSel : op;
606                         }  /* if */
607                 }  /* if */
608                 break;
609         default:
610                 ;
611         }  /* switch */
612
613         return op;
614 }  /* stat_get_irn_op */
615
616 /**
617  * Updates the block counters.
618  */
619 static void update_block_info(ir_node *node, graph_entry_t *graph)
620 {
621         ir_op *op = get_irn_op(node);
622         ir_node *block;
623         block_entry_t *b_entry;
624         int i, arity;
625
626         /* check for block */
627         if (op == op_Block) {
628                 arity = get_irn_arity(node);
629                 b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(node), graph->block_hash);
630                 /* mark start and end block to allow filtering them out */
631                 if (node == get_irg_start_block(graph->irg))
632                         b_entry->is_start = 1;
633                 else if (node == get_irg_end_block(graph->irg))
634                         b_entry->is_end = 1;
635
636                 /* count all incoming edges */
637                 for (i = 0; i < arity; ++i) {
638                         ir_node *pred = get_irn_n(node, i);
639                         ir_node *other_block = get_nodes_block(pred);
640                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
641
642                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
643                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
644                 }  /* for */
645                 return;
646         }  /* if */
647
648         block   = get_nodes_block(node);
649         b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(block), graph->block_hash);
650
651         if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
652                 /* count data Phi per block */
653                 cnt_inc(&b_entry->cnt[bcnt_phi_data]);
654         }  /* if */
655
656         /* we have a new node in our block */
657         cnt_inc(&b_entry->cnt[bcnt_nodes]);
658
659         /* don't count keep-alive edges */
660         if (is_End(node))
661                 return;
662
663         arity = get_irn_arity(node);
664
665         for (i = 0; i < arity; ++i) {
666                 ir_node *pred = get_irn_n(node, i);
667                 ir_node *other_block;
668
669                 other_block = get_nodes_block(pred);
670
671                 if (other_block == block)
672                         cnt_inc(&b_entry->cnt[bcnt_edges]); /* an in-block edge */
673                 else {
674                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
675
676                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
677                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
678                 }  /* if */
679         }  /* for */
680 }  /* update_block_info */
681
682 /**
683  * Update the extended block counter.
684  */
685 static void update_extbb_info(ir_node *node, graph_entry_t *graph)
686 {
687         ir_op *op = get_irn_op(node);
688         ir_extblk *extbb;
689         extbb_entry_t *eb_entry;
690         int i, arity;
691
692         /* check for block */
693         if (op == op_Block) {
694                 extbb = get_nodes_extbb(node);
695                 arity = get_irn_arity(node);
696                 eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);
697
698                 /* count all incoming edges */
699                 for (i = 0; i < arity; ++i) {
700                         ir_node *pred = get_irn_n(node, i);
701                         ir_extblk *other_extbb = get_nodes_extbb(pred);
702
703                         if (extbb != other_extbb) {
704                                 extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);
705
706                                 cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
707                                 cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
708                         }  /* if */
709                 }  /* for */
710                 return;
711         }  /* if */
712
713         extbb    = get_nodes_extbb(node);
714         eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);
715
716         if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
717                 /* count data Phi per extbb */
718                 cnt_inc(&eb_entry->cnt[bcnt_phi_data]);
719         }  /* if */
720
721         /* we have a new node in our block */
722         cnt_inc(&eb_entry->cnt[bcnt_nodes]);
723
724         /* don't count keep-alive edges */
725         if (is_End(node))
726                 return;
727
728         arity = get_irn_arity(node);
729
730         for (i = 0; i < arity; ++i) {
731                 ir_node *pred = get_irn_n(node, i);
732                 ir_extblk *other_extbb = get_nodes_extbb(pred);
733
734                 if (other_extbb == extbb)
735                         cnt_inc(&eb_entry->cnt[bcnt_edges]);    /* an in-extbb edge */
736                 else {
737                         extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);
738
739                         cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
740                         cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
741                 }  /* if */
742         }  /* for */
743 }  /* update_extbb_info */
744
745 /**
746  * Calculates how many arguments of the call are constant and updates
747  * the parameter distribution.
748  */
749 static void analyse_params_of_Call(graph_entry_t *graph, ir_node *call)
750 {
751         int i, num_const_args = 0, num_local_adr = 0;
752         int n = get_Call_n_params(call);
753
754         for (i = 0; i < n; ++i) {
755                 ir_node *param = get_Call_param(call, i);
756
757                 if (is_irn_constlike(param))
758                         ++num_const_args;
759                 else if (is_Sel(param)) {
760                         ir_node *base = param;
761
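                        /* strip the whole Sel chain to find the base address of the argument */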
762                         do {
763                                 base = get_Sel_ptr(base);
764                         } while (is_Sel(base));
765
766                         if (base == get_irg_frame(current_ir_graph))
767                                 ++num_local_adr;
768                 }
769
770         }  /* for */
771
772         if (num_const_args > 0)
773                 cnt_inc(&graph->cnt[gcnt_call_with_cnst_arg]);
774         if (num_const_args == n)
775                 cnt_inc(&graph->cnt[gcnt_call_with_all_cnst_arg]);
776         if (num_local_adr > 0)
777                 cnt_inc(&graph->cnt[gcnt_call_with_local_adr]);
778
779         stat_inc_int_distrib_tbl(status->dist_param_cnt, n);
780 }  /* analyse_params_of_Call */
781
782 /**
783  * Update info on calls.
784  *
785  * @param call   The call
786  * @param graph  The graph entry containing the call
787  */
788 static void stat_update_call(ir_node *call, graph_entry_t *graph)
789 {
790         ir_node   *block = get_nodes_block(call);
791         ir_node   *ptr = get_Call_ptr(call);
792         ir_entity *ent = NULL;
793         ir_graph  *callee = NULL;
794
795         /*
796          * If the block is bad, the whole subgraph will collapse later
797          * so do not count this call.
798          * This happens in dead code.
799          */
800         if (is_Bad(block))
801                 return;
802
803         cnt_inc(&graph->cnt[gcnt_all_calls]);
804
805         /* found a call, this function is not a leaf */
806         graph->is_leaf = 0;
807
808         if (is_SymConst(ptr)) {
809                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
810                         /* ok, we seem to know the entity */
811                         ent = get_SymConst_entity(ptr);
812                         callee = get_entity_irg(ent);
813
814                         /* it is recursive if it calls itself at least once */
815                         if (callee == graph->irg)
816                                 graph->is_recursive = 1;
817                         if (callee == NULL)
818                                 cnt_inc(&graph->cnt[gcnt_external_calls]);
819                 }  /* if */
820         } else {
821                 /* indirect call, we cannot predict the callee */
822                 cnt_inc(&graph->cnt[gcnt_indirect_calls]);
823
824                 /* NOT a leaf call */
825                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
826         }  /* if */
827
828         /* check if it's a chain-call: then the call block
829          * must dominate the end block. */
830         {
831                 ir_node *curr = get_irg_end_block(graph->irg);
832                 int depth = get_Block_dom_depth(block);
833
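                /* walk up the immediate dominator chain from the end block; stop
                 * when the call block is reached or its dominator depth has been passed */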
834                 for (; curr != block && get_Block_dom_depth(curr) > depth;) {
835                         curr = get_Block_idom(curr);
836
837                         if (! curr || !is_Block(curr))
838                                 break;
839                 }  /* for */
840
841                 if (curr != block)
842                         graph->is_chain_call = 0;
843         }
844
845         /* check, if the callee is a leaf */
846         if (callee) {
847                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
848
849                 if (called->is_analyzed) {
850                         if (! called->is_leaf)
851                                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
852                 }  /* if */
853         }  /* if */
854
855         analyse_params_of_Call(graph, call);
856 }  /* stat_update_call */
857
858 /**
859  * Update info on calls for graphs on the wait queue.
860  */
861 static void stat_update_call_2(ir_node *call, graph_entry_t *graph)
862 {
863         ir_node   *block = get_nodes_block(call);
864         ir_node   *ptr = get_Call_ptr(call);
865         ir_entity *ent = NULL;
866         ir_graph  *callee = NULL;
867
868         /*
869          * If the block is bad, the whole subgraph will collapse later
870          * so do not count this call.
871          * This happens in dead code.
872          */
873         if (is_Bad(block))
874                 return;
875
876         if (is_SymConst(ptr)) {
877                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
878                         /* ok, we seem to know the entity */
879                         ent = get_SymConst_entity(ptr);
880                         callee = get_entity_irg(ent);
881                 }  /* if */
882         }  /* if */
883
884         /* check, if the callee is a leaf */
885         if (callee) {
886                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
887
888                 assert(called->is_analyzed);
889
890                 if (! called->is_leaf)
891                         graph->is_leaf_call = LCS_NON_LEAF_CALL;
892         } else
893                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
894 }  /* stat_update_call_2 */
895
896 /**
897  * Find the base address of a Sel node.
898  *
899  * @param sel  the node
900  *
901  * @return the base address.
902  */
903 static ir_node *find_base_adr(ir_node *sel)
904 {
905         ir_node *ptr = get_Sel_ptr(sel);
906
907         while (is_Sel(ptr)) {
908                 sel = ptr;
909                 ptr = get_Sel_ptr(sel);
910         }
911         return ptr;
912 }  /* find_base_adr */
913
914 /**
915  * Update info on Load/Store address statistics.
916  */
917 static void stat_update_address(ir_node *node, graph_entry_t *graph)
918 {
919         unsigned opc = get_irn_opcode(node);
920         ir_node *base;
921         ir_graph *irg;
922
923         switch (opc) {
924         case iro_SymConst:
925                 /* a global address */
926                 cnt_inc(&graph->cnt[gcnt_global_adr]);
927                 break;
928         case iro_Sel:
929                 base = find_base_adr(node);
930                 irg = current_ir_graph;
931                 if (base == get_irg_tls(irg)) {
932                         /* a TLS variable, like a global. */
933                         cnt_inc(&graph->cnt[gcnt_global_adr]);
934                 } else if (base == get_irg_frame(irg)) {
935                         /* a local variable. */
936                         cnt_inc(&graph->cnt[gcnt_local_adr]);
937                 } else {
938                         /* Pointer access */
939                         if (is_Proj(base) && skip_Proj(get_Proj_pred(base)) == get_irg_start(irg)) {
940                                 /* pointer access through parameter, check for THIS */
941                                 ir_entity *ent = get_irg_entity(irg);
942
943                                 if (ent != NULL) {
944                                         ir_type *ent_tp = get_entity_type(ent);
945
946                                         if (get_method_calling_convention(ent_tp) & cc_this_call) {
947                                                 if (get_Proj_proj(base) == 0) {
948                                                         /* THIS pointer */
949                                                         cnt_inc(&graph->cnt[gcnt_this_adr]);
950                                                         goto end_parameter;
951                                                 }  /* if */
952                                         }  /* if */
953                                 }  /* if */
954                                 /* other parameter */
955                                 cnt_inc(&graph->cnt[gcnt_param_adr]);
956 end_parameter: ;
957                         } else {
958                                 /* unknown Pointer access */
959                                 cnt_inc(&graph->cnt[gcnt_other_adr]);
960                         }  /* if */
961                 }  /* if */
962         default:
963                 ;
964         }  /* switch */
965 }  /* stat_update_address */
966
967 /**
968  * Walker for reachable nodes count.
969  */
970 static void update_node_stat(ir_node *node, void *env)
971 {
972         graph_entry_t *graph = (graph_entry_t*)env;
973         node_entry_t *entry;
974
975         ir_op *op = stat_get_irn_op(node);
976         int i, arity = get_irn_arity(node);
977
978         entry = opcode_get_entry(op, graph->opcode_hash);
979
980         cnt_inc(&entry->cnt_alive);
981         cnt_add_i(&graph->cnt[gcnt_edges], arity);
982
983         /* count block edges */
984         update_block_info(node, graph);
985
986         /* count extended block edges */
987         if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
988                 if (graph->irg != get_const_code_irg())
989                         update_extbb_info(node, graph);
990         }  /* if */
991
992         /* handle statistics for special node types */
993
994         switch (op->code) {
995         case iro_Call:
996                 /* check for properties that depends on calls like recursion/leaf/indirect call */
997                 stat_update_call(node, graph);
998                 break;
999         case iro_Load:
1000                 /* check address properties */
1001                 stat_update_address(get_Load_ptr(node), graph);
1002                 break;
1003         case iro_Store:
1004                 /* check address properties */
1005                 stat_update_address(get_Store_ptr(node), graph);
1006                 break;
1007         case iro_Phi:
1008                 /* check for non-strict Phi nodes */
1009                 for (i = arity - 1; i >= 0; --i) {
1010                         ir_node *pred = get_Phi_pred(node, i);
1011                         if (is_Unknown(pred)) {
1012                                 /* found an Unknown predecessor, graph is not strict */
1013                                 graph->is_strict = 0;
1014                                 break;
1015                         }
1016                 }
1017         default:
1018                 ;
1019         }  /* switch */
1020
1021         /* we want to count the constant IN nodes, not the CSE'd constants themselves */
1022         if (status->stat_options & FIRMSTAT_COUNT_CONSTS) {
1023                 int i;
1024
1025                 for (i = get_irn_arity(node) - 1; i >= 0; --i) {
1026                         ir_node *pred = get_irn_n(node, i);
1027
1028                         if (is_Const(pred)) {
1029                                 /* check properties of constants */
1030                                 stat_update_const(status, pred, graph);
1031                         }  /* if */
1032                 }  /* for */
1033         }  /* if */
1034 }  /* update_node_stat */
1035
1036 /**
1037  * Walker for reachable nodes count for graphs on the wait_q.
1038  */
1039 static void update_node_stat_2(ir_node *node, void *env)
1040 {
1041         graph_entry_t *graph = (graph_entry_t*)env;
1042
1043         /* check for properties that depends on calls like recursion/leaf/indirect call */
1044         if (is_Call(node))
1045                 stat_update_call_2(node, graph);
1046 }  /* update_node_stat_2 */
1047
1048 /**
1049  * Get the current address mark.
1050  */
1051 static unsigned get_adr_mark(graph_entry_t *graph, ir_node *node)
1052 {
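        /* &node works as the lookup key here because address_mark_cmp() compares
         * only the node field, presumably the first field of address_mark_entry_t */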
1053         address_mark_entry_t *value = (address_mark_entry_t*)set_find(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1054
1055         return value ? value->mark : 0;
1056 }  /* get_adr_mark */
1057
1058 /**
1059  * Set the current address mark.
1060  */
1061 static void set_adr_mark(graph_entry_t *graph, ir_node *node, unsigned val)
1062 {
1063         address_mark_entry_t *value = (address_mark_entry_t*)set_insert(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1064
1065         value->mark = val;
1066 }  /* set_adr_mark */
1067
1068 #undef DUMP_ADR_MODE
1069
1070 #ifdef DUMP_ADR_MODE
1071 /**
1072  * a vcg attribute hook: Color a node with a different color if
1073  * it's identified as a part of an address expression or at least referenced
1074  * by an address expression.
1075  */
1076 static int stat_adr_mark_hook(FILE *F, ir_node *node, ir_node *local)
1077 {
1078         ir_node *n           = local ? local : node;
1079         ir_graph *irg        = get_irn_irg(n);
1080         graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1081         unsigned mark        = get_adr_mark(graph, n);
1082
1083         if (mark & MARK_ADDRESS_CALC)
1084                 fprintf(F, "color: purple");
1085         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1086                 fprintf(F, "color: pink");
1087         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1088                 fprintf(F, "color: lightblue");
1089         else
1090                 return 0;
1091
1092         /* I know the color! */
1093         return 1;
1094 }  /* stat_adr_mark_hook */
1095 #endif /* DUMP_ADR_MODE */
1096
1097 /**
1098  * Return the "operational" mode of a Firm node.
1099  */
1100 static ir_mode *get_irn_op_mode(ir_node *node)
1101 {
1102         switch (get_irn_opcode(node)) {
1103         case iro_Load:
1104                 return get_Load_mode(node);
1105         case iro_Store:
1106                 return get_irn_mode(get_Store_value(node));
1107         case iro_Div:
1108                 return get_irn_mode(get_Div_left(node));
1109         case iro_Mod:
1110                 return get_irn_mode(get_Mod_left(node));
1111         case iro_Cmp:
1112                 /* Cmp is no address calculation, or is it? */
1113         default:
1114                 return get_irn_mode(node);
1115         }  /* switch */
1116 }  /* get_irn_op_mode */
1117
1118 /**
1119  * Post-walker that marks every node that is an address calculation.
1120  *
1121  * Users of a node must be visited first. We ensure this by
1122  * calling it in the post of an outs walk. This should work even in cycles,
1123  * while the normal pre-walk will not.
1124  */
1125 static void mark_address_calc(ir_node *node, void *env)
1126 {
1127         graph_entry_t *graph = (graph_entry_t*)env;
1128         ir_mode *mode = get_irn_op_mode(node);
1129         int i, n;
1130         unsigned mark_preds = MARK_REF_NON_ADR;
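        /* by default, mark predecessors as referenced by a non-address computation */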
1131
1132         if (! mode_is_data(mode))
1133                 return;
1134
1135         if (mode_is_reference(mode)) {
1136                 /* a reference is calculated here, we are sure */
1137                 set_adr_mark(graph, node, MARK_ADDRESS_CALC);
1138
1139                 mark_preds = MARK_REF_ADR;
1140         } else {
1141                 unsigned mark = get_adr_mark(graph, node);
1142
1143                 if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR) {
1144                         /*
1145                          * this node has no reference mode, but is only
1146                          * referenced by address calculations
1147                          */
1148                         mark_preds = MARK_REF_ADR;
1149                 }  /* if */
1150         }  /* if */
1151
1152         /* mark all predecessors */
1153         for (i = 0, n = get_irn_arity(node); i < n; ++i) {
1154                 ir_node *pred = get_irn_n(node, i);
1155
1156                 mode = get_irn_op_mode(pred);
1157                 if (! mode_is_data(mode))
1158                         continue;
1159
1160                 set_adr_mark(graph, pred, get_adr_mark(graph, pred) | mark_preds);
1161         }  /* for */
1162 }  /* mark_address_calc */
1163
1164 /**
1165  * Post-walker that counts every node that is an address calculation or
1166  * is referenced by one.
1167  *
1168  * Evaluates the address marks computed by mark_address_calc() during the
1169  * preceding outs walk.
1170  */
1171 static void count_adr_ops(ir_node *node, void *env)
1172 {
1173         graph_entry_t *graph = (graph_entry_t*)env;
1174         unsigned mark        = get_adr_mark(graph, node);
1175
1176         if (mark & MARK_ADDRESS_CALC)
1177                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1178         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1179                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1180         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1181                 cnt_inc(&graph->cnt[gcnt_all_adr_ops]);
1182 }  /* count_adr_ops */
1183
1184 /**
1185  * Called for every graph when the graph is either deleted or stat_dump_snapshot()
1186  * is called; recalculates all statistic info.
1187  *
1188  * @param global    The global entry
1189  * @param graph     The current entry
1190  */
1191 static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph)
1192 {
1193         node_entry_t *entry;
1194         int i;
1195
1196         /* first clear the alive counters in the graph */
1197         foreach_pset(graph->opcode_hash, node_entry_t*, entry) {
1198                 cnt_clr(&entry->cnt_alive);
1199         }  /* foreach_pset */
1200
1201         /* set pessimistic values */
1202         graph->is_leaf       = 1;
1203         graph->is_leaf_call  = LCS_UNKNOWN;
1204         graph->is_recursive  = 0;
1205         graph->is_chain_call = 1;
1206         graph->is_strict     = 1;
1207
1208         /* create new block counter */
1209         graph->block_hash = new_pset(block_cmp, 5);
1210
1211         /* we need dominator info */
1212         if (graph->irg != get_const_code_irg()) {
1213                 assure_doms(graph->irg);
1214
1215                 if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
1216                         /* we need extended basic blocks */
1217                         compute_extbb(graph->irg);
1218
1219                         /* create new extbb counter */
1220                         graph->extbb_hash = new_pset(block_cmp, 5);
1221                 }  /* if */
1222         }  /* if */
1223
1224         /* count the nodes in the graph */
1225         irg_walk_graph(graph->irg, update_node_stat, NULL, graph);
1226
1227 #if 0
1228         /* Uncomment this code if chain-call means exactly one call. */
1229         entry = opcode_get_entry(op_Call, graph->opcode_hash);
1230
1231         /* check if we have more than 1 call */
1232         if (cnt_gt(entry->cnt_alive, 1))
1233                 graph->is_chain_call = 0;
1234 #endif
1235
1236         /* recursive functions are never chain calls, leaf functions have no calls */
1237         if (graph->is_recursive || graph->is_leaf)
1238                 graph->is_chain_call = 0;
1239
1240         /* assuming we walk every graph only ONCE, we can sum up the global counts here */
1241         foreach_pset(graph->opcode_hash, node_entry_t*, entry) {
1242                 node_entry_t *g_entry = opcode_get_entry(entry->op, global->opcode_hash);
1243
1244                 /* update the node counter */
1245                 cnt_add(&g_entry->cnt_alive, &entry->cnt_alive);
1246         }  /* foreach_pset */
1247
1248         /* count the number of address calculations */
1249         if (graph->irg != get_const_code_irg()) {
1250                 ir_graph *rem = current_ir_graph;
1251
1252                 assure_irg_outs(graph->irg);
1253
1254                 /* Must be done on the outs graph */
1255                 current_ir_graph = graph->irg;
1256                 irg_out_walk(get_irg_start(graph->irg), NULL, mark_address_calc, graph);
1257                 current_ir_graph = rem;
1258
1259 #ifdef DUMP_ADR_MODE
1260                 /* register the vcg hook and dump the graph for test */
1261                 set_dump_node_vcgattr_hook(stat_adr_mark_hook);
1262                 dump_ir_block_graph(graph->irg, "-adr");
1263                 set_dump_node_vcgattr_hook(NULL);
1264 #endif /* DUMP_ADR_MODE */
1265
1266                 irg_walk_graph(graph->irg, NULL, count_adr_ops, graph);
1267         }  /* if */
1268
1269         /* count the DAG's */
1270         if (status->stat_options & FIRMSTAT_COUNT_DAG)
1271                 count_dags_in_graph(global, graph);
1272
1273         /* calculate the patterns of this graph */
1274         stat_calc_pattern_history(graph->irg);
1275
1276         /* leaf function did not call others */
1277         if (graph->is_leaf)
1278                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
1279         else if (graph->is_leaf_call == LCS_UNKNOWN) {
1280                 /* we still don't know if this graph calls leaf-functions, so enqueue */
1281                 pdeq_putl(status->wait_q, graph);
1282         }  /* if */
1283
1284         /* we have analyzed this graph */
1285         graph->is_analyzed = 1;
1286
1287         /* accumulate all counters */
1288         for (i = 0; i < _gcnt_last; ++i)
1289                 cnt_add(&global->cnt[i], &graph->cnt[i]);
1290 }  /* update_graph_stat */
1291
1292 /**
1293  * Called for every graph that was on the wait_q in stat_dump_snapshot();
1294  * finishes all statistic info calculations.
1295  *
1296  * @param global    The global entry
1297  * @param graph     The current entry
1298  */
1299 static void update_graph_stat_2(graph_entry_t *global, graph_entry_t *graph)
1300 {
1301         (void) global;
1302         if (graph->is_deleted) {
1303                 /* deleted, ignore */
1304                 return;
1305         }
1306
1307         if (graph->irg) {
1308                 /* count the nodes in the graph */
1309                 irg_walk_graph(graph->irg, update_node_stat_2, NULL, graph);
1310
1311                 if (graph->is_leaf_call == LCS_UNKNOWN)
1312                         graph->is_leaf_call = LCS_LEAF_CALL;
1313         }  /* if */
1314 }  /* update_graph_stat_2 */
1315
1316 /**
1317  * Register a dumper.
1318  */
1319 static void stat_register_dumper(const dumper_t *dumper)
1320 {
1321         dumper_t *p = XMALLOC(dumper_t);
1322
1323         memcpy(p, dumper, sizeof(*p));
1324
1325         p->next        = status->dumper;
1326         p->status      = status;
1327         status->dumper = p;
1328
1329         /* FIXME: memory leak */
1330 }  /* stat_register_dumper */
1331
1332 /**
1333  * Dumps the statistics of an IR graph.
1334  */
1335 static void stat_dump_graph(graph_entry_t *entry)
1336 {
1337         dumper_t *dumper;
1338
1339         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1340                 if (dumper->dump_graph)
1341                         dumper->dump_graph(dumper, entry);
1342         }  /* for */
1343 }  /* stat_dump_graph */
1344
1345 /**
1346  * Calls all registered dumper functions.
1347  */
1348 static void stat_dump_registered(graph_entry_t *entry)
1349 {
1350         dumper_t *dumper;
1351
1352         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1353                 if (dumper->func_map) {
1354                         dump_graph_FUNC func;
1355
1356                         foreach_pset(dumper->func_map, dump_graph_FUNC, func)
1357                                 func(dumper, entry);
1358                 }  /* if */
1359         }  /* for */
1360 }  /* stat_dump_registered */
1361
1362 /**
1363  * Dumps a constant table.
1364  */
1365 static void stat_dump_consts(const constant_info_t *tbl)
1366 {
1367         dumper_t *dumper;
1368
1369         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1370                 if (dumper->dump_const_tbl)
1371                         dumper->dump_const_tbl(dumper, tbl);
1372         }  /* for */
1373 }  /* stat_dump_consts */
1374
1375 /**
1376  * Dumps the parameter distribution
1377  */
1378 static void stat_dump_param_tbl(const distrib_tbl_t *tbl, graph_entry_t *global)
1379 {
1380         dumper_t *dumper;
1381
1382         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1383                 if (dumper->dump_param_tbl)
1384                         dumper->dump_param_tbl(dumper, tbl, global);
1385         }  /* for */
1386 }  /* stat_dump_param_tbl */
1387
1388 /**
1389  * Dumps the optimization counter
1390  */
1391 static void stat_dump_opt_cnt(const counter_t *tbl, unsigned len)
1392 {
1393         dumper_t *dumper;
1394
1395         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1396                 if (dumper->dump_opt_cnt)
1397                         dumper->dump_opt_cnt(dumper, tbl, len);
1398         }  /* for */
1399 }  /* stat_dump_opt_cnt */
1400
1401 /**
1402  * Initialize the dumper.
1403  */
1404 static void stat_dump_init(const char *name)
1405 {
1406         dumper_t *dumper;
1407
1408         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1409                 if (dumper->init)
1410                         dumper->init(dumper, name);
1411         }  /* for */
1412 }  /* stat_dump_init */
1413
1414 /**
1415  * Finish the dumper.
1416  */
1417 static void stat_dump_finish(void)
1418 {
1419         dumper_t *dumper;
1420
1421         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1422                 if (dumper->finish)
1423                         dumper->finish(dumper);
1424         }  /* for */
1425 }  /* stat_dump_finish */
1426
1427 /**
1428  * Register an additional function for all dumpers.
1429  */
1430 void stat_register_dumper_func(dump_graph_FUNC func)
1431 {
1432         dumper_t *dumper;
1433
1434         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1435                 if (! dumper->func_map)
1436                         dumper->func_map = pset_new_ptr(3);
1437                 pset_insert_ptr(dumper->func_map, (void*)func);
1438         }  /* for */
1439 }  /* stat_register_dumper_func */
1440
1441 /* ---------------------------------------------------------------------- */
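/*
 * What follows are the public helper functions and the hook callbacks
 * (cf. irhooks.h) through which the statistics are collected.
 */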
1442
1443 /*
1444  * Helper: get an ir_op from an opcode.
1445  */
1446 ir_op *stat_get_op_from_opcode(ir_opcode code)
1447 {
1448         return opcode_find_entry(code, status->ir_op_hash);
1449 }  /* stat_get_op_from_opcode */
1450
1451 /**
1452  * Hook: A new IR op is registered.
1453  *
1454  * @param ctx  the hook context
1455  * @param op   the new IR opcode that was created.
1456  */
1457 static void stat_new_ir_op(void *ctx, ir_op *op)
1458 {
1459         (void) ctx;
1460         if (! status->stat_options)
1461                 return;
1462
1463         STAT_ENTER;
1464         {
1465                 graph_entry_t *graph = graph_get_entry(NULL, status->irg_hash);
1466
1467                 /* execute for side effect :-) */
1468                 (void)opcode_get_entry(op, graph->opcode_hash);
1469
1470                 pset_insert(status->ir_op_hash, op, op->code);
1471         }
1472         STAT_LEAVE;
1473 }  /* stat_new_ir_op */
1474
1475 /**
1476  * Hook: An IR op is freed.
1477  *
1478  * @param ctx  the hook context
1479  * @param op   the IR opcode that is freed
1480  */
1481 static void stat_free_ir_op(void *ctx, ir_op *op)
1482 {
1483         (void) ctx;
1484         (void) op;
1485         if (! status->stat_options)
1486                 return;
1487
1488         STAT_ENTER;
1489         {
1490         }
1491         STAT_LEAVE;
1492 }  /* stat_free_ir_op */
1493
1494 /**
1495  * Hook: A new node is created.
1496  *
1497  * @param ctx   the hook context
1498  * @param irg   the IR graph on which the node is created
1499  * @param node  the new IR node that was created
1500  */
1501 static void stat_new_node(void *ctx, ir_graph *irg, ir_node *node)
1502 {
1503         (void) ctx;
1504         (void) irg;
1505         if (! status->stat_options)
1506                 return;
1507
1508         /* do NOT count during dead node elimination */
1509         if (status->in_dead_node_elim)
1510                 return;
1511
1512         STAT_ENTER;
1513         {
1514                 node_entry_t *entry;
1515                 graph_entry_t *graph;
1516                 ir_op *op = stat_get_irn_op(node);
1517
1518                 /* increase global value */
1519                 graph = graph_get_entry(NULL, status->irg_hash);
1520                 entry = opcode_get_entry(op, graph->opcode_hash);
1521                 cnt_inc(&entry->new_node);
1522
1523                 /* increase local value */
1524                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1525                 entry = opcode_get_entry(op, graph->opcode_hash);
1526                 cnt_inc(&entry->new_node);
1527         }
1528         STAT_LEAVE;
1529 }  /* stat_new_node */
1530
1531 /**
1532  * Hook: A node is changed into an Id node
1533  *
1534  * @param ctx   the hook context
1535  * @param node  the IR node that will be turned into an ID
1536  */
1537 static void stat_turn_into_id(void *ctx, ir_node *node)
1538 {
1539         (void) ctx;
1540         if (! status->stat_options)
1541                 return;
1542
1543         STAT_ENTER;
1544         {
1545                 node_entry_t *entry;
1546                 graph_entry_t *graph;
1547                 ir_op *op = stat_get_irn_op(node);
1548
1549                 /* increase global value */
1550                 graph = graph_get_entry(NULL, status->irg_hash);
1551                 entry = opcode_get_entry(op, graph->opcode_hash);
1552                 cnt_inc(&entry->into_Id);
1553
1554                 /* increase local value */
1555                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1556                 entry = opcode_get_entry(op, graph->opcode_hash);
1557                 cnt_inc(&entry->into_Id);
1558         }
1559         STAT_LEAVE;
1560 }  /* stat_turn_into_id */
1561
1562 /**
1563  * Hook: A node is normalized
1564  *
1565  * @param ctx   the hook context
1566  * @param node  the IR node that was normalized
1567  */
1568 static void stat_normalize(void *ctx, ir_node *node)
1569 {
1570         (void) ctx;
1571         if (! status->stat_options)
1572                 return;
1573
1574         STAT_ENTER;
1575         {
1576                 node_entry_t *entry;
1577                 graph_entry_t *graph;
1578                 ir_op *op = stat_get_irn_op(node);
1579
1580                 /* increase global value */
1581                 graph = graph_get_entry(NULL, status->irg_hash);
1582                 entry = opcode_get_entry(op, graph->opcode_hash);
1583                 cnt_inc(&entry->normalized);
1584
1585                 /* increase local value */
1586                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1587                 entry = opcode_get_entry(op, graph->opcode_hash);
1588                 cnt_inc(&entry->normalized);
1589         }
1590         STAT_LEAVE;
1591 }  /* stat_normalize */
1592
1593 /**
1594  * Hook: A new graph was created
1595  *
1596  * @param ctx  the hook context
1597  * @param irg  the new IR graph that was created
1598  * @param ent  the entity of this graph
1599  */
1600 static void stat_new_graph(void *ctx, ir_graph *irg, ir_entity *ent)
1601 {
1602         (void) ctx;
1603         if (! status->stat_options)
1604                 return;
1605
1606         STAT_ENTER;
1607         {
1608                 /* execute for side effect :-) */
1609                 graph_entry_t * graph = graph_get_entry(irg, status->irg_hash);
1610
1611                 graph->ent           = ent;
1612                 graph->is_deleted    = 0;
1613                 graph->is_leaf       = 0;
1614                 graph->is_leaf_call  = 0;
1615                 graph->is_recursive  = 0;
1616                 graph->is_chain_call = 0;
1617                 graph->is_strict     = 1;
1618                 graph->is_analyzed   = 0;
1619         }
1620         STAT_LEAVE;
1621 }  /* stat_new_graph */
1622
1623 /**
1624  * Hook: A graph will be deleted
1625  *
1626  * @param ctx  the hook context
1627  * @param irg  the IR graph that will be deleted
1628  *
1629  * Note that we still hold the information for this graph
1630  * in our hash maps; only a flag is set, which prevents this
1631  * information from being changed. It is "frozen" from now on.
1632  */
1633 static void stat_free_graph(void *ctx, ir_graph *irg)
1634 {
1635         (void) ctx;
1636         if (! status->stat_options)
1637                 return;
1638
1639         STAT_ENTER;
1640         {
1641                 graph_entry_t *graph  = graph_get_entry(irg, status->irg_hash);
1642                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
1643
1644                 graph->is_deleted = 1;
1645
1646                 if (status->stat_options & FIRMSTAT_COUNT_DELETED) {
1647                         /* count the nodes of the graph now, as it will be destroyed later */
1648                         update_graph_stat(global, graph);
1649                 }  /* if */
1650         }
1651         STAT_LEAVE;
1652 }  /* stat_free_graph */
1653
1654 /**
1655  * Hook: A walk over a graph is initiated. Do not count walks from statistics code.
1656  *
1657  * @param ctx  the hook context
1658  * @param irg  the IR graph that will be walked
1659  * @param pre  the pre walker
1660  * @param post the post walker
1661  */
1662 static void stat_irg_walk(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1663 {
1664         (void) ctx;
1665         (void) pre;
1666         (void) post;
1667         if (! status->stat_options)
1668                 return;
1669
1670         STAT_ENTER_SINGLE;
1671         {
1672                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1673
1674                 cnt_inc(&graph->cnt[gcnt_acc_walked]);
1675         }
1676         STAT_LEAVE;
1677 }  /* stat_irg_walk */
1678
1679 /**
1680  * Hook: A walk over a graph in block-wise order is initiated. Do not count walks from statistics code.
1681  *
1682  * @param ctx  the hook context
1683  * @param irg  the IR graph that will be walked
1684  * @param pre  the pre walker
1685  * @param post the post walker
1686  */
1687 static void stat_irg_walk_blkwise(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1688 {
1689         /* for now, do NOT differentiate between blockwise and normal */
1690         stat_irg_walk(ctx, irg, pre, post);
1691 }  /* stat_irg_walk_blkwise */
1692
1693 /**
1694  * Hook: A walk over the graph's blocks is initiated. Do not count walks from statistics code.
1695  *
1696  * @param ctx  the hook context
1697  * @param irg  the IR graph that will be walked
1698  * @param node the IR node from which the block walk starts
1699  * @param pre  the pre walker
1700  * @param post the post walker
1701  */
1702 static void stat_irg_block_walk(void *ctx, ir_graph *irg, ir_node *node, generic_func *pre, generic_func *post)
1703 {
1704         (void) ctx;
1705         (void) node;
1706         (void) pre;
1707         (void) post;
1708         if (! status->stat_options)
1709                 return;
1710
1711         STAT_ENTER_SINGLE;
1712         {
1713                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1714
1715                 cnt_inc(&graph->cnt[gcnt_acc_walked_blocks]);
1716         }
1717         STAT_LEAVE;
1718 }  /* stat_irg_block_walk */
1719
1720 /**
1721  * Called for every node that is removed due to an optimization.
1722  *
1723  * @param n     the IR node that will be removed
1724  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
1725  * @param kind  the optimization kind
1726  */
1727 static void removed_due_opt(ir_node *n, hmap_opt_entry_t *hmap, hook_opt_kind kind)
1728 {
1729         opt_entry_t *entry;
1730         ir_op *op = stat_get_irn_op(n);
1731
1732         /* ignore CSE for Constants */
1733         if (kind == HOOK_OPT_CSE && (is_Const(n) || is_SymConst(n)))
1734                 return;
1735
1736         /* increase the counter for this optimization kind */
1737         entry = opt_get_entry(op, hmap);
1738         cnt_inc(&entry->count);
1739 }  /* removed_due_opt */
1740
1741 /**
1742  * Hook: Some nodes were optimized into some others due to an optimization.
1743  *
1744  * @param ctx  the hook context
1745  */
1746 static void stat_merge_nodes(
1747     void *ctx,
1748     ir_node **new_node_array, int new_num_entries,
1749     ir_node **old_node_array, int old_num_entries,
1750     hook_opt_kind opt)
1751 {
1752         (void) ctx;
1753         if (! status->stat_options)
1754                 return;
1755
1756         STAT_ENTER;
1757         {
1758                 int i, j;
1759                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1760
1761                 cnt_inc(&status->num_opts[opt]);
1762                 if (status->reassoc_run)
1763                         opt = HOOK_OPT_REASSOC;
1764
1765                 for (i = 0; i < old_num_entries; ++i) {
1766                         /* nodes might be in both the new and the old array, so if we find
1767                            a node in both sets, it is NOT removed */
1768                         for (j = 0; j < new_num_entries; ++j) {
1769                                 if (old_node_array[i] == new_node_array[j])
1770                                         break;
1771                         }  /* for */
1772                         if (j >= new_num_entries) {
1773                                 int xopt = opt;
1774
1775                                 /* sometimes we did not detect that it was replaced by a Const */
1776                                 if (opt == HOOK_OPT_CONFIRM && new_num_entries == 1) {
1777                                         ir_op *op = get_irn_op(new_node_array[0]);
1778
1779                                         if (op == op_Const || op == op_SymConst)
1780                                                 xopt = HOOK_OPT_CONFIRM_C;
1781                                 }  /* if */
1782
1783                                 removed_due_opt(old_node_array[i], graph->opt_hash[xopt], (hook_opt_kind)xopt);
1784                         }  /* if */
1785                 }  /* for */
1786         }
1787         STAT_LEAVE;
1788 }  /* stat_merge_nodes */
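
/*
 * Example of the bookkeeping above: for a merge with
 *   old_node_array = { n1, n2 }  and  new_node_array = { n2 }
 * only n1 is reported to removed_due_opt(); n2 survives because it occurs in
 * both arrays.  If the kind was HOOK_OPT_CONFIRM and the single new node is a
 * Const or SymConst, the removal is booked as HOOK_OPT_CONFIRM_C instead.
 */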
1789
1790 /**
1791  * Hook: Reassociation is started/stopped.
1792  *
1793  * @param ctx   the hook context
1794  * @param flag  if non-zero, reassociation is started, otherwise it is stopped
1795  */
1796 static void stat_reassociate(void *ctx, int flag)
1797 {
1798         (void) ctx;
1799         if (! status->stat_options)
1800                 return;
1801
1802         STAT_ENTER;
1803         {
1804                 status->reassoc_run = flag;
1805         }
1806         STAT_LEAVE;
1807 }  /* stat_reassociate */
1808
1809 /**
1810  * Hook: A node was lowered into other nodes
1811  *
1812  * @param ctx  the hook context
1813  * @param node the IR node that will be lowered
1814  */
1815 static void stat_lower(void *ctx, ir_node *node)
1816 {
1817         (void) ctx;
1818         if (! status->stat_options)
1819                 return;
1820
1821         STAT_ENTER;
1822         {
1823                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1824
1825                 removed_due_opt(node, graph->opt_hash[HOOK_LOWERED], HOOK_LOWERED);
1826         }
1827         STAT_LEAVE;
1828 }  /* stat_lower */
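
/*
 * Note: lowering is booked through removed_due_opt() with the special kind
 * HOOK_LOWERED, so lowered nodes show up in the per-graph optimization
 * tables just like nodes removed by a regular optimization.
 */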
1829
1830 /**
1831  * Hook: A graph was inlined.
1832  *
1833  * @param ctx  the hook context
1834  * @param call the IR call that will be changed into the body of
1835  *             the called IR graph
1836  * @param called_irg  the IR graph representing the called routine
1837  */
1838 static void stat_inline(void *ctx, ir_node *call, ir_graph *called_irg)
1839 {
1840         (void) ctx;
1841         if (! status->stat_options)
1842                 return;
1843
1844         STAT_ENTER;
1845         {
1846                 ir_graph *irg = get_irn_irg(call);
1847                 graph_entry_t *i_graph = graph_get_entry(called_irg, status->irg_hash);
1848                 graph_entry_t *graph   = graph_get_entry(irg, status->irg_hash);
1849
1850                 cnt_inc(&graph->cnt[gcnt_acc_got_inlined]);
1851                 cnt_inc(&i_graph->cnt[gcnt_acc_was_inlined]);
1852         }
1853         STAT_LEAVE;
1854 }  /* stat_inline */
1855
1856 /**
1857  * Hook: A graph with tail-recursive calls was optimized.
1858  *
1859  * @param ctx  the hook context
1860  */
1861 static void stat_tail_rec(void *ctx, ir_graph *irg, int n_calls)
1862 {
1863         (void) ctx;
1864         if (! status->stat_options)
1865                 return;
1866
1867         STAT_ENTER;
1868         {
1869                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1870
1871                 graph->num_tail_recursion += n_calls;
1872         }
1873         STAT_LEAVE;
1874 }  /* stat_tail_rec */
1875
1876 /**
1877  * Hook: Strength reduction was performed on an iteration variable.
1878  *
1879  * @param ctx  the hook context
1880  */
1881 static void stat_strength_red(void *ctx, ir_graph *irg, ir_node *strong)
1882 {
1883         (void) ctx;
1884         if (! status->stat_options)
1885                 return;
1886
1887         STAT_ENTER;
1888         {
1889                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1890                 cnt_inc(&graph->cnt[gcnt_acc_strength_red]);
1891
1892                 removed_due_opt(strong, graph->opt_hash[HOOK_OPT_STRENGTH_RED], HOOK_OPT_STRENGTH_RED);
1893         }
1894         STAT_LEAVE;
1895 }  /* stat_strength_red */
1896
1897 /**
1898  * Hook: Start/Stop the dead node elimination.
1899  *
1900  * @param ctx  the hook context
1901  */
1902 static void stat_dead_node_elim(void *ctx, ir_graph *irg, int start)
1903 {
1904         (void) ctx;
1905         (void) irg;
1906         if (! status->stat_options)
1907                 return;
1908
1909         status->in_dead_node_elim = (start != 0);
1910 }  /* stat_dead_node_elim */
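
/*
 * Note: the in_dead_node_elim flag set here is checked by stat_new_node()
 * above, so nodes created while dead node elimination is running are not
 * counted as new nodes.
 */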
1911
1912 /**
1913  * Hook: if-conversion was tried.
1914  */
1915 static void stat_if_conversion(void *context, ir_graph *irg, ir_node *phi,
1916                                int pos, ir_node *mux, if_result_t reason)
1917 {
1918         (void) context;
1919         (void) phi;
1920         (void) pos;
1921         (void) mux;
1922         if (! status->stat_options)
1923                 return;
1924
1925         STAT_ENTER;
1926         {
1927                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1928
1929                 cnt_inc(&graph->cnt[gcnt_if_conv + reason]);
1930         }
1931         STAT_LEAVE;
1932 }  /* stat_if_conversion */
1933
1934 /**
1935  * Hook: A real function call was optimized.
1936  */
1937 static void stat_func_call(void *context, ir_graph *irg, ir_node *call)
1938 {
1939         (void) context;
1940         (void) call;
1941         if (! status->stat_options)
1942                 return;
1943
1944         STAT_ENTER;
1945         {
1946                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1947
1948                 cnt_inc(&graph->cnt[gcnt_acc_real_func_call]);
1949         }
1950         STAT_LEAVE;
1951 }  /* stat_func_call */
1952
1953 /**
1954  * Hook: A multiply was replaced by a series of Shifts/Adds/Subs.
1955  *
1956  * @param ctx  the hook context
1957  */
1958 static void stat_arch_dep_replace_mul_with_shifts(void *ctx, ir_node *mul)
1959 {
1960         (void) ctx;
1961         if (! status->stat_options)
1962                 return;
1963
1964         STAT_ENTER;
1965         {
1966                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1967                 removed_due_opt(mul, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1968         }
1969         STAT_LEAVE;
1970 }  /* stat_arch_dep_replace_mul_with_shifts */
1971
1972 /**
1973  * Hook: A division by const was replaced.
1974  *
1975  * @param ctx   the hook context
1976  * @param node  the division node that will be optimized
1977  */
1978 static void stat_arch_dep_replace_division_by_const(void *ctx, ir_node *node)
1979 {
1980         (void) ctx;
1981         if (! status->stat_options)
1982                 return;
1983
1984         STAT_ENTER;
1985         {
1986                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1987                 removed_due_opt(node, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1988         }
1989         STAT_LEAVE;
1990 }  /* stat_arch_dep_replace_division_by_const */
1991
1992 /*
1993  * Update the register pressure of a block.
1994  *
1995  * @param irg        the irg containing the block
1996  * @param block      the block for which the reg pressure should be set
1997  * @param pressure   the pressure
1998  * @param class_name the name of the register class
1999  */
2000 void stat_be_block_regpressure(ir_graph *irg, ir_node *block, int pressure, const char *class_name)
2001 {
2002         if (! status->stat_options)
2003                 return;
2004
2005         STAT_ENTER;
2006         {
2007                 graph_entry_t        *graph = graph_get_entry(irg, status->irg_hash);
2008                 be_block_entry_t     *block_ent;
2009                 reg_pressure_entry_t *rp_ent;
2010
2011                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2012                 rp_ent    = OALLOCZ(&status->be_data, reg_pressure_entry_t);
2013
2014                 rp_ent->class_name = class_name;
2015                 rp_ent->pressure   = pressure;
2016
2017                 pset_insert(block_ent->reg_pressure, rp_ent, HASH_PTR(class_name));
2018         }
2019         STAT_LEAVE;
2020 }  /* stat_be_block_regpressure */
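
/*
 * A minimal usage sketch for the function above; it is not compiled in.
 * The register class name "gp" and the pressure value 4 are made-up
 * example data.
 */
#if 0
static void example_report_regpressure(ir_graph *irg, ir_node *block)
{
        stat_be_block_regpressure(irg, block, 4, "gp");
}
#endif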
2021
2022 /**
2023  * Update the distribution of ready nodes of a block
2024  *
2025  * @param irg        the irg containing the block
2026  * @param block      the block for which the number of ready nodes is recorded
2027  * @param num_ready  the number of ready nodes
2028  */
2029 void stat_be_block_sched_ready(ir_graph *irg, ir_node *block, int num_ready)
2030 {
2031         if (! status->stat_options)
2032                 return;
2033
2034         STAT_ENTER;
2035         {
2036                 graph_entry_t    *graph = graph_get_entry(irg, status->irg_hash);
2037                 be_block_entry_t *block_ent;
2038
2039                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2040
2041                 /* increase the counter of corresponding number of ready nodes */
2042                 stat_inc_int_distrib_tbl(block_ent->sched_ready, num_ready);
2043         }
2044         STAT_LEAVE;
2045 }  /* stat_be_block_sched_ready */
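
/*
 * A minimal usage sketch for the function above; it is not compiled in.
 * The value 5 is a made-up example for the number of ready nodes in one
 * scheduling step.
 */
#if 0
static void example_report_sched_ready(ir_graph *irg, ir_node *block)
{
        stat_be_block_sched_ready(irg, block, 5);
}
#endif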
2046
2047 /**
2048  * Update the permutation statistic of a block.
2049  *
2050  * @param class_name the name of the register class
2051  * @param n_regs     number of registers in the register class
2052  * @param perm       the perm node
2053  * @param block      the block containing the perm
2054  * @param size       the size of the perm
2055  * @param real_size  number of pairs with different registers
2056  */
2057 void stat_be_block_stat_perm(const char *class_name, int n_regs, ir_node *perm, ir_node *block,
2058                              int size, int real_size)
2059 {
2060         if (! status->stat_options)
2061                 return;
2062
2063         STAT_ENTER;
2064         {
2065                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2066                 be_block_entry_t   *block_ent;
2067                 perm_class_entry_t *pc_ent;
2068                 perm_stat_entry_t  *ps_ent;
2069
2070                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2071                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2072                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2073
2074                 pc_ent->n_regs = n_regs;
2075
2076                 /* update information */
2077                 ps_ent->size      = size;
2078                 ps_ent->real_size = real_size;
2079         }
2080         STAT_LEAVE;
2081 }  /* stat_be_block_stat_perm */
2082
2083 /**
2084  * Update the permutation statistic of a single perm.
2085  *
2086  * @param class_name the name of the register class
2087  * @param perm       the perm node
2088  * @param block      the block containing the perm
2089  * @param is_chain   1 if chain, 0 if cycle
2090  * @param size       length of the cycle/chain
2091  * @param n_ops      the number of ops representing this cycle/chain after lowering
2092  */
2093 void stat_be_block_stat_permcycle(const char *class_name, ir_node *perm, ir_node *block,
2094                                   int is_chain, int size, int n_ops)
2095 {
2096         if (! status->stat_options)
2097                 return;
2098
2099         STAT_ENTER;
2100         {
2101                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2102                 be_block_entry_t   *block_ent;
2103                 perm_class_entry_t *pc_ent;
2104                 perm_stat_entry_t  *ps_ent;
2105
2106                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2107                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2108                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2109
2110                 if (is_chain) {
2111                         ps_ent->n_copies += n_ops;
2112                         stat_inc_int_distrib_tbl(ps_ent->chains, size);
2113                 } else {
2114                         ps_ent->n_exchg += n_ops;
2115                         stat_inc_int_distrib_tbl(ps_ent->cycles, size);
2116                 }  /* if */
2117         }
2118         STAT_LEAVE;
2119 }  /* stat_be_block_stat_permcycle */
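
/*
 * Example of the bookkeeping above: a chain of length 3 that was lowered to
 * 2 copy operations adds 2 to n_copies and increments the "chains"
 * distribution at size 3; a cycle of length 2 lowered to one exchange adds
 * 1 to n_exchg and increments the "cycles" distribution at size 2.
 */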
2120
2121 /* Dumps a statistics snapshot. */
2122 void stat_dump_snapshot(const char *name, const char *phase)
2123 {
2124         char fname[2048];
2125         const char *p;
2126         size_t l;
2127
2128         if (! status->stat_options)
2129                 return;
2130
2131         STAT_ENTER;
2132         {
2133                 graph_entry_t *entry;
2134                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
2135
2136                 /*
2137                  * The constant counter is only global, so we clear it here.
2138                  * Note that it does NOT contain the constants in DELETED
2139                  * graphs due to this.
2140                  */
2141                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2142                         stat_const_clear(status);
2143
2144                 /* build the name */
2145                 p = strrchr(name, '/');
2146 #ifdef _WIN32
2147                 {
2148                         const char *q;
2149
2150                         q = strrchr(name, '\\');
2151
2152                         /* NULL might not be the smallest pointer */
2153                         if (q && (!p || q > p))
2154                                 p = q;
2155                 }
2156 #endif /* _WIN32 */
2157                 if (p) {
2158                         ++p;
2159                         l = p - name;
2160
2161                         if (l > sizeof(fname) - 1)
2162                                 l = sizeof(fname) - 1;
2163
2164                         memcpy(fname, name, l);
2165                         fname[l] = '\0';
2166                 } else {
2167                         fname[0] = '\0';
2168                         p = name;
2169                 }  /* if */
2170                 strncat(fname, "firmstat-", sizeof(fname) - strlen(fname) - 1);
2171                 strncat(fname, phase, sizeof(fname) - strlen(fname) - 1);
2172                 strncat(fname, "-", sizeof(fname) - strlen(fname) - 1);
2173                 strncat(fname, p, sizeof(fname) - strlen(fname) - 1);
2174
2175                 stat_dump_init(fname);
2176
2177                 /* calculate the graph statistics */
2178                 for (entry = (graph_entry_t*)pset_first(status->irg_hash);
2179                       entry != NULL; entry = (graph_entry_t*)pset_next(status->irg_hash)) {
2180                         if (entry->irg == NULL) {
2181                                 /* special entry for the global count */
2182                                 continue;
2183                         }  /* if */
2184                         if (! entry->is_deleted) {
2185                                 /* the graph is still alive; count the nodes in it */
2186                                 update_graph_stat(global, entry);
2187                         }  /* if */
2188                 }  /* for */
2189
2190                 /* some calculations are dependent; we pushed them onto the wait_q */
2191                 while (! pdeq_empty(status->wait_q)) {
2192                         entry = (graph_entry_t*)pdeq_getr(status->wait_q);
2193
2194                         update_graph_stat_2(global, entry);
2195                 }  /* while */
2196
2197                 /* dump per graph */
2198                 for (entry = (graph_entry_t*)pset_first(status->irg_hash);
2199                      entry != NULL; entry = (graph_entry_t*)pset_next(status->irg_hash)) {
2200                         if (entry->irg == NULL) {
2201                                 /* special entry for the global count */
2202                                 continue;
2203                         }  /* if */
2204
2205                         if (! entry->is_deleted || status->stat_options & FIRMSTAT_COUNT_DELETED) {
2206                                 stat_dump_graph(entry);
2207                                 stat_dump_registered(entry);
2208                         }  /* if */
2209
2210                         if (! entry->is_deleted) {
2211                                 /* clear the counters that are not accumulated */
2212                                 graph_clear_entry(entry, 0);
2213                         }  /* if */
2214                 }  /* for */
2215
2216                 /* dump global */
2217                 stat_dump_graph(global);
2218
2219                 /* dump the const info */
2220                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2221                         stat_dump_consts(&status->const_info);
2222
2223                 /* dump the parameter distribution */
2224                 stat_dump_param_tbl(status->dist_param_cnt, global);
2225
2226                 /* dump the optimization counter and clear them */
2227                 stat_dump_opt_cnt(status->num_opts, ARRAY_SIZE(status->num_opts));
2228                 clear_optimization_counter();
2229
2230                 stat_dump_finish();
2231
2232                 stat_finish_pattern_history(fname);
2233
2234                 /* clear the global counters here */
2235                 {
2236                         node_entry_t *entry;
2237
2238                         for (entry = (node_entry_t*)pset_first(global->opcode_hash);
2239                              entry != NULL; entry = (node_entry_t*)pset_next(global->opcode_hash)) {
2240                                 opcode_clear_entry(entry);
2241                         }  /* for */
2242                         /* clear all global counters */
2243                         graph_clear_entry(global, /*all=*/1);
2244                 }
2245         }
2246         STAT_LEAVE;
2247 }  /* stat_dump_snapshot */
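
/*
 * Example of the file name construction above: for name = "src/main.c" and
 * phase = "lower" the directory part "src/" is kept and the snapshot is
 * written to "src/firmstat-lower-main.c"; for a name without a directory
 * part, e.g. "main.c", the result is "firmstat-lower-main.c".
 */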
2248
2249 typedef struct pass_t {
2250         ir_prog_pass_t pass;
2251         const char     *fname;
2252         const char     *phase;
2253 } pass_t;
2254
2255 /**
2256  * Wrapper to run stat_dump_snapshot() as an ir_prog pass.
2257  */
2258 static int stat_dump_snapshot_wrapper(ir_prog *irp, void *context)
2259 {
2260         pass_t *pass = (pass_t*)context;
2261
2262         (void)irp;
2263         stat_dump_snapshot(pass->fname, pass->phase);
2264         return 0;
2265 }  /* stat_dump_snapshot_wrapper */
2266
2267 /**
2268  * Ensure that no verifier is run from the wrapper.
2269  */
2270 static int no_verify(ir_prog *prog, void *ctx)
2271 {
2272         (void)prog;
2273         (void)ctx;
2274         return 0;
2275 }
2276
2277 /**
2278  * Ensure that no dumper is run from the wrapper.
2279  */
2280 static void no_dump(ir_prog *prog, void *ctx, unsigned idx)
2281 {
2282         (void)prog;
2283         (void)ctx;
2284         (void)idx;
2285 }
2286
2287 /* create an ir_prog pass */
2288 ir_prog_pass_t *stat_dump_snapshot_pass(
2289         const char *name, const char *fname, const char *phase)
2290 {
2291         pass_t *pass = XMALLOCZ(pass_t);
2292
2293         def_prog_pass_constructor(
2294                 &pass->pass, name ? name : "stat_snapshot", stat_dump_snapshot_wrapper);
2295         pass->fname = fname;
2296         pass->phase = phase;
2297
2298         /* no dump/verify */
2299         pass->pass.dump_irprog   = no_dump;
2300         pass->pass.verify_irprog = no_verify;
2301
2302         return &pass->pass;
2303 }  /* stat_dump_snapshot_pass */
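
/*
 * A usage sketch for the pass constructor above; it is not compiled in.
 * The pass name, file name and phase strings are example values; running
 * the returned pass is left to the client's pass manager.
 */
#if 0
static void example_create_snapshot_pass(void)
{
        ir_prog_pass_t *pass
                = stat_dump_snapshot_pass("stat_snapshot", "test.c", "after-opt");
        (void)pass;
}
#endif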
2304
2305 /** the hook entries for the Firm statistics module */
2306 static hook_entry_t stat_hooks[hook_last];
2307
2308 /* initialize the statistics module. */
2309 void firm_init_stat(unsigned enable_options)
2310 {
2311 #define X(a)  a, sizeof(a)-1
2312 #define HOOK(h, fkt) \
2313         stat_hooks[h].hook._##h = fkt; register_hook(h, &stat_hooks[h])
2314         unsigned num = 0;
2315
2316         if (! (enable_options & FIRMSTAT_ENABLED))
2317                 return;
2318
2319         status = XMALLOCZ(stat_info_t);
2320
2321         /* enable statistics */
2322         status->stat_options = enable_options & FIRMSTAT_ENABLED ? enable_options : 0;
2323
2324         /* register all hooks */
2325         HOOK(hook_new_ir_op,                          stat_new_ir_op);
2326         HOOK(hook_free_ir_op,                         stat_free_ir_op);
2327         HOOK(hook_new_node,                           stat_new_node);
2328         HOOK(hook_turn_into_id,                       stat_turn_into_id);
2329         HOOK(hook_normalize,                          stat_normalize);
2330         HOOK(hook_new_graph,                          stat_new_graph);
2331         HOOK(hook_free_graph,                         stat_free_graph);
2332         HOOK(hook_irg_walk,                           stat_irg_walk);
2333         HOOK(hook_irg_walk_blkwise,                   stat_irg_walk_blkwise);
2334         HOOK(hook_irg_block_walk,                     stat_irg_block_walk);
2335         HOOK(hook_merge_nodes,                        stat_merge_nodes);
2336         HOOK(hook_reassociate,                        stat_reassociate);
2337         HOOK(hook_lower,                              stat_lower);
2338         HOOK(hook_inline,                             stat_inline);
2339         HOOK(hook_tail_rec,                           stat_tail_rec);
2340         HOOK(hook_strength_red,                       stat_strength_red);
2341         HOOK(hook_dead_node_elim,                     stat_dead_node_elim);
2342         HOOK(hook_if_conversion,                      stat_if_conversion);
2343         HOOK(hook_func_call,                          stat_func_call);
2344         HOOK(hook_arch_dep_replace_mul_with_shifts,   stat_arch_dep_replace_mul_with_shifts);
2345         HOOK(hook_arch_dep_replace_division_by_const, stat_arch_dep_replace_division_by_const);
2346
2347         obstack_init(&status->cnts);
2348         obstack_init(&status->be_data);
2349
2350         /* create the hash-tables */
2351         status->irg_hash   = new_pset(graph_cmp, 8);
2352         status->ir_op_hash = new_pset(opcode_cmp_2, 1);
2353
2354         /* create the wait queue */
2355         status->wait_q     = new_pdeq();
2356
2357         if (enable_options & FIRMSTAT_COUNT_STRONG_OP) {
2358                 /* build the pseudo-ops */
2359
2360                 _op_Phi0.code    = --num;
2361                 _op_Phi0.name    = new_id_from_chars(X("Phi0"));
2362
2363                 _op_PhiM.code    = --num;
2364                 _op_PhiM.name    = new_id_from_chars(X("PhiM"));
2365
2366                 _op_ProjM.code   = --num;
2367                 _op_ProjM.name   = new_id_from_chars(X("ProjM"));
2368
2369                 _op_MulC.code    = --num;
2370                 _op_MulC.name    = new_id_from_chars(X("MulC"));
2371
2372                 _op_DivC.code    = --num;
2373                 _op_DivC.name    = new_id_from_chars(X("DivC"));
2374
2375                 _op_ModC.code    = --num;
2376                 _op_ModC.name    = new_id_from_chars(X("ModC"));
2377
2378                 _op_QuotC.code   = --num;
2379                 _op_QuotC.name   = new_id_from_chars(X("QuotC"));
2380
2381                 status->op_Phi0    = &_op_Phi0;
2382                 status->op_PhiM    = &_op_PhiM;
2383                 status->op_ProjM   = &_op_ProjM;
2384                 status->op_MulC    = &_op_MulC;
2385                 status->op_DivC    = &_op_DivC;
2386                 status->op_ModC    = &_op_ModC;
2387                 status->op_QuotC   = &_op_QuotC;
2388         } else {
2389                 status->op_Phi0    = NULL;
2390                 status->op_PhiM    = NULL;
2391                 status->op_ProjM   = NULL;
2392                 status->op_MulC    = NULL;
2393                 status->op_DivC    = NULL;
2394                 status->op_ModC    = NULL;
2395                 status->op_QuotC   = NULL;
2396         }  /* if */
2397
2398         /* for Florian: count the Sel depth */
2399         if (enable_options & FIRMSTAT_COUNT_SELS) {
2400                 _op_SelSel.code    = --num;
2401                 _op_SelSel.name    = new_id_from_chars(X("Sel(Sel)"));
2402
2403                 _op_SelSelSel.code = --num;
2404                 _op_SelSelSel.name = new_id_from_chars(X("Sel(Sel(Sel))"));
2405
2406                 status->op_SelSel    = &_op_SelSel;
2407                 status->op_SelSelSel = &_op_SelSelSel;
2408         } else {
2409                 status->op_SelSel    = NULL;
2410                 status->op_SelSelSel = NULL;
2411         }  /* if */
2412
2413         /* register the dumper */
2414         stat_register_dumper(&simple_dumper);
2415
2416         if (enable_options & FIRMSTAT_CSV_OUTPUT)
2417                 stat_register_dumper(&csv_dumper);
2418
2419         /* initialize the pattern hash */
2420         stat_init_pattern_history(enable_options & FIRMSTAT_PATTERN_ENABLED);
2421
2422         /* initialize the Const options */
2423         if (enable_options & FIRMSTAT_COUNT_CONSTS)
2424                 stat_init_const_cnt(status);
2425
2426         /* distribution table for parameter counts */
2427         status->dist_param_cnt = stat_new_int_distrib_tbl();
2428
2429         clear_optimization_counter();
2430
2431 #undef HOOK
2432 #undef X
2433 }  /* firm_init_stat */
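
/*
 * A usage sketch for the public interface of this module; it is not
 * compiled in.  The option combination and the file/phase names are
 * example values.
 */
#if 0
static void example_stat_session(void)
{
        firm_init_stat(FIRMSTAT_ENABLED | FIRMSTAT_CSV_OUTPUT);

        /* ... build and optimize the IR graphs ... */

        if (stat_is_active())
                stat_dump_snapshot("test.c", "after-opt");

        stat_term();
}
#endif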
2434
2435 /**
2436  * Frees all dumper structures.
2437  */
2438 static void stat_term_dumper(void)
2439 {
2440         dumper_t *dumper, *next_dumper;
2441
2442         for (dumper = status->dumper; dumper; /* iteration done in loop body */ ) {
2443                 if (dumper->func_map)
2444                         del_pset(dumper->func_map);
2445
2446                 next_dumper = dumper->next;
2447                 free(dumper);
2448                 dumper = next_dumper;
2449         }  /* for */
2450 }  /* stat_term_dumper */
2451
2452
2453 /* Terminates the statistics module, frees all memory. */
2454 void stat_term(void)
2455 {
2456         if (status != (stat_info_t *)&status_disable) {
2457                 obstack_free(&status->be_data, NULL);
2458                 obstack_free(&status->cnts, NULL);
2459
2460                 stat_term_dumper();
2461
2462                 xfree(status);
2463                 status = (stat_info_t *)&status_disable;
2464         }
2465 }  /* stat_term */
2466
2467 /* returns 1 if statistics were initialized, 0 otherwise */
2468 int stat_is_active(void)
2469 {
2470         return status != (stat_info_t *)&status_disable;
2471 }  /* stat_is_active */
2472
2473 #else
2474
2475 /* initialize the statistics module. */
2476 void firm_init_stat(unsigned enable_options) { (void)enable_options; }
2477
2478 /* Dumps a statistics snapshot */
2479 void stat_dump_snapshot(const char *name, const char *phase) { (void)name; (void)phase; }
2480
2481 /* terminates the statistics module, frees all memory */
2482 void stat_term(void) {}
2483
2484 #endif /* FIRM_STATISTICS */