libfirm: ir/stat/firmstat.c
1 /*
2  * Copyright (C) 1995-2008 University of Karlsruhe.  All right reserved.
3  *
4  * This file is part of libFirm.
5  *
6  * This file may be distributed and/or modified under the terms of the
7  * GNU General Public License version 2 as published by the Free Software
8  * Foundation and appearing in the file LICENSE.GPL included in the
9  * packaging of this file.
10  *
11  * Licensees holding valid libFirm Professional Edition licenses may use
12  * this file in accordance with the libFirm Commercial License.
13  * Agreement provided with the Software.
14  *
15  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17  * PURPOSE.
18  */
19
20 /**
21  * @file
22  * @brief   Statistics for Firm.
23  * @author  Michael Beck
24  * @version $Id$
25  */
26 #include "config.h"
27
28 #ifdef FIRM_STATISTICS
29
30 #include <stdio.h>
31 #include <stdlib.h>
32 #include <string.h>
33
34 #include "irouts.h"
35 #include "irdump.h"
36 #include "hashptr.h"
37 #include "firmstat_t.h"
38 #include "irpass_t.h"
39 #include "pattern.h"
40 #include "dags.h"
41 #include "stat_dmp.h"
42 #include "xmalloc.h"
43 #include "irhooks.h"
44
45 /*
46  * These need to be static:
47  * special pseudo opcodes that we use to count some interesting cases.
48  */
49
50 /**
51  * The Phi0, a node that is created during SSA construction
52  */
53 static ir_op _op_Phi0;
54
55 /** The PhiM, just to count memory Phis. */
56 static ir_op _op_PhiM;
57
58 /** The Mul by Const node. */
59 static ir_op _op_MulC;
60
61 /** The Div by Const node. */
62 static ir_op _op_DivC;
63
64 /** The Mod by Const node. */
65 static ir_op _op_ModC;
66
67 /** The DivMod by Const node. */
68 static ir_op _op_DivModC;
69
70 /** The Quot by Const node. */
71 static ir_op _op_QuotC;
72
73 /** The memory Proj node. */
74 static ir_op _op_ProjM;
75
76 /** A Sel of a Sel */
77 static ir_op _op_SelSel;
78
79 /** A Sel of a Sel of a Sel */
80 static ir_op _op_SelSelSel;
81
82 /* ---------------------------------------------------------------------------------- */
83
84 /** Marks the beginning of a statistic (hook) function. */
85 #define STAT_ENTER              ++status->recursive
86
87 /** Marks the end of a statistic (hook) function. */
88 #define STAT_LEAVE              --status->recursive
89
90 /** Allows entering a statistic function only when we are not already in a hook. */
91 #define STAT_ENTER_SINGLE       do { if (status->recursive > 0) return; ++status->recursive; } while (0)
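/* (Used by the walk hooks below so that walks started from inside the
 * statistics module itself are not counted.) */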
92
93 /**
94  * global status
95  */
96 static const unsigned status_disable = 0;
97 static stat_info_t *status = (stat_info_t *)&status_disable;
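/* Note: until the statistics module is initialised, 'status' points at the zeroed
 * dummy word above, so the "if (! status->stat_options)" check at the start of every
 * hook returns immediately (this relies on stat_options being the first member of
 * stat_info_t). */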
98
99 /**
100  * Compare two elements of the opcode hash.
101  */
102 static int opcode_cmp(const void *elt, const void *key) {
103         const node_entry_t *e1 = elt;
104         const node_entry_t *e2 = key;
105
106         return e1->op->code - e2->op->code;
107 }  /* opcode_cmp */
108
109 /**
110  * Compare two elements of the graph hash.
111  */
112 static int graph_cmp(const void *elt, const void *key) {
113         const graph_entry_t *e1 = elt;
114         const graph_entry_t *e2 = key;
115
116         return e1->irg != e2->irg;
117 }  /* graph_cmp */
118
119 /**
120  * Compare two elements of the optimization hash.
121  */
122 static int opt_cmp(const void *elt, const void *key) {
123         const opt_entry_t *e1 = elt;
124         const opt_entry_t *e2 = key;
125
126         return e1->op->code != e2->op->code;
127 }  /* opt_cmp */
128
129 /**
130  * Compare two elements of the block/extbb hash.
131  */
132 static int block_cmp(const void *elt, const void *key) {
133         const block_entry_t *e1 = elt;
134         const block_entry_t *e2 = key;
135
136         /* it's enough to compare the block number */
137         return e1->block_nr != e2->block_nr;
138 }  /* block_cmp */
139
140 /**
141  * Compare two elements of the be_block hash.
142  */
143 static int be_block_cmp(const void *elt, const void *key) {
144         const be_block_entry_t *e1 = elt;
145         const be_block_entry_t *e2 = key;
146
147         return e1->block_nr != e2->block_nr;
148 }  /* be_block_cmp */
149
150 /**
151  * Compare two elements of reg pressure hash.
152  * Compare two elements of the reg pressure hash.
153 static int reg_pressure_cmp(const void *elt, const void *key) {
154         const reg_pressure_entry_t *e1 = elt;
155         const reg_pressure_entry_t *e2 = key;
156
157         return e1->class_name != e2->class_name;
158 }  /* reg_pressure_cmp */
159
160 /**
161  * Compare two elements of the perm_stat hash.
162  */
163 static int perm_stat_cmp(const void *elt, const void *key) {
164         const perm_stat_entry_t *e1 = elt;
165         const perm_stat_entry_t *e2 = key;
166
167         return e1->perm != e2->perm;
168 }  /* perm_stat_cmp */
169
170 /**
171  * Compare two elements of the perm_class hash.
172  */
173 static int perm_class_cmp(const void *elt, const void *key) {
174         const perm_class_entry_t *e1 = elt;
175         const perm_class_entry_t *e2 = key;
176
177         return e1->class_name != e2->class_name;
178 }  /* perm_class_cmp */
179
180 /**
181  * Compare two elements of the ir_op hash.
182  */
183 static int opcode_cmp_2(const void *elt, const void *key) {
184         const ir_op *e1 = elt;
185         const ir_op *e2 = key;
186
187         return e1->code != e2->code;
188 }  /* opcode_cmp_2 */
189
190 /**
191  * Compare two elements of the address_mark set.
192  */
193 static int address_mark_cmp(const void *elt, const void *key, size_t size) {
194         const address_mark_entry_t *e1 = elt;
195         const address_mark_entry_t *e2 = key;
196         (void) size;
197
198         /* compare only the nodes, the rest is used as data container */
199         return e1->node != e2->node;
200 }  /* address_mark_cmp */
201
202 /**
203  * Clears all counters in a node_entry_t.
204  */
205 static void opcode_clear_entry(node_entry_t *elem) {
206         cnt_clr(&elem->cnt_alive);
207         cnt_clr(&elem->new_node);
208         cnt_clr(&elem->into_Id);
209         cnt_clr(&elem->normalized);
210 }  /* opcode_clear_entry */
211
212 /**
213  * Returns the associated node_entry_t for an ir_op (and allocates
214  * one if not yet available).
215  *
216  * @param op    the IR operation
217  * @param hmap  a hash map containing ir_op* -> node_entry_t*
218  */
219 static node_entry_t *opcode_get_entry(const ir_op *op, hmap_node_entry_t *hmap) {
220         node_entry_t key;
221         node_entry_t *elem;
222
223         key.op = op;
224
225         elem = pset_find(hmap, &key, op->code);
226         if (elem)
227                 return elem;
228
229         elem = OALLOCZ(&status->cnts, node_entry_t);
230
231         /* clear counter */
232         opcode_clear_entry(elem);
233
234         elem->op = op;
235
236         return pset_insert(hmap, elem, op->code);
237 }  /* opcode_get_entry */
238
239 /**
240  * Returns the associated ir_op for an opcode.
241  *
242  * @param code  the IR opcode
243  * @param hmap  the hash map containing opcode -> ir_op*
244  */
245 static ir_op *opcode_find_entry(ir_opcode code, hmap_ir_op *hmap) {
246         ir_op key;
247
248         key.code = code;
249         return pset_find(hmap, &key, code);
250 }  /* opcode_find_entry */
251
252 /**
253  * Clears all counters in a graph_entry_t.
254  *
255  * @param elem  the graph entry
256  * @param all   if non-zero, clears all counters, else leaves the accumulated ones
257  */
258 static void graph_clear_entry(graph_entry_t *elem, int all) {
259         int i;
260
261         /* clear accumulated / non-accumulated counter */
262         for (i = all ? 0 : _gcnt_non_acc; i < _gcnt_last; ++i) {
263                 cnt_clr(&elem->cnt[i]);
264         }  /* for */
265
266         if (elem->block_hash) {
267                 del_pset(elem->block_hash);
268                 elem->block_hash = NULL;
269         }  /* if */
270
271         if (elem->extbb_hash) {
272                 del_pset(elem->extbb_hash);
273                 elem->extbb_hash = NULL;
274         }  /* if */
275
276         obstack_free(&elem->recalc_cnts, NULL);
277         obstack_init(&elem->recalc_cnts);
278 }  /* graph_clear_entry */
279
280 /**
281  * Returns the associated graph_entry_t for an IR graph.
282  *
283  * @param irg   the IR graph, NULL for the global counter
284  * @param hmap  the hash map containing ir_graph* -> graph_entry_t*
285  */
286 static graph_entry_t *graph_get_entry(ir_graph *irg, hmap_graph_entry_t *hmap)
287 {
288         graph_entry_t key;
289         graph_entry_t *elem;
290         size_t i;
291
292         key.irg = irg;
293
294         elem = pset_find(hmap, &key, HASH_PTR(irg));
295
296         if (elem) {
297                 /* create the hash map for backend block information if needed */
298                 if (! elem->be_block_hash)
299                         elem->be_block_hash = new_pset(be_block_cmp, 5);
300
301                 return elem;
302         }  /* if */
303
304         /* allocate a new one */
305         elem = OALLOCZ(&status->cnts, graph_entry_t);
306         obstack_init(&elem->recalc_cnts);
307
308         /* clear counter */
309         graph_clear_entry(elem, 1);
310
311         /* new hash table for opcodes here  */
312         elem->opcode_hash   = new_pset(opcode_cmp, 5);
313         elem->address_mark  = new_set(address_mark_cmp, 5);
314         elem->irg           = irg;
315
316         /* these hash tables are created on demand */
317         elem->block_hash = NULL;
318         elem->extbb_hash = NULL;
319
320         for (i = 0; i < sizeof(elem->opt_hash)/sizeof(elem->opt_hash[0]); ++i)
321                 elem->opt_hash[i] = new_pset(opt_cmp, 4);
322
323         return pset_insert(hmap, elem, HASH_PTR(irg));
324 }  /* graph_get_entry */
325
326 /**
327  * Clears all counters in an opt_entry_t.
328  */
329 static void opt_clear_entry(opt_entry_t *elem) {
330         cnt_clr(&elem->count);
331 }  /* opt_clear_entry */
332
333 /**
334  * Returns the associated opt_entry_t for an IR operation.
335  *
336  * @param op    the IR operation
337  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
338  */
339 static opt_entry_t *opt_get_entry(const ir_op *op, hmap_opt_entry_t *hmap)
340 {
341         opt_entry_t key;
342         opt_entry_t *elem;
343
344         key.op = op;
345
346         elem = pset_find(hmap, &key, op->code);
347         if (elem)
348                 return elem;
349
350         elem = OALLOCZ(&status->cnts, opt_entry_t);
351
352         /* clear new counter */
353         opt_clear_entry(elem);
354
355         elem->op = op;
356
357         return pset_insert(hmap, elem, op->code);
358 }  /* opt_get_entry */
359
360 /**
361  * clears all counters in a block_entry_t
362  */
363 static void block_clear_entry(block_entry_t *elem) {
364         int i;
365
366         for (i = 0; i < _bcnt_last; ++i)
367                 cnt_clr(&elem->cnt[i]);
368 }  /* block_clear_entry */
369
370 /**
371  * Returns the associated block_entry_t for a block.
372  *
373  * @param block_nr  an IR block number
374  * @param hmap      a hash map containing long -> block_entry_t
375  */
376 static block_entry_t *block_get_entry(struct obstack *obst, long block_nr, hmap_block_entry_t *hmap)
377 {
378         block_entry_t key;
379         block_entry_t *elem;
380
381         key.block_nr = block_nr;
382
383         elem = pset_find(hmap, &key, block_nr);
384         if (elem)
385                 return elem;
386
387         elem = OALLOCZ(obst, block_entry_t);
388
389         /* clear new counter */
390         block_clear_entry(elem);
391
392         elem->block_nr = block_nr;
393
394         return pset_insert(hmap, elem, block_nr);
395 }  /* block_get_entry */
396
397 /**
398  * Clear all sets in be_block_entry_t.
399  */
400 static void be_block_clear_entry(be_block_entry_t *elem)
401 {
402         if (elem->reg_pressure)
403                 del_pset(elem->reg_pressure);
404
405         if (elem->sched_ready)
406                 stat_delete_distrib_tbl(elem->sched_ready);
407
408         if (elem->perm_class_stat)
409                 del_pset(elem->perm_class_stat);
410
411         elem->reg_pressure    = new_pset(reg_pressure_cmp, 5);
412         elem->sched_ready     = stat_new_int_distrib_tbl();
413         elem->perm_class_stat = new_pset(perm_class_cmp, 5);
414 }  /* be_block_clear_entry */
415
416 /**
417  * Returns the associated be_block_entry_t for a block.
418  *
419  * @param block_nr  an IR block number
420  * @param hmap      a hash map containing long -> be_block_entry_t
421  */
422 static be_block_entry_t *be_block_get_entry(struct obstack *obst, long block_nr, hmap_be_block_entry_t *hmap)
423 {
424         be_block_entry_t key;
425         be_block_entry_t *elem;
426
427         key.block_nr = block_nr;
428
429         elem = pset_find(hmap, &key, block_nr);
430         if (elem)
431                 return elem;
432
433         elem = OALLOCZ(obst, be_block_entry_t);
434
435         /* clear new counter */
436         be_block_clear_entry(elem);
437
438         elem->block_nr = block_nr;
439
440         return pset_insert(hmap, elem, block_nr);
441 }  /* be_block_get_entry */
442
443 /**
444  * clears all sets in perm_class_entry_t
445  */
446 static void perm_class_clear_entry(perm_class_entry_t *elem) {
447         if (elem->perm_stat)
448                 del_pset(elem->perm_stat);
449
450         elem->perm_stat = new_pset(perm_stat_cmp, 5);
451 }  /* perm_class_clear_entry */
452
453 /**
454  * Returns the associated perm_class entry for a register class.
455  *
456  * @param class_name  the register class name
457  * @param hmap        a hash map containing class_name -> perm_class_entry_t
458  */
459 static perm_class_entry_t *perm_class_get_entry(struct obstack *obst, const char *class_name,
460                                                 hmap_perm_class_entry_t *hmap)
461 {
462         perm_class_entry_t key;
463         perm_class_entry_t *elem;
464
465         key.class_name = class_name;
466
467         elem = pset_find(hmap, &key, HASH_PTR(class_name));
468         if (elem)
469                 return elem;
470
471         elem = OALLOCZ(obst, perm_class_entry_t);
472
473         /* clear new counter */
474         perm_class_clear_entry(elem);
475
476         elem->class_name = class_name;
477
478         return pset_insert(hmap, elem, HASH_PTR(class_name));
479 }  /* perm_class_get_entry */
480
481 /**
482  * clears all sets in perm_stat_entry_t
483  */
484 static void perm_stat_clear_entry(perm_stat_entry_t *elem) {
485         if (elem->chains)
486                 stat_delete_distrib_tbl(elem->chains);
487
488         if (elem->cycles)
489                 stat_delete_distrib_tbl(elem->cycles);
490
491         elem->chains = stat_new_int_distrib_tbl();
492         elem->cycles = stat_new_int_distrib_tbl();
493 }  /* perm_stat_clear_entry */
494
495 /**
496  * Returns the associated perm_stat entry for a perm.
497  *
498  * @param perm      the perm node
499  * @param hmap      a hash map containing perm -> perm_stat_entry_t
500  */
501 static perm_stat_entry_t *perm_stat_get_entry(struct obstack *obst, ir_node *perm, hmap_perm_stat_entry_t *hmap)
502 {
503         perm_stat_entry_t key;
504         perm_stat_entry_t *elem;
505
506         key.perm = perm;
507
508         elem = pset_find(hmap, &key, HASH_PTR(perm));
509         if (elem)
510                 return elem;
511
512         elem = OALLOCZ(obst, perm_stat_entry_t);
513
514         /* clear new counter */
515         perm_stat_clear_entry(elem);
516
517         elem->perm = perm;
518
519         return pset_insert(hmap, elem, HASH_PTR(perm));
520 }  /* perm_stat_get_entry */
521
522 /**
523  * Clears the optimization counters.
524  */
525 static void clear_optimization_counter(void)  {
526         int i;
527         for (i = 0; i < FS_OPT_MAX; ++i)
528                 cnt_clr(&status->num_opts[i]);
529 }
530
531 /**
532  * Returns the ir_op for an IR node,
533  * handles special cases and returns pseudo opcodes.
534  *
535  * @param node  an IR node
536  */
537 static ir_op *stat_get_irn_op(ir_node *node)
538 {
539         ir_op *op = get_irn_op(node);
540         ir_opcode opc = op->code;
541
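        /* Note: the pseudo ops below may be NULL, depending on which statistic
         * options are enabled, hence the fallback to the real op in each case. */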
542         switch (opc) {
543         case iro_Phi:
544                 if (get_irn_arity(node) == 0) {
545                         /* special case, a Phi0 node, count on extra counter */
546                         op = status->op_Phi0 ? status->op_Phi0 : op;
547                 } else if (get_irn_mode(node) == mode_M) {
548                         /* special case, a Memory Phi node, count on extra counter */
549                         op = status->op_PhiM ? status->op_PhiM : op;
550                 }  /* if */
551                 break;
552         case iro_Proj:
553                 if (get_irn_mode(node) == mode_M) {
554                         /* special case, a Memory Proj node, count on extra counter */
555                         op = status->op_ProjM ? status->op_ProjM : op;
556                 }  /* if */
557                 break;
558         case iro_Mul:
559                 if (is_Const(get_Mul_left(node)) || is_Const(get_Mul_right(node))) {
560                         /* special case, a Multiply by a const, count on extra counter */
561                         op = status->op_MulC ? status->op_MulC : op;
562                 }  /* if */
563                 break;
564         case iro_Div:
565                 if (is_Const(get_Div_right(node))) {
566                         /* special case, a division by a const, count on extra counter */
567                         op = status->op_DivC ? status->op_DivC : op;
568                 }  /* if */
569                 break;
570         case iro_Mod:
571                 if (is_Const(get_Mod_right(node))) {
572                         /* special case, a modulo by a const, count on extra counter */
573                         op = status->op_ModC ? status->op_ModC : op;
574                 }  /* if */
575                 break;
576         case iro_DivMod:
577                 if (is_Const(get_DivMod_right(node))) {
578                         /* special case, a division/modulo by a const, count on extra counter */
579                         op = status->op_DivModC ? status->op_DivModC : op;
580                 }  /* if */
581                 break;
582         case iro_Quot:
583                 if (is_Const(get_Quot_right(node))) {
584                         /* special case, a floating point division by a const, count on extra counter */
585                         op = status->op_QuotC ? status->op_QuotC : op;
586                 }  /* if */
587                 break;
588         case iro_Sel:
589                 if (is_Sel(get_Sel_ptr(node))) {
590                         /* special case, a Sel of a Sel, count on extra counter */
591                         op = status->op_SelSel ? status->op_SelSel : op;
592                         if (is_Sel(get_Sel_ptr(get_Sel_ptr(node)))) {
593                                 /* special case, a Sel of a Sel of a Sel, count on extra counter */
594                                 op = status->op_SelSelSel ? status->op_SelSelSel : op;
595                         }  /* if */
596                 }  /* if */
597                 break;
598         default:
599                 ;
600         }  /* switch */
601
602         return op;
603 }  /* stat_get_irn_op */
604
605 /**
606  * Update the block counters.
607  */
608 static void undate_block_info(ir_node *node, graph_entry_t *graph)
609 {
610         ir_op *op = get_irn_op(node);
611         ir_node *block;
612         block_entry_t *b_entry;
613         int i, arity;
614
615         /* check for block */
616         if (op == op_Block) {
617                 arity = get_irn_arity(node);
618                 b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(node), graph->block_hash);
619                 /* mark start and end blocks to allow filtering them out */
620                 if (node == get_irg_start_block(graph->irg))
621                         b_entry->is_start = 1;
622                 else if (node == get_irg_end_block(graph->irg))
623                         b_entry->is_end = 1;
624
625                 /* count all incoming edges */
626                 for (i = 0; i < arity; ++i) {
627                         ir_node *pred = get_irn_n(node, i);
628                         ir_node *other_block = get_nodes_block(pred);
629                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
630
631                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
632                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
633                 }  /* for */
634                 return;
635         }  /* if */
636
637         block   = get_nodes_block(node);
638         b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(block), graph->block_hash);
639
640         if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
641                 /* count data Phi per block */
642                 cnt_inc(&b_entry->cnt[bcnt_phi_data]);
643         }  /* if */
644
645         /* we have a new node in our block */
646         cnt_inc(&b_entry->cnt[bcnt_nodes]);
647
648         /* don't count keep-alive edges */
649         if (is_End(node))
650                 return;
651
652         arity = get_irn_arity(node);
653
654         for (i = 0; i < arity; ++i) {
655                 ir_node *pred = get_irn_n(node, i);
656                 ir_node *other_block;
657
658                 other_block = get_nodes_block(pred);
659
660                 if (other_block == block)
661                         cnt_inc(&b_entry->cnt[bcnt_edges]);     /* an in-block edge */
662                 else {
663                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
664
665                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
666                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
667                 }  /* if */
668         }  /* for */
669 }  /* undate_block_info */
670
671 /**
672  * Update the extended block counter.
673  */
674 static void update_extbb_info(ir_node *node, graph_entry_t *graph)
675 {
676         ir_op *op = get_irn_op(node);
677         ir_extblk *extbb;
678         extbb_entry_t *eb_entry;
679         int i, arity;
680
681         /* check for block */
682         if (op == op_Block) {
683                 extbb = get_nodes_extbb(node);
684                 arity = get_irn_arity(node);
685                 eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);
686
687                 /* count all incoming edges */
688                 for (i = 0; i < arity; ++i) {
689                         ir_node *pred = get_irn_n(node, i);
690                         ir_extblk *other_extbb = get_nodes_extbb(pred);
691
692                         if (extbb != other_extbb) {
693                                 extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);
694
695                                 cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
696                                 cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
697                         }  /* if */
698                 }  /* for */
699                 return;
700         }  /* if */
701
702         extbb    = get_nodes_extbb(node);
703         eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);
704
705         if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
706                 /* count data Phi per extbb */
707                 cnt_inc(&eb_entry->cnt[bcnt_phi_data]);
708         }  /* if */
709
710         /* we have a new node in our block */
711         cnt_inc(&eb_entry->cnt[bcnt_nodes]);
712
713         /* don't count keep-alive edges */
714         if (is_End(node))
715                 return;
716
717         arity = get_irn_arity(node);
718
719         for (i = 0; i < arity; ++i) {
720                 ir_node *pred = get_irn_n(node, i);
721                 ir_extblk *other_extbb = get_nodes_extbb(pred);
722
723                 if (other_extbb == extbb)
724                         cnt_inc(&eb_entry->cnt[bcnt_edges]);    /* an in-extbb edge */
725                 else {
726                         extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);
727
728                         cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
729                         cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
730                 }  /* if */
731         }  /* for */
732 }  /* update_extbb_info */
733
734 /**
735  * Calculates how many arguments of the call are constant and updates
736  * the parameter distribution.
737  */
738 static void analyse_params_of_Call(graph_entry_t *graph, ir_node *call) {
739         int i, num_const_args = 0, num_local_adr = 0;
740         int n = get_Call_n_params(call);
741
742         for (i = 0; i < n; ++i) {
743                 ir_node *param = get_Call_param(call, i);
744
745                 if (is_irn_constlike(param))
746                         ++num_const_args;
747                 else if (is_Sel(param)) {
748                         ir_node *base = param;
749
750                         do {
751                                 base = get_Sel_ptr(base);
752                         } while (is_Sel(base));
753
754                         if (base == get_irg_frame(current_ir_graph))
755                                 ++num_local_adr;
756                 }
757
758         }  /* for */
759
760         if (num_const_args > 0)
761                 cnt_inc(&graph->cnt[gcnt_call_with_cnst_arg]);
762         if (num_const_args == n)
763                 cnt_inc(&graph->cnt[gcnt_call_with_all_cnst_arg]);
764         if (num_local_adr > 0)
765                 cnt_inc(&graph->cnt[gcnt_call_with_local_adr]);
766
767         stat_inc_int_distrib_tbl(status->dist_param_cnt, n);
768 }  /* analyse_params_of_Call */
769
770 /**
771  * Update info on calls.
772  *
773  * @param call   The call
774  * @param graph  The graph entry containing the call
775  */
776 static void stat_update_call(ir_node *call, graph_entry_t *graph)
777 {
778         ir_node   *block = get_nodes_block(call);
779         ir_node   *ptr = get_Call_ptr(call);
780         ir_entity *ent = NULL;
781         ir_graph  *callee = NULL;
782
783         /*
784          * If the block is bad, the whole subgraph will collapse later
785          * so do not count this call.
786          * This happens in dead code.
787          */
788         if (is_Bad(block))
789                 return;
790
791         cnt_inc(&graph->cnt[gcnt_all_calls]);
792
793         /* found a call, this function is not a leaf */
794         graph->is_leaf = 0;
795
796         if (is_SymConst(ptr)) {
797                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
798                         /* ok, we seem to know the entity */
799                         ent = get_SymConst_entity(ptr);
800                         callee = get_entity_irg(ent);
801
802                         /* it is recursive if it calls itself at least once */
803                         if (callee == graph->irg)
804                                 graph->is_recursive = 1;
805                         if (callee == NULL)
806                                 cnt_inc(&graph->cnt[gcnt_external_calls]);
807                 }  /* if */
808         } else {
809                 /* indirect call, we cannot predict the callee */
810                 cnt_inc(&graph->cnt[gcnt_indirect_calls]);
811
812                 /* NOT a leaf call */
813                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
814         }  /* if */
815
816         /* check if it's a chain-call: then the call block
817          * must dominate the end block. */
818         {
819                 ir_node *curr = get_irg_end_block(graph->irg);
820                 int depth = get_Block_dom_depth(block);
821
822                 for (; curr != block && get_Block_dom_depth(curr) > depth;) {
823                         curr = get_Block_idom(curr);
824
825                         if (! curr || is_no_Block(curr))
826                                 break;
827                 }  /* for */
828
829                 if (curr != block)
830                         graph->is_chain_call = 0;
831         }
832
833         /* check, if the callee is a leaf */
834         if (callee) {
835                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
836
837                 if (called->is_analyzed) {
838                         if (! called->is_leaf)
839                                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
840                 }  /* if */
841         }  /* if */
842
843         analyse_params_of_Call(graph, call);
844 }  /* stat_update_call */
845
846 /**
847  * Update info on calls for graphs on the wait queue.
848  */
849 static void stat_update_call_2(ir_node *call, graph_entry_t *graph)
850 {
851         ir_node   *block = get_nodes_block(call);
852         ir_node   *ptr = get_Call_ptr(call);
853         ir_entity *ent = NULL;
854         ir_graph  *callee = NULL;
855
856         /*
857          * If the block is bad, the whole subgraph will collapse later
858          * so do not count this call.
859          * This happens in dead code.
860          */
861         if (is_Bad(block))
862                 return;
863
864         if (is_SymConst(ptr)) {
865                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
866                         /* ok, we seem to know the entity */
867                         ent = get_SymConst_entity(ptr);
868                         callee = get_entity_irg(ent);
869                 }  /* if */
870         }  /* if */
871
872         /* check, if the callee is a leaf */
873         if (callee) {
874                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
875
876                 assert(called->is_analyzed);
877
878                 if (! called->is_leaf)
879                         graph->is_leaf_call = LCS_NON_LEAF_CALL;
880         } else
881                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
882 }  /* stat_update_call_2 */
883
884 /**
885  * Find the base address of a Sel node.
886  *
887  * @param sel  the node
888  *
889  * @return the base address.
890  */
891 static ir_node *find_base_adr(ir_node *sel) {
892         ir_node *ptr = get_Sel_ptr(sel);
893
894         while (is_Sel(ptr)) {
895                 sel = ptr;
896                 ptr = get_Sel_ptr(sel);
897         }
898         return ptr;
899 }  /* find_base_adr */
900
901 /**
902  * Update info on Load/Store address statistics.
903  */
904 static void stat_update_address(ir_node *node, graph_entry_t *graph) {
905         ir_opcode opc = get_irn_opcode(node);
906         ir_node *base;
907         ir_graph *irg;
908
909         switch (opc) {
910         case iro_SymConst:
911                 /* a global address */
912                 cnt_inc(&graph->cnt[gcnt_global_adr]);
913                 break;
914         case iro_Sel:
915                 base = find_base_adr(node);
916                 irg = current_ir_graph;
917                 if (base == get_irg_tls(irg)) {
918                         /* a TLS variable, like a global. */
919                         cnt_inc(&graph->cnt[gcnt_global_adr]);
920                 } else if (base == get_irg_frame(irg)) {
921                         /* a local variable. */
922                         cnt_inc(&graph->cnt[gcnt_local_adr]);
923                 } else {
924                         /* Pointer access */
925                         if (is_Proj(base) && skip_Proj(get_Proj_pred(base)) == get_irg_start(irg)) {
926                                 /* pointer access through parameter, check for THIS */
927                                 ir_entity *ent = get_irg_entity(irg);
928
929                                 if (ent != NULL) {
930                                         ir_type *ent_tp = get_entity_type(ent);
931
932                                         if (get_method_calling_convention(ent_tp) & cc_this_call) {
933                                                 if (get_Proj_proj(base) == 0) {
934                                                         /* THIS pointer */
935                                                         cnt_inc(&graph->cnt[gcnt_this_adr]);
936                                                         goto end_parameter;
937                                                 }  /* if */
938                                         }  /* if */
939                                 }  /* if */
940                                 /* other parameter */
941                                 cnt_inc(&graph->cnt[gcnt_param_adr]);
942 end_parameter:  ;
943                         } else {
944                                 /* unknown Pointer access */
945                                 cnt_inc(&graph->cnt[gcnt_other_adr]);
946                         }  /* if */
947                 }  /* if */
948         default:
949                 ;
950         }  /* switch */
951 }  /* stat_update_address */
952
953 /**
954  * Walker for reachable nodes count.
955  */
956 static void update_node_stat(ir_node *node, void *env)
957 {
958         graph_entry_t *graph = env;
959         node_entry_t *entry;
960
961         ir_op *op = stat_get_irn_op(node);
962         int i, arity = get_irn_arity(node);
963
964         entry = opcode_get_entry(op, graph->opcode_hash);
965
966         cnt_inc(&entry->cnt_alive);
967         cnt_add_i(&graph->cnt[gcnt_edges], arity);
968
969         /* count block edges */
970         undate_block_info(node, graph);
971
972         /* count extended block edges */
973         if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
974                 if (graph->irg != get_const_code_irg())
975                         update_extbb_info(node, graph);
976         }  /* if */
977
978         /* handle statistics for special node types */
979
980         switch (op->code) {
981         case iro_Call:
982                 /* check for properties that depend on calls, like recursion/leaf/indirect call */
983                 stat_update_call(node, graph);
984                 break;
985         case iro_Load:
986                 /* check address properties */
987                 stat_update_address(get_Load_ptr(node), graph);
988                 break;
989         case iro_Store:
990                 /* check address properties */
991                 stat_update_address(get_Store_ptr(node), graph);
992                 break;
993         case iro_Phi:
994                 /* check for non-strict Phi nodes */
995                 for (i = arity - 1; i >= 0; --i) {
996                         ir_node *pred = get_Phi_pred(node, i);
997                         if (is_Unknown(pred)) {
998                                 /* found an Unknown predecessor, graph is not strict */
999                                 graph->is_strict = 0;
1000                                 break;
1001                         }
1002                 }
1003         default:
1004                 ;
1005         }  /* switch */
1006
1007         /* we want to count the constant IN nodes, not the CSE'ed constants themselves */
1008         if (status->stat_options & FIRMSTAT_COUNT_CONSTS) {
1009                 int i;
1010
1011                 for (i = get_irn_arity(node) - 1; i >= 0; --i) {
1012                         ir_node *pred = get_irn_n(node, i);
1013
1014                         if (is_Const(pred)) {
1015                                 /* check properties of constants */
1016                                 stat_update_const(status, pred, graph);
1017                         }  /* if */
1018                 }  /* for */
1019         }  /* if */
1020 }  /* update_node_stat */
1021
1022 /**
1023  * Walker for reachable nodes count for graphs on the wait_q.
1024  */
1025 static void update_node_stat_2(ir_node *node, void *env) {
1026         graph_entry_t *graph = env;
1027
1028         /* check for properties that depend on calls, like recursion/leaf/indirect call */
1029         if (is_Call(node))
1030                 stat_update_call_2(node, graph);
1031 }  /* update_node_stat_2 */
1032
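/* The address marks used below: MARK_ADDRESS_CALC tags a node that computes an
 * address; MARK_REF_ADR / MARK_REF_NON_ADR tag nodes that are referenced by an
 * address calculation resp. by a non-address user (see mark_address_calc()). */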
1033 /**
1034  * Get the current address mark.
1035  */
1036 static unsigned get_adr_mark(graph_entry_t *graph, ir_node *node) {
1037         address_mark_entry_t *value = set_find(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1038
1039         return value ? value->mark : 0;
1040 }  /* get_adr_mark */
1041
1042 /**
1043  * Set the current address mark.
1044  */
1045 static void set_adr_mark(graph_entry_t *graph, ir_node *node, unsigned val) {
1046         address_mark_entry_t *value = set_insert(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1047
1048         value->mark = val;
1049 }  /* set_adr_mark */
1050
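/* Define DUMP_ADR_MODE to register a vcg attribute hook that colors nodes taking
 * part in address calculations when the graph is dumped (see update_graph_stat()). */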
1051 #undef DUMP_ADR_MODE
1052
1053 #ifdef DUMP_ADR_MODE
1054 /**
1055  * a vcg attribute hook: Color a node with a different color if
1056  * it's identified as a part of an address expression or at least referenced
1057  * by an address expression.
1058  */
1059 static int stat_adr_mark_hook(FILE *F, ir_node *node, ir_node *local)
1060 {
1061         ir_node *n           = local ? local : node;
1062         ir_graph *irg        = get_irn_irg(n);
1063         graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1064         unsigned mark        = get_adr_mark(graph, n);
1065
1066         if (mark & MARK_ADDRESS_CALC)
1067                 fprintf(F, "color: purple");
1068         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1069                 fprintf(F, "color: pink");
1070         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1071                 fprintf(F, "color: lightblue");
1072         else
1073                 return 0;
1074
1075         /* I know the color! */
1076         return 1;
1077 }  /* stat_adr_mark_hook */
1078 #endif /* DUMP_ADR_MODE */
1079
1080 /**
1081  * Return the "operational" mode of a Firm node.
1082  */
1083 static ir_mode *get_irn_op_mode(ir_node *node) {
1084         switch (get_irn_opcode(node)) {
1085         case iro_Load:
1086                 return get_Load_mode(node);
1087         case iro_Store:
1088                 return get_irn_mode(get_Store_value(node));
1089         case iro_DivMod:
1090                 return get_irn_mode(get_DivMod_left(node));
1091         case iro_Div:
1092                 return get_irn_mode(get_Div_left(node));
1093         case iro_Mod:
1094                 return get_irn_mode(get_Mod_left(node));
1095         case iro_Cmp:
1096                 /* Cmp is no address calculation, or is it? */
1097         default:
1098                 return get_irn_mode(node);
1099         }  /* switch */
1100 }  /* get_irn_op_mode */
1101
1102 /**
1103  * Post-walker that marks every node that is an address calculation.
1104  *
1105  * Users of a node must be visited first. We ensure this by
1106  * calling it in the post of an outs walk. This should work even in cycles,
1107  * while the normal pre-walk will not.
1108  */
1109 static void mark_address_calc(ir_node *node, void *env) {
1110         graph_entry_t *graph = env;
1111         ir_mode *mode = get_irn_op_mode(node);
1112         int i, n;
1113         unsigned mark_preds = MARK_REF_NON_ADR;
1114
1115         if (! mode_is_data(mode))
1116                 return;
1117
1118         if (mode_is_reference(mode)) {
1119                 /* a reference is calculated here, we are sure */
1120                 set_adr_mark(graph, node, MARK_ADDRESS_CALC);
1121
1122                 mark_preds = MARK_REF_ADR;
1123         } else {
1124                 unsigned mark = get_adr_mark(graph, node);
1125
1126                 if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR) {
1127                         /*
1128                          * this node has no reference mode, but is only
1129                          * referenced by address calculations
1130                          */
1131                         mark_preds = MARK_REF_ADR;
1132                 }  /* if */
1133         }  /* if */
1134
1135         /* mark all predecessors */
1136         for (i = 0, n = get_irn_arity(node); i < n; ++i) {
1137                 ir_node *pred = get_irn_n(node, i);
1138
1139                 mode = get_irn_op_mode(pred);
1140                 if (! mode_is_data(mode))
1141                         continue;
1142
1143                 set_adr_mark(graph, pred, get_adr_mark(graph, pred) | mark_preds);
1144         }  /* for */
1145 }  /* mark_address_calc */
1146
1147 /**
1148  * Post-walker that counts every node that takes part in an address
1149  * calculation, using the address marks computed by mark_address_calc().
1150  *
1151  * It only reads the marks, so it can run in a normal graph walk after the
1152  * outs walk that set them (see update_graph_stat()).
1153  */
1154 static void count_adr_ops(ir_node *node, void *env) {
1155         graph_entry_t *graph = env;
1156         unsigned mark        = get_adr_mark(graph, node);
1157
1158         if (mark & MARK_ADDRESS_CALC)
1159                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1160         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1161                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1162         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1163                 cnt_inc(&graph->cnt[gcnt_all_adr_ops]);
1164 }  /* count_adr_ops */
1165
1166 /**
1167  * Called for every graph when the graph is either deleted or stat_dump_snapshot()
1168  * is called; recalculates all statistic info.
1169  *
1170  * @param global    The global entry
1171  * @param graph     The current entry
1172  */
1173 static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph)
1174 {
1175         node_entry_t *entry;
1176         int i;
1177
1178         /* first clear the alive counters in the graph */
1179         foreach_pset(graph->opcode_hash, entry) {
1180                 cnt_clr(&entry->cnt_alive);
1181         }  /* foreach_pset */
1182
1183         /* set pessimistic values */
1184         graph->is_leaf       = 1;
1185         graph->is_leaf_call  = LCS_UNKNOWN;
1186         graph->is_recursive  = 0;
1187         graph->is_chain_call = 1;
1188         graph->is_strict     = 1;
1189
1190         /* create new block counter */
1191         graph->block_hash = new_pset(block_cmp, 5);
1192
1193         /* we need dominator info */
1194         if (graph->irg != get_const_code_irg()) {
1195                 assure_doms(graph->irg);
1196
1197                 if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
1198                         /* we need extended basic blocks */
1199                         compute_extbb(graph->irg);
1200
1201                         /* create new extbb counter */
1202                         graph->extbb_hash = new_pset(block_cmp, 5);
1203                 }  /* if */
1204         }  /* if */
1205
1206         /* count the nodes in the graph */
1207         irg_walk_graph(graph->irg, update_node_stat, NULL, graph);
1208
1209 #if 0
1210         /* Uncomment this code if chain-call means exactly one call. */
1211         entry = opcode_get_entry(op_Call, graph->opcode_hash);
1212
1213         /* check if we have more than 1 call */
1214         if (cnt_gt(entry->cnt_alive, 1))
1215                 graph->is_chain_call = 0;
1216 #endif
1217
1218         /* recursive functions are never chain calls, leafs don't have calls */
1219         if (graph->is_recursive || graph->is_leaf)
1220                 graph->is_chain_call = 0;
1221
1222         /* assume we walk every graph only ONCE, so we can sum up the global count here */
1223         foreach_pset(graph->opcode_hash, entry) {
1224                 node_entry_t *g_entry = opcode_get_entry(entry->op, global->opcode_hash);
1225
1226                 /* update the node counter */
1227                 cnt_add(&g_entry->cnt_alive, &entry->cnt_alive);
1228         }  /* foreach_pset */
1229
1230         /* count the number of address calculations */
1231         if (graph->irg != get_const_code_irg()) {
1232                 ir_graph *rem = current_ir_graph;
1233
1234                 assure_irg_outs(graph->irg);
1235
1236                 /* Must be done on the outs graph */
1237                 current_ir_graph = graph->irg;
1238                 irg_out_walk(get_irg_start(graph->irg), NULL, mark_address_calc, graph);
1239                 current_ir_graph = rem;
1240
1241 #ifdef DUMP_ADR_MODE
1242                 /* register the vcg hook and dump the graph for test */
1243                 set_dump_node_vcgattr_hook(stat_adr_mark_hook);
1244                 dump_ir_block_graph(graph->irg, "-adr");
1245                 set_dump_node_vcgattr_hook(NULL);
1246 #endif /* DUMP_ADR_MODE */
1247
1248                 irg_walk_graph(graph->irg, NULL, count_adr_ops, graph);
1249         }  /* if */
1250
1251         /* count the DAG's */
1252         if (status->stat_options & FIRMSTAT_COUNT_DAG)
1253                 count_dags_in_graph(global, graph);
1254
1255         /* calculate the patterns of this graph */
1256         stat_calc_pattern_history(graph->irg);
1257
1258         /* a leaf function does not call others */
1259         if (graph->is_leaf)
1260                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
1261         else if (graph->is_leaf_call == LCS_UNKNOWN) {
1262                 /* we still don't know if this graph calls leaf-functions, so enqueue */
1263                 pdeq_putl(status->wait_q, graph);
1264         }  /* if */
1265
1266         /* we have analyzed this graph */
1267         graph->is_analyzed = 1;
1268
1269         /* accumulate all counters */
1270         for (i = 0; i < _gcnt_last; ++i)
1271                 cnt_add(&global->cnt[i], &graph->cnt[i]);
1272 }  /* update_graph_stat */
1273
1274 /**
1275  * Called for every graph that was on the wait_q in stat_dump_snapshot()
1276  * to finish all statistic info calculations.
1277  *
1278  * @param global    The global entry
1279  * @param graph     The current entry
1280  */
1281 static void update_graph_stat_2(graph_entry_t *global, graph_entry_t *graph)
1282 {
1283         (void) global;
1284         if (graph->is_deleted) {
1285                 /* deleted, ignore */
1286                 return;
1287         }
1288
1289         if (graph->irg) {
1290                 /* count the nodes in the graph */
1291                 irg_walk_graph(graph->irg, update_node_stat_2, NULL, graph);
1292
1293                 if (graph->is_leaf_call == LCS_UNKNOWN)
1294                         graph->is_leaf_call = LCS_LEAF_CALL;
1295         }  /* if */
1296 }  /* update_graph_stat_2 */
1297
1298 /**
1299  * Register a dumper.
1300  */
1301 static void stat_register_dumper(const dumper_t *dumper) {
1302         dumper_t *p = XMALLOC(dumper_t);
1303
1304         memcpy(p, dumper, sizeof(*p));
1305
1306         p->next        = status->dumper;
1307         p->status      = status;
1308         status->dumper = p;
1309
1310         /* FIXME: memory leak */
1311 }  /* stat_register_dumper */
1312
1313 /**
1314  * Dumps the statistics of an IR graph.
1315  */
1316 static void stat_dump_graph(graph_entry_t *entry) {
1317         dumper_t *dumper;
1318
1319         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1320                 if (dumper->dump_graph)
1321                         dumper->dump_graph(dumper, entry);
1322         }  /* for */
1323 }  /* stat_dump_graph */
1324
1325 /**
1326  * Calls all registered dumper functions.
1327  */
1328 static void stat_dump_registered(graph_entry_t *entry) {
1329         dumper_t *dumper;
1330
1331         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1332                 if (dumper->func_map) {
1333                         dump_graph_FUNC func;
1334
1335                         foreach_pset(dumper->func_map, func)
1336                                 func(dumper, entry);
1337                 }  /* if */
1338         }  /* for */
1339 }  /* stat_dump_registered */
1340
1341 /**
1342  * Dumps a constant table.
1343  */
1344 static void stat_dump_consts(const constant_info_t *tbl) {
1345         dumper_t *dumper;
1346
1347         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1348                 if (dumper->dump_const_tbl)
1349                         dumper->dump_const_tbl(dumper, tbl);
1350         }  /* for */
1351 }  /* stat_dump_consts */
1352
1353 /**
1354  * Dumps the parameter distribution
1355  */
1356 static void stat_dump_param_tbl(const distrib_tbl_t *tbl, graph_entry_t *global) {
1357         dumper_t *dumper;
1358
1359         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1360                 if (dumper->dump_param_tbl)
1361                         dumper->dump_param_tbl(dumper, tbl, global);
1362         }  /* for */
1363 }  /* stat_dump_param_tbl */
1364
1365 /**
1366  * Dumps the optimization counter
1367  */
1368 static void stat_dump_opt_cnt(const counter_t *tbl, unsigned len) {
1369         dumper_t *dumper;
1370
1371         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1372                 if (dumper->dump_opt_cnt)
1373                         dumper->dump_opt_cnt(dumper, tbl, len);
1374         }  /* for */
1375 }  /* stat_dump_opt_cnt */
1376
1377 /**
1378  * Initialize the dumper.
1379  */
1380 static void stat_dump_init(const char *name) {
1381         dumper_t *dumper;
1382
1383         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1384                 if (dumper->init)
1385                         dumper->init(dumper, name);
1386         }  /* for */
1387 }  /* stat_dump_init */
1388
1389 /**
1390  * Finish the dumper.
1391  */
1392 static void stat_dump_finish(void) {
1393         dumper_t *dumper;
1394
1395         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1396                 if (dumper->finish)
1397                         dumper->finish(dumper);
1398         }  /* for */
1399 }  /* stat_dump_finish */
1400
1401 /**
1402  * Register an additional function for all dumpers.
1403  */
1404 void stat_register_dumper_func(dump_graph_FUNC func) {
1405         dumper_t *dumper;
1406
1407         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1408                 if (! dumper->func_map)
1409                         dumper->func_map = pset_new_ptr(3);
1410                 pset_insert_ptr(dumper->func_map, func);
1411         }  /* for */
1412 }  /* stat_register_dumper_func */
1413
1414 /* ---------------------------------------------------------------------- */
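/* The non-static functions below form the public firmstat interface; the "Hook:"
 * functions that follow are the callbacks registered with the libFirm hook
 * mechanism (see irhooks.h) when the statistics module is initialised. */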
1415
1416 /*
1417  * Helper: get an ir_op from an opcode.
1418  */
1419 ir_op *stat_get_op_from_opcode(ir_opcode code) {
1420         return opcode_find_entry(code, status->ir_op_hash);
1421 }  /* stat_get_op_from_opcode */
1422
1423 /**
1424  * Hook: A new IR op is registered.
1425  *
1426  * @param ctx  the hook context
1427  * @param op   the new IR opcode that was created.
1428  */
1429 static void stat_new_ir_op(void *ctx, ir_op *op) {
1430         (void) ctx;
1431         if (! status->stat_options)
1432                 return;
1433
1434         STAT_ENTER;
1435         {
1436                 graph_entry_t *graph = graph_get_entry(NULL, status->irg_hash);
1437
1438                 /* execute for side effect :-) */
1439                 (void)opcode_get_entry(op, graph->opcode_hash);
1440
1441                 pset_insert(status->ir_op_hash, op, op->code);
1442         }
1443         STAT_LEAVE;
1444 }  /* stat_new_ir_op */
1445
1446 /**
1447  * Hook: An IR op is freed.
1448  *
1449  * @param ctx  the hook context
1450  * @param op   the IR opcode that is freed
1451  */
1452 static void stat_free_ir_op(void *ctx, ir_op *op) {
1453         (void) ctx;
1454         (void) op;
1455         if (! status->stat_options)
1456                 return;
1457
1458         STAT_ENTER;
1459         {
1460         }
1461         STAT_LEAVE;
1462 }  /* stat_free_ir_op */
1463
1464 /**
1465  * Hook: A new node is created.
1466  *
1467  * @param ctx   the hook context
1468  * @param irg   the IR graph on which the node is created
1469  * @param node  the new IR node that was created
1470  */
1471 static void stat_new_node(void *ctx, ir_graph *irg, ir_node *node) {
1472         (void) ctx;
1473         (void) irg;
1474         if (! status->stat_options)
1475                 return;
1476
1477         /* do NOT count during dead node elimination */
1478         if (status->in_dead_node_elim)
1479                 return;
1480
1481         STAT_ENTER;
1482         {
1483                 node_entry_t *entry;
1484                 graph_entry_t *graph;
1485                 ir_op *op = stat_get_irn_op(node);
1486
1487                 /* increase global value */
1488                 graph = graph_get_entry(NULL, status->irg_hash);
1489                 entry = opcode_get_entry(op, graph->opcode_hash);
1490                 cnt_inc(&entry->new_node);
1491
1492                 /* increase local value */
1493                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1494                 entry = opcode_get_entry(op, graph->opcode_hash);
1495                 cnt_inc(&entry->new_node);
1496         }
1497         STAT_LEAVE;
1498 }  /* stat_new_node */
1499
1500 /**
1501  * Hook: A node is changed into an Id node.
1502  *
1503  * @param ctx   the hook context
1504  * @param node  the IR node that will be turned into an ID
1505  */
1506 static void stat_turn_into_id(void *ctx, ir_node *node) {
1507         (void) ctx;
1508         if (! status->stat_options)
1509                 return;
1510
1511         STAT_ENTER;
1512         {
1513                 node_entry_t *entry;
1514                 graph_entry_t *graph;
1515                 ir_op *op = stat_get_irn_op(node);
1516
1517                 /* increase global value */
1518                 graph = graph_get_entry(NULL, status->irg_hash);
1519                 entry = opcode_get_entry(op, graph->opcode_hash);
1520                 cnt_inc(&entry->into_Id);
1521
1522                 /* increase local value */
1523                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1524                 entry = opcode_get_entry(op, graph->opcode_hash);
1525                 cnt_inc(&entry->into_Id);
1526         }
1527         STAT_LEAVE;
1528 }  /* stat_turn_into_id */
1529
1530 /**
1531  * Hook: A node is normalized
1532  *
1533  * @param ctx   the hook context
1534  * @param node  the IR node that was normalized
1535  */
1536 static void stat_normalize(void *ctx, ir_node *node) {
1537         (void) ctx;
1538         if (! status->stat_options)
1539                 return;
1540
1541         STAT_ENTER;
1542         {
1543                 node_entry_t *entry;
1544                 graph_entry_t *graph;
1545                 ir_op *op = stat_get_irn_op(node);
1546
1547                 /* increase global value */
1548                 graph = graph_get_entry(NULL, status->irg_hash);
1549                 entry = opcode_get_entry(op, graph->opcode_hash);
1550                 cnt_inc(&entry->normalized);
1551
1552                 /* increase local value */
1553                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1554                 entry = opcode_get_entry(op, graph->opcode_hash);
1555                 cnt_inc(&entry->normalized);
1556         }
1557         STAT_LEAVE;
1558 }  /* stat_normalize */
1559
1560 /**
1561  * Hook: A new graph was created
1562  *
1563  * @param ctx  the hook context
1564  * @param irg  the new IR graph that was created
1565  * @param ent  the entity of this graph
1566  */
1567 static void stat_new_graph(void *ctx, ir_graph *irg, ir_entity *ent) {
1568         (void) ctx;
1569         if (! status->stat_options)
1570                 return;
1571
1572         STAT_ENTER;
1573         {
1574                 /* execute for side effect :-) */
1575                 graph_entry_t * graph = graph_get_entry(irg, status->irg_hash);
1576
1577                 graph->ent           = ent;
1578                 graph->is_deleted    = 0;
1579                 graph->is_leaf       = 0;
1580                 graph->is_leaf_call  = 0;
1581                 graph->is_recursive  = 0;
1582                 graph->is_chain_call = 0;
1583                 graph->is_strict     = 1;
1584                 graph->is_analyzed   = 0;
1585         }
1586         STAT_LEAVE;
1587 }  /* stat_new_graph */
1588
1589 /**
1590  * Hook: A graph will be deleted
1591  *
1592  * @param ctx  the hook context
1593  * @param irg  the IR graph that will be deleted
1594  *
1595  * Note that we still hold the information for this graph
1596  * in our hash maps; only a flag is set, which prevents this
1597  * information from being changed. It is "frozen" from now on.
1598  */
1599 static void stat_free_graph(void *ctx, ir_graph *irg) {
1600         (void) ctx;
1601         if (! status->stat_options)
1602                 return;
1603
1604         STAT_ENTER;
1605         {
1606                 graph_entry_t *graph  = graph_get_entry(irg, status->irg_hash);
1607                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
1608
1609                 graph->is_deleted = 1;
1610
1611                 if (status->stat_options & FIRMSTAT_COUNT_DELETED) {
1612                         /* count the nodes of the graph now, as it will be destroyed later */
1613                         update_graph_stat(global, graph);
1614                 }  /* if */
1615         }
1616         STAT_LEAVE;
1617 }  /* stat_free_graph */
1618
1619 /**
1620  * Hook: A walk over a graph is initiated. Do not count walks from statistic code.
1621  *
1622  * @param ctx  the hook context
1623  * @param irg  the IR graph that will be walked
1624  * @param pre  the pre walker
1625  * @param post the post walker
1626  */
1627 static void stat_irg_walk(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1628 {
1629         (void) ctx;
1630         (void) pre;
1631         (void) post;
1632         if (! status->stat_options)
1633                 return;
1634
1635         STAT_ENTER_SINGLE;
1636         {
1637                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1638
1639                 cnt_inc(&graph->cnt[gcnt_acc_walked]);
1640         }
1641         STAT_LEAVE;
1642 }  /* stat_irg_walk */
1643
1644 /**
1645  * Hook: A walk over a graph in block-wise order is initiated. Do not count walks from statistic code.
1646  *
1647  * @param ctx  the hook context
1648  * @param irg  the IR graph that will be walked
1649  * @param pre  the pre walker
1650  * @param post the post walker
1651  */
1652 static void stat_irg_walk_blkwise(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1653 {
1654         /* for now, do NOT differentiate between blockwise and normal */
1655         stat_irg_walk(ctx, irg, pre, post);
1656 }  /* stat_irg_walk_blkwise */
1657
1658 /**
1659  * Hook: A walk over the graph's blocks is initiated. Do not count walks from statistic code.
1660  *
1661  * @param ctx  the hook context
1662  * @param irg  the IR graph that will be walked
1663  * @param node the IR node
1664  * @param pre  the pre walker
1665  * @param post the post walker
1666  */
1667 static void stat_irg_block_walk(void *ctx, ir_graph *irg, ir_node *node, generic_func *pre, generic_func *post)
1668 {
1669         (void) ctx;
1670         (void) node;
1671         (void) pre;
1672         (void) post;
1673         if (! status->stat_options)
1674                 return;
1675
1676         STAT_ENTER_SINGLE;
1677         {
1678                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1679
1680                 cnt_inc(&graph->cnt[gcnt_acc_walked_blocks]);
1681         }
1682         STAT_LEAVE;
1683 }  /* stat_irg_block_walk */
1684
1685 /**
1686  * Called for every node that is removed due to an optimization.
1687  *
1688  * @param n     the IR node that will be removed
1689  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
1690  * @param kind  the optimization kind
1691  */
1692 static void removed_due_opt(ir_node *n, hmap_opt_entry_t *hmap, hook_opt_kind kind) {
1693         opt_entry_t *entry;
1694         ir_op *op = stat_get_irn_op(n);
1695
1696         /* ignore CSE for Constants */
1697         if (kind == HOOK_OPT_CSE && (is_Const(n) || is_SymConst(n)))
1698                 return;
1699
1700         /* increase the counter for this optimization kind in the given map */
1701         entry = opt_get_entry(op, hmap);
1702         cnt_inc(&entry->count);
1703 }  /* removed_due_opt */
1704
1705 /**
1706  * Hook: Some nodes were optimized into some others due to an optimization.
1707  *
1708  * @param ctx  the hook context
1709  */
1710 static void stat_merge_nodes(
1711     void *ctx,
1712     ir_node **new_node_array, int new_num_entries,
1713     ir_node **old_node_array, int old_num_entries,
1714     hook_opt_kind opt)
1715 {
1716         (void) ctx;
1717         if (! status->stat_options)
1718                 return;
1719
1720         STAT_ENTER;
1721         {
1722                 int i, j;
1723                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1724
1725                 cnt_inc(&status->num_opts[opt]);
1726                 if (status->reassoc_run)
1727                         opt = HOOK_OPT_REASSOC;
1728
1729                 for (i = 0; i < old_num_entries; ++i) {
1730                         /* nodes might occur in both the new and the old array; if we
1731                            find a node in both sets, it is NOT removed */
1732                         for (j = 0; j < new_num_entries; ++j) {
1733                                 if (old_node_array[i] == new_node_array[j])
1734                                         break;
1735                         }  /* for */
1736                         if (j >= new_num_entries) {
1737                                 int xopt = opt;
1738
1739                                 /* sometimes it is not detected that the node was replaced by a Const */
1740                                 if (opt == HOOK_OPT_CONFIRM && new_num_entries == 1) {
1741                                         ir_op *op = get_irn_op(new_node_array[0]);
1742
1743                                         if (op == op_Const || op == op_SymConst)
1744                                                 xopt = HOOK_OPT_CONFIRM_C;
1745                                 }  /* if */
1746
1747                                 removed_due_opt(old_node_array[i], graph->opt_hash[xopt], xopt);
1748                         }  /* if */
1749                 }  /* for */
1750         }
1751         STAT_LEAVE;
1752 }  /* stat_merge_nodes */
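/*
 * A short worked example of the bookkeeping above (the node names are only
 * illustrative): if an optimization reports old = { add, cnst } and
 * new = { add }, the add occurs in both arrays and survives, so only cnst
 * is counted via removed_due_opt() for the given optimization kind.  If
 * opt is HOOK_OPT_CONFIRM and the single new node is a (Sym)Const, the
 * count is booked under HOOK_OPT_CONFIRM_C instead.
 */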
1753
1754 /**
1755  * Hook: Reassociation is started/stopped.
1756  *
1757  * @param ctx   the hook context
1758  * @param flag  if non-zero, reassociation is started, otherwise it is stopped
1759  */
1760 static void stat_reassociate(void *ctx, int flag) {
1761         (void) ctx;
1762         if (! status->stat_options)
1763                 return;
1764
1765         STAT_ENTER;
1766         {
1767                 status->reassoc_run = flag;
1768         }
1769         STAT_LEAVE;
1770 }  /* stat_reassociate */
1771
1772 /**
1773  * Hook: A node was lowered into other nodes
1774  *
1775  * @param ctx  the hook context
1776  * @param node the IR node that will be lowered
1777  */
1778 static void stat_lower(void *ctx, ir_node *node) {
1779         (void) ctx;
1780         if (! status->stat_options)
1781                 return;
1782
1783         STAT_ENTER;
1784         {
1785                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1786
1787                 removed_due_opt(node, graph->opt_hash[HOOK_LOWERED], HOOK_LOWERED);
1788         }
1789         STAT_LEAVE;
1790 }  /* stat_lower */
1791
1792 /**
1793  * Hook: A graph was inlined.
1794  *
1795  * @param ctx  the hook context
1796  * @param call the IR call that will be changed into the body of
1797  *             the called IR graph
1798  * @param called_irg  the IR graph representing the called routine
1799  */
1800 static void stat_inline(void *ctx, ir_node *call, ir_graph *called_irg)
1801 {
1802         (void) ctx;
1803         if (! status->stat_options)
1804                 return;
1805
1806         STAT_ENTER;
1807         {
1808                 ir_graph *irg = get_irn_irg(call);
1809                 graph_entry_t *i_graph = graph_get_entry(called_irg, status->irg_hash);
1810                 graph_entry_t *graph   = graph_get_entry(irg, status->irg_hash);
1811
1812                 cnt_inc(&graph->cnt[gcnt_acc_got_inlined]);
1813                 cnt_inc(&i_graph->cnt[gcnt_acc_was_inlined]);
1814         }
1815         STAT_LEAVE;
1816 }  /* stat_inline */
1817
1818 /**
1819  * Hook: A graph with tail-recursions was optimized.
1820  *
1821  * @param ctx  the hook context
1822  */
1823 static void stat_tail_rec(void *ctx, ir_graph *irg, int n_calls) {
1824         (void) ctx;
1825         if (! status->stat_options)
1826                 return;
1827
1828         STAT_ENTER;
1829         {
1830                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1831
1832                 graph->num_tail_recursion += n_calls;
1833         }
1834         STAT_LEAVE;
1835 }  /* stat_tail_rec */
1836
1837 /**
1838  * Hook: Strength reduction was performed on an iteration variable.
1839  *
1840  * @param ctx  the hook context
1841  */
1842 static void stat_strength_red(void *ctx, ir_graph *irg, ir_node *strong) {
1843         (void) ctx;
1844         if (! status->stat_options)
1845                 return;
1846
1847         STAT_ENTER;
1848         {
1849                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1850                 cnt_inc(&graph->cnt[gcnt_acc_strength_red]);
1851
1852                 removed_due_opt(strong, graph->opt_hash[HOOK_OPT_STRENGTH_RED], HOOK_OPT_STRENGTH_RED);
1853         }
1854         STAT_LEAVE;
1855 }  /* stat_strength_red */
1856
1857 /**
1858  * Hook: Start/Stop the dead node elimination.
1859  *
1860  * @param ctx  the hook context
1861  */
1862 static void stat_dead_node_elim(void *ctx, ir_graph *irg, int start) {
1863         (void) ctx;
1864         (void) irg;
1865         if (! status->stat_options)
1866                 return;
1867
1868         status->in_dead_node_elim = (start != 0);
1869 }  /* stat_dead_node_elim */
1870
1871 /**
1872  * Hook: if-conversion was tried.
1873  */
1874 static void stat_if_conversion(void *context, ir_graph *irg, ir_node *phi,
1875                                int pos, ir_node *mux, if_result_t reason)
1876 {
1877         (void) context;
1878         (void) phi;
1879         (void) pos;
1880         (void) mux;
1881         if (! status->stat_options)
1882                 return;
1883
1884         STAT_ENTER;
1885         {
1886                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1887
1888                 cnt_inc(&graph->cnt[gcnt_if_conv + reason]);
1889         }
1890         STAT_LEAVE;
1891 }  /* stat_if_conversion */
1892
1893 /**
1894  * Hook: A real function call was optimized.
1895  */
1896 static void stat_func_call(void *context, ir_graph *irg, ir_node *call)
1897 {
1898         (void) context;
1899         (void) call;
1900         if (! status->stat_options)
1901                 return;
1902
1903         STAT_ENTER;
1904         {
1905                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1906
1907                 cnt_inc(&graph->cnt[gcnt_acc_real_func_call]);
1908         }
1909         STAT_LEAVE;
1910 }  /* stat_func_call */
1911
1912 /**
1913  * Hook: A multiply was replaced by a series of Shifts/Adds/Subs.
1914  *
1915  * @param ctx  the hook context
1916  */
1917 static void stat_arch_dep_replace_mul_with_shifts(void *ctx, ir_node *mul) {
1918         (void) ctx;
1919         if (! status->stat_options)
1920                 return;
1921
1922         STAT_ENTER;
1923         {
1924                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1925                 removed_due_opt(mul, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1926         }
1927         STAT_LEAVE;
1928 }  /* stat_arch_dep_replace_mul_with_shifts */
1929
1930 /**
1931  * Hook: A division by const was replaced.
1932  *
1933  * @param ctx   the hook context
1934  * @param node  the division node that will be optimized
1935  */
1936 static void stat_arch_dep_replace_division_by_const(void *ctx, ir_node *node) {
1937         (void) ctx;
1938         if (! status->stat_options)
1939                 return;
1940
1941         STAT_ENTER;
1942         {
1943                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1944                 removed_due_opt(node, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP);
1945         }
1946         STAT_LEAVE;
1947 }  /* stat_arch_dep_replace_division_by_const */
1948
1949 /*
1950  * Update the register pressure of a block.
1951  *
1952  * @param irg        the irg containing the block
1953  * @param block      the block for which the reg pressure should be set
1954  * @param pressure   the pressure
1955  * @param class_name the name of the register class
1956  */
1957 void stat_be_block_regpressure(ir_graph *irg, ir_node *block, int pressure, const char *class_name)
1958 {
1959         if (! status->stat_options)
1960                 return;
1961
1962         STAT_ENTER;
1963         {
1964                 graph_entry_t        *graph = graph_get_entry(irg, status->irg_hash);
1965                 be_block_entry_t     *block_ent;
1966                 reg_pressure_entry_t *rp_ent;
1967
1968                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
1969                 rp_ent    = OALLOCZ(&status->be_data, reg_pressure_entry_t);
1970
1971                 rp_ent->class_name = class_name;
1972                 rp_ent->pressure   = pressure;
1973
1974                 pset_insert(block_ent->reg_pressure, rp_ent, HASH_PTR(class_name));
1975         }
1976         STAT_LEAVE;
1977 }  /* stat_be_block_regpressure */
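/*
 * Sketch of the intended use from a backend (the register class name "gp"
 * and the pressure value 7 are placeholders, not taken from a real backend):
 *
 *   stat_be_block_regpressure(irg, block, 7, "gp");
 *
 * Note that the reg_pressure entries are hashed by the class-name pointer
 * (HASH_PTR(class_name)), so a class should always be reported with the
 * same string.
 */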
1978
1979 /**
1980  * Update the distribution of ready nodes of a block.
1981  *
1982  * @param irg        the irg containing the block
1983  * @param block      the block for which the ready-node distribution is updated
1984  * @param num_ready  the number of ready nodes
1985  */
1986 void stat_be_block_sched_ready(ir_graph *irg, ir_node *block, int num_ready)
1987 {
1988         if (! status->stat_options)
1989                 return;
1990
1991         STAT_ENTER;
1992         {
1993                 graph_entry_t    *graph = graph_get_entry(irg, status->irg_hash);
1994                 be_block_entry_t *block_ent;
1995
1996                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
1997
1998                 /* increase the counter for the corresponding number of ready nodes */
1999                 stat_inc_int_distrib_tbl(block_ent->sched_ready, num_ready);
2000         }
2001         STAT_LEAVE;
2002 }  /* stat_be_block_sched_ready */
2003
2004 /**
2005  * Update the permutation statistic of a single Perm node in a block.
2006  *
2007  * @param class_name the name of the register class
2008  * @param n_regs     number of registers in the register class
2009  * @param perm       the perm node
2010  * @param block      the block containing the perm
2011  * @param size       the size of the perm
2012  * @param real_size  number of pairs with different registers
2013  */
2014 void stat_be_block_stat_perm(const char *class_name, int n_regs, ir_node *perm, ir_node *block,
2015                              int size, int real_size)
2016 {
2017         if (! status->stat_options)
2018                 return;
2019
2020         STAT_ENTER;
2021         {
2022                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2023                 be_block_entry_t   *block_ent;
2024                 perm_class_entry_t *pc_ent;
2025                 perm_stat_entry_t  *ps_ent;
2026
2027                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2028                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2029                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2030
2031                 pc_ent->n_regs = n_regs;
2032
2033                 /* update information */
2034                 ps_ent->size      = size;
2035                 ps_ent->real_size = real_size;
2036         }
2037         STAT_LEAVE;
2038 }  /* stat_be_block_stat_perm */
2039
2040 /**
2041  * Update the statistic of a single cycle/chain inside a Perm node.
2042  *
2043  * @param class_name the name of the register class
2044  * @param perm       the perm node
2045  * @param block      the block containing the perm
2046  * @param is_chain   1 if chain, 0 if cycle
2047  * @param size       length of the cycle/chain
2048  * @param n_ops      the number of ops representing this cycle/chain after lowering
2049  */
2050 void stat_be_block_stat_permcycle(const char *class_name, ir_node *perm, ir_node *block,
2051                                   int is_chain, int size, int n_ops)
2052 {
2053         if (! status->stat_options)
2054                 return;
2055
2056         STAT_ENTER;
2057         {
2058                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2059                 be_block_entry_t   *block_ent;
2060                 perm_class_entry_t *pc_ent;
2061                 perm_stat_entry_t  *ps_ent;
2062
2063                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2064                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2065                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2066
2067                 if (is_chain) {
2068                         ps_ent->n_copies += n_ops;
2069                         stat_inc_int_distrib_tbl(ps_ent->chains, size);
2070                 } else {
2071                         ps_ent->n_exchg += n_ops;
2072                         stat_inc_int_distrib_tbl(ps_ent->cycles, size);
2073                 }  /* if */
2074         }
2075         STAT_LEAVE;
2076 }  /* stat_be_block_stat_permcycle */
2077
2078 /* Dumps a statistics snapshot. */
2079 void stat_dump_snapshot(const char *name, const char *phase)
2080 {
2081         char fname[2048];
2082         const char *p;
2083         int l;
2084
2085         if (! status->stat_options)
2086                 return;
2087
2088         STAT_ENTER;
2089         {
2090                 graph_entry_t *entry;
2091                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
2092
2093                 /*
2094                  * The constant counter is only global, so we clear it here.
2095                  * Note that, because of this, it does NOT contain the
2096                  * constants of DELETED graphs.
2097                  */
2098                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2099                         stat_const_clear(status);
2100
2101                 /* build the name */
2102                 p = strrchr(name, '/');
2103 #ifdef _WIN32
2104                 {
2105                         const char *q;
2106
2107                         q = strrchr(name, '\\');
2108
2109                         /* NULL might not be the smallest pointer */
2110                         if (q && (!p || q > p))
2111                                 p = q;
2112                 }
2113 #endif /* _WIN32 */
2114                 if (p) {
2115                         ++p;
2116                         l = p - name;
2117
2118                         if (l > (int) (sizeof(fname) - 1))
2119                                 l = sizeof(fname) - 1;
2120
2121                         memcpy(fname, name, l);
2122                         fname[l] = '\0';
2123                 } else {
2124                         fname[0] = '\0';
2125                         p = name;
2126                 }  /* if */
2127                 strncat(fname, "firmstat-", sizeof(fname) - strlen(fname) - 1);
2128                 strncat(fname, phase, sizeof(fname) - strlen(fname) - 1);
2129                 strncat(fname, "-", sizeof(fname) - strlen(fname) - 1);
2130                 strncat(fname, p, sizeof(fname) - strlen(fname) - 1);
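                /*
                 * Example of the resulting file name (purely illustrative):
                 * for name = "test/queens.c" and phase = "lowered", p points
                 * to "queens.c" and fname becomes
                 * "test/firmstat-lowered-queens.c".
                 */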
2131
2132                 stat_dump_init(fname);
2133
2134                 /* calculate the graph statistics */
2135                 for (entry = pset_first(status->irg_hash); entry; entry = pset_next(status->irg_hash)) {
2136                         if (entry->irg == NULL) {
2137                                 /* special entry for the global count */
2138                                 continue;
2139                         }  /* if */
2140                         if (! entry->is_deleted) {
2141                                 /* the graph is still alive, count the nodes on it */
2142                                 update_graph_stat(global, entry);
2143                         }  /* if */
2144                 }  /* for */
2145
2146                 /* some calculations depend on other graphs; those were pushed onto the wait_q */
2147                 while (! pdeq_empty(status->wait_q)) {
2148                         entry = pdeq_getr(status->wait_q);
2149
2150                         update_graph_stat_2(global, entry);
2151                 }  /* while */
2152
2153                 /* dump per graph */
2154                 for (entry = pset_first(status->irg_hash); entry; entry = pset_next(status->irg_hash)) {
2155                         if (entry->irg == NULL) {
2156                                 /* special entry for the global count */
2157                                 continue;
2158                         }  /* if */
2159
2160                         if (! entry->is_deleted || status->stat_options & FIRMSTAT_COUNT_DELETED) {
2161                                 stat_dump_graph(entry);
2162                                 stat_dump_registered(entry);
2163                         }  /* if */
2164
2165                         if (! entry->is_deleted) {
2166                                 /* clear the counters that are not accumulated */
2167                                 graph_clear_entry(entry, 0);
2168                         }  /* if */
2169                 }  /* for */
2170
2171                 /* dump global */
2172                 stat_dump_graph(global);
2173
2174                 /* dump the const info */
2175                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2176                         stat_dump_consts(&status->const_info);
2177
2178                 /* dump the parameter distribution */
2179                 stat_dump_param_tbl(status->dist_param_cnt, global);
2180
2181                 /* dump the optimization counters and clear them */
2182                 stat_dump_opt_cnt(status->num_opts, ARR_SIZE(status->num_opts));
2183                 clear_optimization_counter();
2184
2185                 stat_dump_finish();
2186
2187                 stat_finish_pattern_history(fname);
2188
2189                 /* clear the global counters here */
2190                 {
2191                         node_entry_t *entry;
2192
2193                         for (entry = pset_first(global->opcode_hash); entry; entry = pset_next(global->opcode_hash)) {
2194                                 opcode_clear_entry(entry);
2195                         }  /* for */
2196                         /* clear all global counters */
2197                         graph_clear_entry(global, /*all=*/1);
2198                 }
2199         }
2200         STAT_LEAVE;
2201 }  /* stat_dump_snapshot */
2202
2203 struct pass_t {
2204         ir_prog_pass_t pass;
2205         const char     *fname;
2206         const char     *phase;
2207 };
2208
2209 /**
2210  * Wrapper to run stat_dump_snapshot() as an ir_prog pass.
2211  */
2212 static int stat_dump_snapshot_wrapper(ir_prog *irp, void *context) {
2213         struct pass_t *pass = context;
2214
2215         (void)irp;
2216         stat_dump_snapshot(pass->fname, pass->phase);
2217         return 0;
2218 }  /* stat_dump_snapshot_wrapper */
2219
2220 /**
2221  * Ensure that no verifier is run from the wrapper.
2222  */
2223 static int no_verify(ir_prog *prog, void *ctx)
2224 {
2225         (void)prog;
2226         (void)ctx;
2227         return 0;
2228 }
2229
2230 /**
2231  * Ensure that no dumper is run from the wrapper.
2232  */
2233 static void no_dump(ir_prog *prog, void *ctx, unsigned idx)
2234 {
2235         (void)prog;
2236         (void)ctx;
2237         (void)idx;
2238 }
2239
2240 /* create an ir_prog pass */
2241 ir_prog_pass_t *stat_dump_snapshot_pass(
2242         const char *name, const char *fname, const char *phase) {
2243         struct pass_t *pass = XMALLOCZ(struct pass_t);
2244
2245         def_prog_pass_constructor(
2246                 &pass->pass, name ? name : "stat_snapshot", stat_dump_snapshot_wrapper);
2247         pass->fname = fname;
2248         pass->phase = phase;
2249
2250         /* no dump/verify */
2251         pass->pass.dump_irprog   = no_dump;
2252         pass->pass.verify_irprog = no_verify;
2253
2254         return &pass->pass;
2255 }  /* stat_dump_snapshot_pass */
2256
2257 /** the hook entries for the Firm statistics module */
2258 static hook_entry_t stat_hooks[hook_last];
2259
2260 /* initialize the statistics module. */
2261 void firm_init_stat(unsigned enable_options)
2262 {
2263 #define X(a)  a, sizeof(a)-1
2264 #define HOOK(h, fkt) \
2265         stat_hooks[h].hook._##h = fkt; register_hook(h, &stat_hooks[h])
2266         unsigned num = 0;
2267
2268         if (! (enable_options & FIRMSTAT_ENABLED))
2269                 return;
2270
2271         status = XMALLOCZ(stat_info_t);
2272
2273         /* enable statistics */
2274         status->stat_options = enable_options & FIRMSTAT_ENABLED ? enable_options : 0;
2275
2276         /* register all hooks */
2277         HOOK(hook_new_ir_op,                          stat_new_ir_op);
2278         HOOK(hook_free_ir_op,                         stat_free_ir_op);
2279         HOOK(hook_new_node,                           stat_new_node);
2280         HOOK(hook_turn_into_id,                       stat_turn_into_id);
2281         HOOK(hook_normalize,                          stat_normalize);
2282         HOOK(hook_new_graph,                          stat_new_graph);
2283         HOOK(hook_free_graph,                         stat_free_graph);
2284         HOOK(hook_irg_walk,                           stat_irg_walk);
2285         HOOK(hook_irg_walk_blkwise,                   stat_irg_walk_blkwise);
2286         HOOK(hook_irg_block_walk,                     stat_irg_block_walk);
2287         HOOK(hook_merge_nodes,                        stat_merge_nodes);
2288         HOOK(hook_reassociate,                        stat_reassociate);
2289         HOOK(hook_lower,                              stat_lower);
2290         HOOK(hook_inline,                             stat_inline);
2291         HOOK(hook_tail_rec,                           stat_tail_rec);
2292         HOOK(hook_strength_red,                       stat_strength_red);
2293         HOOK(hook_dead_node_elim,                     stat_dead_node_elim);
2294         HOOK(hook_if_conversion,                      stat_if_conversion);
2295         HOOK(hook_func_call,                          stat_func_call);
2296         HOOK(hook_arch_dep_replace_mul_with_shifts,   stat_arch_dep_replace_mul_with_shifts);
2297         HOOK(hook_arch_dep_replace_division_by_const, stat_arch_dep_replace_division_by_const);
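        /*
         * Each HOOK(h, fkt) line above expands, via the macro defined at the
         * top of this function, to roughly:
         *
         *   stat_hooks[hook_new_node].hook._hook_new_node = stat_new_node;
         *   register_hook(hook_new_node, &stat_hooks[hook_new_node]);
         *
         * (shown here for h = hook_new_node), i.e. the callback is stored in
         * the static hook entry and that entry is registered with irhooks.
         */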
2298
2299         obstack_init(&status->cnts);
2300         obstack_init(&status->be_data);
2301
2302         /* create the hash-tables */
2303         status->irg_hash   = new_pset(graph_cmp, 8);
2304         status->ir_op_hash = new_pset(opcode_cmp_2, 1);
2305
2306         /* create the wait queue */
2307         status->wait_q     = new_pdeq();
2308
2309         if (enable_options & FIRMSTAT_COUNT_STRONG_OP) {
2310                 /* build the pseudo-ops */
2311
2312                 _op_Phi0.code    = --num;
2313                 _op_Phi0.name    = new_id_from_chars(X("Phi0"));
2314
2315                 _op_PhiM.code    = --num;
2316                 _op_PhiM.name    = new_id_from_chars(X("PhiM"));
2317
2318                 _op_ProjM.code   = --num;
2319                 _op_ProjM.name   = new_id_from_chars(X("ProjM"));
2320
2321                 _op_MulC.code    = --num;
2322                 _op_MulC.name    = new_id_from_chars(X("MulC"));
2323
2324                 _op_DivC.code    = --num;
2325                 _op_DivC.name    = new_id_from_chars(X("DivC"));
2326
2327                 _op_ModC.code    = --num;
2328                 _op_ModC.name    = new_id_from_chars(X("ModC"));
2329
2330                 _op_DivModC.code = --num;
2331                 _op_DivModC.name = new_id_from_chars(X("DivModC"));
2332
2333                 _op_QuotC.code   = --num;
2334                 _op_QuotC.name   = new_id_from_chars(X("QuotC"));
2335
2336                 status->op_Phi0    = &_op_Phi0;
2337                 status->op_PhiM    = &_op_PhiM;
2338                 status->op_ProjM   = &_op_ProjM;
2339                 status->op_MulC    = &_op_MulC;
2340                 status->op_DivC    = &_op_DivC;
2341                 status->op_ModC    = &_op_ModC;
2342                 status->op_DivModC = &_op_DivModC;
2343                 status->op_QuotC   = &_op_QuotC;
2344         } else {
2345                 status->op_Phi0    = NULL;
2346                 status->op_PhiM    = NULL;
2347                 status->op_ProjM   = NULL;
2348                 status->op_MulC    = NULL;
2349                 status->op_DivC    = NULL;
2350                 status->op_ModC    = NULL;
2351                 status->op_DivModC = NULL;
2352                 status->op_QuotC   = NULL;
2353         }  /* if */
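        /*
         * Note on the codes above: num is unsigned and starts at 0, so each
         * --num yields UINT_MAX, UINT_MAX - 1, ...  The pseudo opcodes thus
         * get codes counting down from the top of the unsigned range, far
         * away from the real opcodes, which are numbered upwards from small
         * values, so collisions are avoided.
         */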
2354
2355         /* for Florian: count the Sel depth */
2356         if (enable_options & FIRMSTAT_COUNT_SELS) {
2357                 _op_SelSel.code    = --num;
2358                 _op_SelSel.name    = new_id_from_chars(X("Sel(Sel)"));
2359
2360                 _op_SelSelSel.code = --num;
2361                 _op_SelSelSel.name = new_id_from_chars(X("Sel(Sel(Sel))"));
2362
2363                 status->op_SelSel    = &_op_SelSel;
2364                 status->op_SelSelSel = &_op_SelSelSel;
2365         } else {
2366                 status->op_SelSel    = NULL;
2367                 status->op_SelSelSel = NULL;
2368         }  /* if */
2369
2370         /* register the dumpers */
2371         stat_register_dumper(&simple_dumper);
2372
2373         if (enable_options & FIRMSTAT_CSV_OUTPUT)
2374                 stat_register_dumper(&csv_dumper);
2375
2376         /* initialize the pattern hash */
2377         stat_init_pattern_history(enable_options & FIRMSTAT_PATTERN_ENABLED);
2378
2379         /* initialize the Const options */
2380         if (enable_options & FIRMSTAT_COUNT_CONSTS)
2381                 stat_init_const_cnt(status);
2382
2383         /* distribution table for parameter counts */
2384         status->dist_param_cnt = stat_new_int_distrib_tbl();
2385
2386         clear_optimization_counter();
2387
2388 #undef HOOK
2389 #undef X
2390 }  /* firm_init_stat */
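/*
 * Typical use of this module from a driver, as a sketch (the option set and
 * the file/phase names are only examples):
 *
 *   firm_init_stat(FIRMSTAT_ENABLED | FIRMSTAT_COUNT_CONSTS
 *                  | FIRMSTAT_CSV_OUTPUT);
 *   ... build and optimize the IR ...
 *   stat_dump_snapshot("test/queens.c", "lowered");
 *   stat_term();
 */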
2391
2392 /**
2393  * Frees all dumper structures.
2394  */
2395 static void stat_term_dumper(void) {
2396         dumper_t *dumper, *next_dumper;
2397
2398         for (dumper = status->dumper; dumper; /* iteration done in loop body */ ) {
2399                 if (dumper->func_map)
2400                         del_pset(dumper->func_map);
2401
2402                 next_dumper = dumper->next;
2403                 free(dumper);
2404                 dumper = next_dumper;
2405         }  /* for */
2406 }  /* stat_term_dumper */
2407
2408
2409 /* Terminates the statistics module, frees all memory. */
2410 void stat_term(void) {
2411         if (status != (stat_info_t *)&status_disable) {
2412                 obstack_free(&status->be_data, NULL);
2413                 obstack_free(&status->cnts, NULL);
2414
2415                 stat_term_dumper();
2416
2417                 xfree(status);
2418                 status = (stat_info_t *)&status_disable;
2419         }
2420 }  /* stat_term */
2421
2422 /* returns 1 if statistics were initialized, 0 otherwise */
2423 int stat_is_active(void) {
2424         return status != (stat_info_t *)&status_disable;
2425 }  /* stat_is_active */
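/*
 * stat_is_active() lets client code skip work that only feeds the
 * statistics, for example (the file and phase names are just examples):
 *
 *   if (stat_is_active())
 *           stat_dump_snapshot("test/queens.c", "after-opt");
 */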
2426
2427 #else
2428
2429 /* initialize the statistics module. */
2430 void firm_init_stat(unsigned enable_options) {}
2431
2432 /* Dumps a statistics snapshot */
2433 void stat_dump_snapshot(const char *name, const char *phase) {}
2434
2435 /* terminates the statistics module, frees all memory */
2436 void stat_term(void) {}
2437
2438 #endif /* FIRM_STATISTICS */