[libfirm] / ir / stat / firmstat.c
1 /*
2  * Copyright (C) 1995-2008 University of Karlsruhe.  All rights reserved.
3  *
4  * This file is part of libFirm.
5  *
6  * This file may be distributed and/or modified under the terms of the
7  * GNU General Public License version 2 as published by the Free Software
8  * Foundation and appearing in the file LICENSE.GPL included in the
9  * packaging of this file.
10  *
11  * Licensees holding valid libFirm Professional Edition licenses may use
12  * this file in accordance with the libFirm Commercial License
13  * Agreement provided with the Software.
14  *
15  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17  * PURPOSE.
18  */
19
20 /**
21  * @file
22  * @brief   Statistics for Firm.
23  * @author  Michael Beck
24  * @version $Id$
25  */
26 #ifdef HAVE_CONFIG_H
27 # include "config.h"
28 #endif
29
30 #ifdef FIRM_STATISTICS
31
32 #include <stdio.h>
33
34 #ifdef HAVE_STDLIB_H
35 # include <stdlib.h>
36 #endif
37 #ifdef HAVE_STRING_H
38 # include <string.h>
39 #endif
40
41 #include "irouts.h"
42 #include "irdump.h"
43 #include "hashptr.h"
44 #include "firmstat_t.h"
45 #include "pattern.h"
46 #include "dags.h"
47 #include "stat_dmp.h"
48 #include "xmalloc.h"
49 #include "irhooks.h"
50
51 /*
52  * These need to be static:
53  * special pseudo opcodes that we use to count some interesting cases
54  */
55
56 /**
57  * The Phi0, a node that is created during SSA construction
58  */
59 static ir_op _op_Phi0;
60
61 /** The PhiM, just to count memory Phis. */
62 static ir_op _op_PhiM;
63
64 /** The Mul by Const node. */
65 static ir_op _op_MulC;
66
67 /** The Div by Const node. */
68 static ir_op _op_DivC;
69
70 /** The Mod by Const node. */
71 static ir_op _op_ModC;
72
73 /** The DivMod by Const node. */
74 static ir_op _op_DivModC;
75
76 /** The memory Proj node. */
77 static ir_op _op_ProjM;
78
79 /** A Sel of a Sel */
80 static ir_op _op_SelSel;
81
82 /** A Sel of a Sel of a Sel */
83 static ir_op _op_SelSelSel;
84
85 /* ---------------------------------------------------------------------------------- */
86
87 /** Marks the beginning of a statistic (hook) function. */
88 #define STAT_ENTER              ++status->recursive
89
90 /** Marks the end of a statistic (hook) function. */
91 #define STAT_LEAVE              --status->recursive
92
93 /** Allows entering a statistic function only when we are not already in a hook. */
94 #define STAT_ENTER_SINGLE       do { if (status->recursive > 0) return; ++status->recursive; } while (0)
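
/*
 * Usage sketch (illustration only; the hook name below is hypothetical): the
 * hook functions further down, e.g. stat_irg_walk(), follow this pattern of
 * checking the options first and bracketing the body with the macros above
 * to avoid recursive re-entry from statistics code itself:
 *
 *   static void stat_some_hook(void *ctx, ir_graph *irg) {
 *       (void) ctx;
 *       if (! status->stat_options)
 *           return;
 *       STAT_ENTER;
 *       {
 *           graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
 *           cnt_inc(&graph->cnt[gcnt_acc_walked]);
 *       }
 *       STAT_LEAVE;
 *   }
 */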
95
96 /**
97  * global status
98  */
99 static const int status_disable = 0;
100 static stat_info_t *status = (stat_info_t *)&status_disable;
101
102 /**
103  * Compare two elements of the opcode hash.
104  */
105 static int opcode_cmp(const void *elt, const void *key) {
106         const node_entry_t *e1 = elt;
107         const node_entry_t *e2 = key;
108
109         return e1->op->code - e2->op->code;
110 }  /* opcode_cmp */
111
112 /**
113  * Compare two elements of the graph hash.
114  */
115 static int graph_cmp(const void *elt, const void *key) {
116         const graph_entry_t *e1 = elt;
117         const graph_entry_t *e2 = key;
118
119         return e1->irg != e2->irg;
120 }  /* graph_cmp */
121
122 /**
123  * Compare two elements of the optimization hash.
124  */
125 static int opt_cmp(const void *elt, const void *key) {
126         const opt_entry_t *e1 = elt;
127         const opt_entry_t *e2 = key;
128
129         return e1->op->code != e2->op->code;
130 }  /* opt_cmp */
131
132 /**
133  * Compare two elements of the block/extbb hash.
134  */
135 static int block_cmp(const void *elt, const void *key) {
136         const block_entry_t *e1 = elt;
137         const block_entry_t *e2 = key;
138
139         /* it's enough to compare the block number */
140         return e1->block_nr != e2->block_nr;
141 }  /* block_cmp */
142
143 /**
144  * Compare two elements of the be_block hash.
145  */
146 static int be_block_cmp(const void *elt, const void *key) {
147         const be_block_entry_t *e1 = elt;
148         const be_block_entry_t *e2 = key;
149
150         return e1->block_nr != e2->block_nr;
151 }  /* be_block_cmp */
152
153 /**
154  * Compare two elements of reg pressure hash.
155  */
156 static int reg_pressure_cmp(const void *elt, const void *key) {
157         const reg_pressure_entry_t *e1 = elt;
158         const reg_pressure_entry_t *e2 = key;
159
160         return e1->class_name != e2->class_name;
161 }  /* reg_pressure_cmp */
162
163 /**
164  * Compare two elements of the perm_stat hash.
165  */
166 static int perm_stat_cmp(const void *elt, const void *key) {
167         const perm_stat_entry_t *e1 = elt;
168         const perm_stat_entry_t *e2 = key;
169
170         return e1->perm != e2->perm;
171 }  /* perm_stat_cmp */
172
173 /**
174  * Compare two elements of the perm_class hash.
175  */
176 static int perm_class_cmp(const void *elt, const void *key) {
177         const perm_class_entry_t *e1 = elt;
178         const perm_class_entry_t *e2 = key;
179
180         return e1->class_name != e2->class_name;
181 }  /* perm_class_cmp */
182
183 /**
184  * Compare two elements of the ir_op hash.
185  */
186 static int opcode_cmp_2(const void *elt, const void *key) {
187         const ir_op *e1 = elt;
188         const ir_op *e2 = key;
189
190         return e1->code != e2->code;
191 }  /* opcode_cmp_2 */
192
193 /**
194  * Compare two elements of the address_mark set.
195  */
196 static int address_mark_cmp(const void *elt, const void *key, size_t size) {
197         const address_mark_entry_t *e1 = elt;
198         const address_mark_entry_t *e2 = key;
199         (void) size;
200
201         /* compare only the nodes, the rest is used as data container */
202         return e1->node != e2->node;
203 }  /* address_mark_cmp */
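
/*
 * Note: all compare callbacks above follow the pset/set convention of
 * returning 0 for "equal" and non-zero otherwise.  They are passed to the
 * container constructors, for example (as done in graph_get_entry() below):
 *
 *   pset *opcode_hash = new_pset(opcode_cmp, 5);
 */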
204
205 /**
206  * Clears all counters in a node_entry_t.
207  */
208 static void opcode_clear_entry(node_entry_t *elem) {
209         cnt_clr(&elem->cnt_alive);
210         cnt_clr(&elem->new_node);
211         cnt_clr(&elem->into_Id);
212 }  /* opcode_clear_entry */
213
214 /**
215  * Returns the associated node_entry_t for an ir_op.
216  *
217  * @param op    the IR operation
218  * @param hmap  a hash map containing ir_op* -> node_entry_t*
219  */
220 static node_entry_t *opcode_get_entry(const ir_op *op, hmap_node_entry_t *hmap) {
221         node_entry_t key;
222         node_entry_t *elem;
223
224         key.op = op;
225
226         elem = pset_find(hmap, &key, op->code);
227         if (elem)
228                 return elem;
229
230         elem = obstack_alloc(&status->cnts, sizeof(*elem));
231         memset(elem, 0, sizeof(*elem));
232
233         /* clear counter */
234         opcode_clear_entry(elem);
235
236         elem->op = op;
237
238         return pset_insert(hmap, elem, op->code);
239 }  /* opcode_get_entry */
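
/*
 * Usage sketch (illustration only): the hooks below use this find-or-create
 * helper to bump a per-opcode counter, e.g. as in stat_new_node():
 *
 *   node_entry_t *entry = opcode_get_entry(op, graph->opcode_hash);
 *   cnt_inc(&entry->new_node);
 */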
240
241 /**
242  * Returns the associated ir_op for an opcode.
243  *
244  * @param code  the IR opcode
245  * @param hmap  the hash map containing opcode -> ir_op*
246  */
247 static ir_op *opcode_find_entry(ir_opcode code, hmap_ir_op *hmap) {
248         ir_op key;
249
250         key.code = code;
251         return pset_find(hmap, &key, code);
252 }  /* opcode_find_entry */
253
254 /**
255  * Clears all counters in a graph_entry_t.
256  *
257  * @param elem  the graph entry
258  * @param all   if non-zero, clears all counters, else leaves the accumulated ones
259  */
260 static void graph_clear_entry(graph_entry_t *elem, int all) {
261         int i;
262
263         /* clear accumulated / non-accumulated counters */
264         for (i = all ? 0 : _gcnt_non_acc; i < _gcnt_last; ++i) {
265                 cnt_clr(&elem->cnt[i]);
266         }  /* for */
267
268         if (elem->block_hash) {
269                 del_pset(elem->block_hash);
270                 elem->block_hash = NULL;
271         }  /* if */
272
273         if (elem->extbb_hash) {
274                 del_pset(elem->extbb_hash);
275                 elem->extbb_hash = NULL;
276         }  /* if */
277
278         obstack_free(&elem->recalc_cnts, NULL);
279         obstack_init(&elem->recalc_cnts);
280 }  /* graph_clear_entry */
281
282 /**
283  * Returns the associated graph_entry_t for an IR graph.
284  *
285  * @param irg   the IR graph
286  * @param hmap  the hash map containing ir_graph* -> graph_entry_t*
287  */
288 static graph_entry_t *graph_get_entry(ir_graph *irg, hmap_graph_entry_t *hmap)
289 {
290         graph_entry_t key;
291         graph_entry_t *elem;
292         size_t i;
293
294         key.irg = irg;
295
296         elem = pset_find(hmap, &key, HASH_PTR(irg));
297
298         if (elem) {
299                 /* create hash map backend block information */
300                 if (! elem->be_block_hash)
301                         elem->be_block_hash = new_pset(be_block_cmp, 5);
302
303                 return elem;
304         }  /* if */
305
306         /* allocate a new one */
307         elem = obstack_alloc(&status->cnts, sizeof(*elem));
308         memset(elem, 0, sizeof(*elem));
309         obstack_init(&elem->recalc_cnts);
310
311         /* clear counter */
312         graph_clear_entry(elem, 1);
313
314         /* new hash table for opcodes here  */
315         elem->opcode_hash   = new_pset(opcode_cmp, 5);
316         elem->address_mark  = new_set(address_mark_cmp, 5);
317         elem->irg           = irg;
318
319         /* these hash tables are created on demand */
320         elem->block_hash = NULL;
321         elem->extbb_hash = NULL;
322
323         for (i = 0; i < sizeof(elem->opt_hash)/sizeof(elem->opt_hash[0]); ++i)
324                 elem->opt_hash[i] = new_pset(opt_cmp, 4);
325
326         return pset_insert(hmap, elem, HASH_PTR(irg));
327 }  /* graph_get_entry */
328
329 /**
330  * Clears all counters in an opt_entry_t.
331  */
332 static void opt_clear_entry(opt_entry_t *elem) {
333         cnt_clr(&elem->count);
334 }  /* opt_clear_entry */
335
336 /**
337  * Returns the associated opt_entry_t for an IR operation.
338  *
339  * @param op    the IR operation
340  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
341  */
342 static opt_entry_t *opt_get_entry(const ir_op *op, hmap_opt_entry_t *hmap)
343 {
344         opt_entry_t key;
345         opt_entry_t *elem;
346
347         key.op = op;
348
349         elem = pset_find(hmap, &key, op->code);
350         if (elem)
351                 return elem;
352
353         elem = obstack_alloc(&status->cnts, sizeof(*elem));
354         memset(elem, 0, sizeof(*elem));
355
356         /* clear new counter */
357         opt_clear_entry(elem);
358
359         elem->op = op;
360
361         return pset_insert(hmap, elem, op->code);
362 }  /* opt_get_entry */
363
364 /**
365  * Clears all counters in a block_entry_t.
366  */
367 static void block_clear_entry(block_entry_t *elem) {
368         int i;
369
370         for (i = 0; i < _bcnt_last; ++i)
371                 cnt_clr(&elem->cnt[i]);
372 }  /* block_clear_entry */
373
374 /**
375  * Returns the associated block_entry_t for a block.
376  *
377  * @param block_nr  an IR block number
378  * @param hmap      a hash map containing long -> block_entry_t
379  */
380 static block_entry_t *block_get_entry(struct obstack *obst, long block_nr, hmap_block_entry_t *hmap)
381 {
382         block_entry_t key;
383         block_entry_t *elem;
384
385         key.block_nr = block_nr;
386
387         elem = pset_find(hmap, &key, block_nr);
388         if (elem)
389                 return elem;
390
391         elem = obstack_alloc(obst, sizeof(*elem));
392         memset(elem, 0, sizeof(*elem));
393
394         /* clear new counter */
395         block_clear_entry(elem);
396
397         elem->block_nr = block_nr;
398
399         return pset_insert(hmap, elem, block_nr);
400 }  /* block_get_entry */
401
402 /**
403  * Clear all sets in be_block_entry_t.
404  */
405 static void be_block_clear_entry(be_block_entry_t *elem)
406 {
407         if (elem->reg_pressure)
408                 del_pset(elem->reg_pressure);
409
410         if (elem->sched_ready)
411                 stat_delete_distrib_tbl(elem->sched_ready);
412
413         if (elem->perm_class_stat)
414                 del_pset(elem->perm_class_stat);
415
416         elem->reg_pressure    = new_pset(reg_pressure_cmp, 5);
417         elem->sched_ready     = stat_new_int_distrib_tbl();
418         elem->perm_class_stat = new_pset(perm_class_cmp, 5);
419 }  /* be_block_clear_entry */
420
421 /**
422  * Returns the associated be_block_entry_t for a block.
423  *
424  * @param block_nr  an IR block number
425  * @param hmap      a hash map containing long -> be_block_entry_t
426  */
427 static be_block_entry_t *be_block_get_entry(struct obstack *obst, long block_nr, hmap_be_block_entry_t *hmap)
428 {
429         be_block_entry_t key;
430         be_block_entry_t *elem;
431
432         key.block_nr = block_nr;
433
434         elem = pset_find(hmap, &key, block_nr);
435         if (elem)
436                 return elem;
437
438         elem = obstack_alloc(obst, sizeof(*elem));
439         memset(elem, 0, sizeof(*elem));
440
441         /* clear new counter */
442         be_block_clear_entry(elem);
443
444         elem->block_nr = block_nr;
445
446         return pset_insert(hmap, elem, block_nr);
447 }  /* be_block_get_entry */
448
449 /**
450  * clears all sets in perm_class_entry_t
451  */
452 static void perm_class_clear_entry(perm_class_entry_t *elem) {
453         if (elem->perm_stat)
454                 del_pset(elem->perm_stat);
455
456         elem->perm_stat = new_pset(perm_stat_cmp, 5);
457 }  /* perm_class_clear_entry */
458
459 /**
460  * Returns the associated perm_class entry for a register class.
461  *
462  * @param class_name  the register class name
463  * @param hmap        a hash map containing class_name -> perm_class_entry_t
464  */
465 static perm_class_entry_t *perm_class_get_entry(struct obstack *obst, const char *class_name,
466                                                 hmap_perm_class_entry_t *hmap)
467 {
468         perm_class_entry_t key;
469         perm_class_entry_t *elem;
470
471         key.class_name = class_name;
472
473         elem = pset_find(hmap, &key, HASH_PTR(class_name));
474         if (elem)
475                 return elem;
476
477         elem = obstack_alloc(obst, sizeof(*elem));
478         memset(elem, 0, sizeof(*elem));
479
480         /* clear new counter */
481         perm_class_clear_entry(elem);
482
483         elem->class_name = class_name;
484
485         return pset_insert(hmap, elem, HASH_PTR(class_name));
486 }  /* perm_class_get_entry */
487
488 /**
489  * clears all sets in perm_stat_entry_t
490  */
491 static void perm_stat_clear_entry(perm_stat_entry_t *elem) {
492         if (elem->chains)
493                 stat_delete_distrib_tbl(elem->chains);
494
495         if (elem->cycles)
496                 stat_delete_distrib_tbl(elem->cycles);
497
498         elem->chains = stat_new_int_distrib_tbl();
499         elem->cycles = stat_new_int_distrib_tbl();
500 }  /* perm_stat_clear_entry */
501
502 /**
503  * Returns the associated perm_stat entry for a perm.
504  *
505  * @param perm      the perm node
506  * @param hmap      a hash map containing perm -> perm_stat_entry_t
507  */
508 static perm_stat_entry_t *perm_stat_get_entry(struct obstack *obst, ir_node *perm, hmap_perm_stat_entry_t *hmap)
509 {
510         perm_stat_entry_t key;
511         perm_stat_entry_t *elem;
512
513         key.perm = perm;
514
515         elem = pset_find(hmap, &key, HASH_PTR(perm));
516         if (elem)
517                 return elem;
518
519         elem = obstack_alloc(obst, sizeof(*elem));
520         memset(elem, 0, sizeof(*elem));
521
522         /* clear new counter */
523         perm_stat_clear_entry(elem);
524
525         elem->perm = perm;
526
527         return pset_insert(hmap, elem, HASH_PTR(perm));
528 }  /* perm_stat_get_entry */
529
530 /**
531  * Clear optimizations counter,
532  */
533 static void clear_optimization_counter(void)  {
534         int i;
535         for (i = 0; i < FS_OPT_MAX; ++i)
536                 cnt_clr(&status->num_opts[i]);
537 }
538
539 /**
540  * Returns the ir_op for an IR node,
541  * handles special cases and returns pseudo opcodes.
542  *
543  * @param node  an IR node
544  */
545 static ir_op *stat_get_irn_op(ir_node *node)
546 {
547         ir_op *op = get_irn_op(node);
548         ir_opcode opc = op->code;
549
550         switch (opc) {
551         case iro_Phi:
552                 if (get_irn_arity(node) == 0) {
553                         /* special case, a Phi0 node, count on extra counter */
554                         op = status->op_Phi0 ? status->op_Phi0 : op;
555                 } else if (get_irn_mode(node) == mode_M) {
556                         /* special case, a Memory Phi node, count on extra counter */
557                         op = status->op_PhiM ? status->op_PhiM : op;
558                 }  /* if */
559                 break;
560         case iro_Proj:
561                 if (get_irn_mode(node) == mode_M) {
562                         /* special case, a Memory Proj node, count on extra counter */
563                         op = status->op_ProjM ? status->op_ProjM : op;
564                 }  /* if */
565                 break;
566         case iro_Mul:
567                 if (get_irn_op(get_Mul_left(node)) == op_Const || get_irn_op(get_Mul_right(node)) == op_Const) {
568                         /* special case, a Multiply by a const, count on extra counter */
569                         op = status->op_MulC ? status->op_MulC : op;
570                 }  /* if */
571                 break;
572         case iro_Div:
573                 if (get_irn_op(get_Div_right(node)) == op_Const) {
574                         /* special case, a division by a const, count on extra counter */
575                         op = status->op_DivC ? status->op_DivC : op;
576                 }  /* if */
577                 break;
578         case iro_Mod:
579                 if (get_irn_op(get_Mod_right(node)) == op_Const) {
580                         /* special case, a modulo by a const, count on extra counter */
581                         op = status->op_ModC ? status->op_ModC : op;
582                 }  /* if */
583                 break;
584         case iro_DivMod:
585                 if (get_irn_op(get_DivMod_right(node)) == op_Const) {
586                         /* special case, a division/modulo by a const, count on extra counter */
587                         op = status->op_DivModC ? status->op_DivModC : op;
588                 }  /* if */
589                 break;
590         case iro_Sel:
591                 if (get_irn_op(get_Sel_ptr(node)) == op_Sel) {
592                         /* special case, a Sel of a Sel, count on extra counter */
593                         op = status->op_SelSel ? status->op_SelSel : op;
594                         if (get_irn_op(get_Sel_ptr(get_Sel_ptr(node))) == op_Sel) {
595                                 /* special case, a Sel of a Sel of a Sel, count on extra counter */
596                                 op = status->op_SelSelSel ? status->op_SelSelSel : op;
597                         }  /* if */
598                 }  /* if */
599                 break;
600         default:
601                 ;
602         }  /* switch */
603
604         return op;
605 }  /* stat_get_irn_op */
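
/*
 * Example (illustrative): for source like
 *
 *   int f(int x) { return x * 3; }
 *
 * the Mul node has a Const operand, so it is reported under the pseudo
 * opcode MulC (provided status->op_MulC was registered) instead of the
 * plain Mul opcode.  Likewise a memory Phi is reported as PhiM and a
 * zero-arity Phi as Phi0.
 */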
606
607 /**
608  * Updates the block counters.
609  */
610 static void update_block_info(ir_node *node, graph_entry_t *graph)
611 {
612         ir_op *op = get_irn_op(node);
613         ir_node *block;
614         block_entry_t *b_entry;
615         int i, arity;
616
617         /* check for block */
618         if (op == op_Block) {
619                 arity = get_irn_arity(node);
620                 b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(node), graph->block_hash);
621                 /* mark the start and end block to allow filtering them out */
622                 if (node == get_irg_start_block(graph->irg))
623                         b_entry->is_start = 1;
624                 else if (node == get_irg_end_block(graph->irg))
625                         b_entry->is_end = 1;
626
627                 /* count all incoming edges */
628                 for (i = 0; i < arity; ++i) {
629                         ir_node *pred = get_irn_n(node, i);
630                         ir_node *other_block = get_nodes_block(pred);
631                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
632
633                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
634                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
635                 }  /* for */
636                 return;
637         }  /* if */
638
639         block   = get_nodes_block(node);
640         b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(block), graph->block_hash);
641
642         if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
643                 /* count data Phi per block */
644                 cnt_inc(&b_entry->cnt[bcnt_phi_data]);
645         }  /* if */
646
647         /* we have a new node in our block */
648         cnt_inc(&b_entry->cnt[bcnt_nodes]);
649
650         /* don't count keep-alive edges */
651         if (is_End(node))
652                 return;
653
654         arity = get_irn_arity(node);
655
656         for (i = 0; i < arity; ++i) {
657                 ir_node *pred = get_irn_n(node, i);
658                 ir_node *other_block;
659
660                 other_block = get_nodes_block(pred);
661
662                 if (other_block == block)
663                         cnt_inc(&b_entry->cnt[bcnt_edges]);     /* an in-block edge */
664                 else {
665                         block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash);
666
667                         cnt_inc(&b_entry->cnt[bcnt_in_edges]);  /* an edge coming from another block */
668                         cnt_inc(&b_entry_other->cnt[bcnt_out_edges]);
669                 }  /* if */
670         }  /* for */
671 }  /* update_block_info */
672
673 /**
674  * Updates the extended block counters.
675  */
676 static void update_extbb_info(ir_node *node, graph_entry_t *graph)
677 {
678         ir_op *op = get_irn_op(node);
679         ir_extblk *extbb;
680         extbb_entry_t *eb_entry;
681         int i, arity;
682
683         /* check for block */
684         if (op == op_Block) {
685                 extbb = get_nodes_extbb(node);
686                 arity = get_irn_arity(node);
687                 eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);
688
689                 /* count all incoming edges */
690                 for (i = 0; i < arity; ++i) {
691                         ir_node *pred = get_irn_n(node, i);
692                         ir_extblk *other_extbb = get_nodes_extbb(pred);
693
694                         if (extbb != other_extbb) {
695                                 extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);
696
697                                 cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
698                                 cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
699                         }  /* if */
700                 }  /* for */
701                 return;
702         }  /* if */
703
704         extbb    = get_nodes_extbb(node);
705         eb_entry = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(extbb), graph->extbb_hash);
706
707         if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
708                 /* count data Phi per extbb */
709                 cnt_inc(&eb_entry->cnt[bcnt_phi_data]);
710         }  /* if */
711
712         /* we have a new node in our block */
713         cnt_inc(&eb_entry->cnt[bcnt_nodes]);
714
715         /* don't count keep-alive edges */
716         if (get_irn_op(node) == op_End)
717                 return;
718
719         arity = get_irn_arity(node);
720
721         for (i = 0; i < arity; ++i) {
722                 ir_node *pred = get_irn_n(node, i);
723                 ir_extblk *other_extbb = get_nodes_extbb(pred);
724
725                 if (other_extbb == extbb)
726                         cnt_inc(&eb_entry->cnt[bcnt_edges]);    /* an in-extbb edge */
727                 else {
728                         extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash);
729
730                         cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */
731                         cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]);
732                 }  /* if */
733         }  /* for */
734 }  /* update_extbb_info */
735
736 /**
737  * Calculates how many arguments of the call are constant and updates
738  * the parameter distribution.
739  */
740 static void analyse_params_of_Call(graph_entry_t *graph, ir_node *call) {
741         int i, num_const_args = 0, num_local_adr = 0;
742         int n = get_Call_n_params(call);
743
744         for (i = 0; i < n; ++i) {
745                 ir_node *param = get_Call_param(call, i);
746
747                 if (is_irn_constlike(param))
748                         ++num_const_args;
749                 else if (is_Sel(param)) {
750                         ir_node *base = param;
751
752                         do {
753                                 base = get_Sel_ptr(base);
754                         } while (is_Sel(base));
755
756                         if (base == get_irg_frame(current_ir_graph))
757                                 ++num_local_adr;
758                 }
759
760         }  /* for */
761
762         if (num_const_args > 0)
763                 cnt_inc(&graph->cnt[gcnt_call_with_cnst_arg]);
764         if (num_const_args == n)
765                 cnt_inc(&graph->cnt[gcnt_call_with_all_cnst_arg]);
766         if (num_local_adr > 0)
767                 cnt_inc(&graph->cnt[gcnt_call_with_local_adr]);
768
769         stat_inc_int_distrib_tbl(status->dist_param_cnt, n);
770 }  /* analyse_params_of_Call */
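
/*
 * Worked example (illustrative): for a call like foo(1, &local, x) we have
 * n = 3, num_const_args = 1 and, assuming &local is a Sel chain based on
 * the frame, num_local_adr = 1.  Hence gcnt_call_with_cnst_arg and
 * gcnt_call_with_local_adr are incremented and the parameter-count
 * distribution is bumped at bucket 3.
 */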
771
772 /**
773  * Update info on calls.
774  *
775  * @param call   The call
776  * @param graph  The graph entry containing the call
777  */
778 static void stat_update_call(ir_node *call, graph_entry_t *graph)
779 {
780         ir_node   *block = get_nodes_block(call);
781         ir_node   *ptr = get_Call_ptr(call);
782         ir_entity *ent = NULL;
783         ir_graph  *callee = NULL;
784
785         /*
786          * If the block is bad, the whole subgraph will collapse later
787          * so do not count this call.
788          * This happens in dead code.
789          */
790         if (is_Bad(block))
791                 return;
792
793         cnt_inc(&graph->cnt[gcnt_all_calls]);
794
795         /* found a call, this function is not a leaf */
796         graph->is_leaf = 0;
797
798         if (get_irn_op(ptr) == op_SymConst) {
799                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
800                         /* OK, we seem to know the entity */
801                         ent = get_SymConst_entity(ptr);
802                         callee = get_entity_irg(ent);
803
804                         /* it is recursive if it calls itself at least once */
805                         if (callee == graph->irg)
806                                 graph->is_recursive = 1;
807                 }  /* if */
808         } else {
809                 /* indirect call, we cannot predict the callee */
810                 cnt_inc(&graph->cnt[gcnt_indirect_calls]);
811
812                 /* NOT a leaf call */
813                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
814         }  /* if */
815
816         /* check if it's a chain-call: then the call block
817         * must dominate the end block. */
818         {
819                 ir_node *curr = get_irg_end_block(graph->irg);
820                 int depth = get_Block_dom_depth(block);
821
822                 for (; curr != block && get_Block_dom_depth(curr) > depth;) {
823                         curr = get_Block_idom(curr);
824
825                         if (! curr || is_no_Block(curr))
826                                 break;
827                 }  /* for */
828
829                 if (curr != block)
830                         graph->is_chain_call = 0;
831         }
832
833         /* check if the callee is a leaf */
834         if (callee) {
835                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
836
837                 if (called->is_analyzed) {
838                         if (! called->is_leaf)
839                                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
840                 }  /* if */
841         }  /* if */
842
843         analyse_params_of_Call(graph, call);
844 }  /* stat_update_call */
845
846 /**
847  * Update info on calls for graphs on the wait queue.
848  */
849 static void stat_update_call_2(ir_node *call, graph_entry_t *graph)
850 {
851         ir_node   *block = get_nodes_block(call);
852         ir_node   *ptr = get_Call_ptr(call);
853         ir_entity *ent = NULL;
854         ir_graph  *callee = NULL;
855
856         /*
857          * If the block is bad, the whole subgraph will collapse later
858          * so do not count this call.
859          * This happens in dead code.
860          */
861         if (is_Bad(block))
862                 return;
863
864         if (get_irn_op(ptr) == op_SymConst) {
865                 if (get_SymConst_kind(ptr) == symconst_addr_ent) {
866                         /* OK, we seem to know the entity */
867                         ent = get_SymConst_entity(ptr);
868                         callee = get_entity_irg(ent);
869                 }  /* if */
870         }  /* if */
871
872         /* check if the callee is a leaf */
873         if (callee) {
874                 graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
875
876                 assert(called->is_analyzed);
877
878                 if (! called->is_leaf)
879                         graph->is_leaf_call = LCS_NON_LEAF_CALL;
880         } else
881                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
882 }  /* stat_update_call_2 */
883
884 /**
885  * Find the base address of a Sel node.
886  *
887  * @param sel  the node
888  *
889  * @return the base address.
890  */
891 static ir_node *find_base_adr(ir_node *sel) {
892         ir_node *ptr = get_Sel_ptr(sel);
893
894         while (is_Sel(ptr)) {
895                 sel = ptr;
896                 ptr = get_Sel_ptr(sel);
897         }
898         return ptr;
899 }  /* find_base_adr */
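
/*
 * Example (illustrative): for an access like obj.a.b the address is a Sel
 * chain Sel(Sel(ptr, a), b); find_base_adr() walks down the chain and
 * returns ptr, e.g. the frame pointer when obj is a local aggregate.
 */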
900
901 /**
902  * Update info on Load/Store address statistics.
903  */
904 static void stat_update_address(ir_node *node, graph_entry_t *graph) {
905         ir_opcode opc = get_irn_opcode(node);
906         ir_node *base;
907         ir_graph *irg;
908
909         switch (opc) {
910         case iro_SymConst:
911                 /* a global address */
912                 cnt_inc(&graph->cnt[gcnt_global_adr]);
913                 break;
914         case iro_Sel:
915                 base = find_base_adr(node);
916                 irg = current_ir_graph;
917                 if (base == get_irg_tls(irg)) {
918                         /* a TLS variable, like a global. */
919                         cnt_inc(&graph->cnt[gcnt_global_adr]);
920                 } else if (base == get_irg_frame(irg)) {
921                         /* a local variable. */
922                         cnt_inc(&graph->cnt[gcnt_local_adr]);
923                 } else {
924                         /* Pointer access */
925                         if (is_Proj(base) && skip_Proj(get_Proj_pred(base)) == get_irg_start(irg)) {
926                                 /* pointer access through parameter, check for THIS */
927                                 ir_entity *ent = get_irg_entity(irg);
928
929                                 if (ent != NULL) {
930                                         ir_type *ent_tp = get_entity_type(ent);
931
932                                         if (get_method_calling_convention(ent_tp) & cc_this_call) {
933                                                 if (get_Proj_proj(base) == 0) {
934                                                         /* THIS pointer */
935                                                         cnt_inc(&graph->cnt[gcnt_this_adr]);
936                                                         goto end_parameter;
937                                                 }  /* if */
938                                         }  /* if */
939                                 }  /* if */
940                                 /* other parameter */
941                                 cnt_inc(&graph->cnt[gcnt_param_adr]);
942 end_parameter:  ;
943                         } else {
944                                 /* unknown Pointer access */
945                                 cnt_inc(&graph->cnt[gcnt_other_adr]);
946                         }  /* if */
947                 }  /* if */
948         default:
949                 ;
950         }  /* switch */
951 }  /* stat_update_address */
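
/*
 * Classification examples (illustrative): a Load from a global (its address
 * is a SymConst) bumps gcnt_global_adr; a Load from a stack slot (a Sel
 * chain based on the frame) bumps gcnt_local_adr; a Load through the first
 * parameter of a cc_this_call method bumps gcnt_this_adr; other parameter
 * and pointer accesses end up in gcnt_param_adr or gcnt_other_adr.
 */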
952
953 /**
954  * Walker for reachable nodes count.
955  */
956 static void update_node_stat(ir_node *node, void *env)
957 {
958         graph_entry_t *graph = env;
959         node_entry_t *entry;
960
961         ir_op *op = stat_get_irn_op(node);
962         int arity = get_irn_arity(node);
963
964         entry = opcode_get_entry(op, graph->opcode_hash);
965
966         cnt_inc(&entry->cnt_alive);
967         cnt_add_i(&graph->cnt[gcnt_edges], arity);
968
969         /* count block edges */
970         update_block_info(node, graph);
971
972         /* count extended block edges */
973         if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
974                 if (graph->irg != get_const_code_irg())
975                         update_extbb_info(node, graph);
976         }  /* if */
977
978         /* handle statistics for special node types */
979
980         switch (op->code) {
981         case iro_Const:
982                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS) {
983                         /* check properties of constants */
984                         stat_update_const(status, node, graph);
985                 }  /* if */
986                 break;
987         case iro_Call:
988                 /* check for properties that depend on calls, like recursion/leaf/indirect calls */
989                 stat_update_call(node, graph);
990                 break;
991         case iro_Load:
992                 /* check address properties */
993                 stat_update_address(get_Load_ptr(node), graph);
994                 break;
995         case iro_Store:
996                 /* check address properties */
997                 stat_update_address(get_Store_ptr(node), graph);
998                 break;
999         default:
1000                 ;
1001         }  /* switch */
1002 }  /* update_node_stat */
1003
1004 /**
1005  * Walker for reachable nodes count for graphs on the wait_q.
1006  */
1007 static void update_node_stat_2(ir_node *node, void *env) {
1008         graph_entry_t *graph = env;
1009
1010         /* check for properties that depend on calls, like recursion/leaf/indirect calls */
1011         if (is_Call(node))
1012                 stat_update_call_2(node, graph);
1013 }  /* update_node_stat_2 */
1014
1015 /**
1016  * Get the current address mark.
1017  */
1018 static unsigned get_adr_mark(graph_entry_t *graph, ir_node *node) {
1019         address_mark_entry_t *value = set_find(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1020
1021         return value ? value->mark : 0;
1022 }  /* get_adr_mark */
1023
1024 /**
1025  * Set the current address mark.
1026  */
1027 static void set_adr_mark(graph_entry_t *graph, ir_node *node, unsigned val) {
1028         address_mark_entry_t *value = set_insert(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
1029
1030         value->mark = val;
1031 }  /* set_adr_mark */
1032
1033 #undef DUMP_ADR_MODE
1034
1035 #ifdef DUMP_ADR_MODE
1036 /**
1037  * a vcg attribute hook: Color a node with a different color if
1038  * it's identified as a part of an address expression or at least referenced
1039  * by an address expression.
1040  */
1041 static int stat_adr_mark_hook(FILE *F, ir_node *node, ir_node *local)
1042 {
1043         ir_node *n           = local ? local : node;
1044         ir_graph *irg        = get_irn_irg(n);
1045         graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1046         unsigned mark        = get_adr_mark(graph, n);
1047
1048         if (mark & MARK_ADDRESS_CALC)
1049                 fprintf(F, "color: purple");
1050         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1051                 fprintf(F, "color: pink");
1052         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1053                 fprintf(F, "color: lightblue");
1054         else
1055                 return 0;
1056
1057         /* I know the color! */
1058         return 1;
1059 }  /* stat_adr_mark_hook */
1060 #endif /* DUMP_ADR_MODE */
1061
1062 /**
1063  * Return the "operational" mode of a Firm node.
1064  */
1065 static ir_mode *get_irn_op_mode(ir_node *node) {
1066         switch (get_irn_opcode(node)) {
1067         case iro_Load:
1068                 return get_Load_mode(node);
1069         case iro_Store:
1070                 return get_irn_mode(get_Store_value(node));
1071         case iro_DivMod:
1072                 return get_irn_mode(get_DivMod_left(node));
1073         case iro_Div:
1074                 return get_irn_mode(get_Div_left(node));
1075         case iro_Mod:
1076                 return get_irn_mode(get_Mod_left(node));
1077         case iro_Cmp:
1078                 /* Cmp is no address calculation, or is it? */
1079         default:
1080                 return get_irn_mode(node);
1081         }  /* switch */
1082 }  /* get_irn_op_mode */
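
/*
 * Example (illustrative): for a Load of an int value, get_irn_mode() yields
 * the tuple mode mode_T, while get_irn_op_mode() yields the loaded mode
 * (e.g. mode_Is), which is what the address-mark walker below needs.
 */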
1083
1084 /**
1085  * Post-walker that marks every node that is an address calculation.
1086  *
1087  * Users of a node must be visited first. We ensure this by
1088  * calling it in the post of an outs walk. This should work even in cycles,
1089  * while the normal pre-walk will not.
1090  */
1091 static void mark_address_calc(ir_node *node, void *env) {
1092         graph_entry_t *graph = env;
1093         ir_mode *mode = get_irn_op_mode(node);
1094         int i, n;
1095         unsigned mark_preds = MARK_REF_NON_ADR;
1096
1097         if (! mode_is_data(mode))
1098                 return;
1099
1100         if (mode_is_reference(mode)) {
1101                 /* a reference is calculated here, we are sure */
1102                 set_adr_mark(graph, node, MARK_ADDRESS_CALC);
1103
1104                 mark_preds = MARK_REF_ADR;
1105         } else {
1106                 unsigned mark = get_adr_mark(graph, node);
1107
1108                 if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR) {
1109                         /*
1110                          * this node has no reference mode, but is only
1111                          * referenced by address calculations
1112                          */
1113                         mark_preds = MARK_REF_ADR;
1114                 }  /* if */
1115         }  /* if */
1116
1117         /* mark all predecessors */
1118         for (i = 0, n = get_irn_arity(node); i < n; ++i) {
1119                 ir_node *pred = get_irn_n(node, i);
1120
1121                 mode = get_irn_op_mode(pred);
1122                 if (! mode_is_data(mode))
1123                         continue;
1124
1125                 set_adr_mark(graph, pred, get_adr_mark(graph, pred) | mark_preds);
1126         }  /* for */
1127 }  /* mark_address_calc */
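
/*
 * Worked example (illustrative): consider an integer Const used both as an
 * offset inside an address computation and as an ordinary arithmetic
 * operand.  The address-computing user (reference mode) marks it
 * MARK_REF_ADR, the arithmetic user (assuming it is not itself purely
 * address-referenced) marks it MARK_REF_NON_ADR, so count_adr_ops() below
 * counts it under gcnt_all_adr_ops rather than gcnt_pure_adr_ops.
 */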
1128
1129 /**
1130  * Post-walker that counts the address calculation operations of a graph,
1131  * based on the address marks computed by mark_address_calc().
1132  *
1133  * It is run as the post-walker of a normal graph walk, after the marks
1134  * have been set.
1135  */
1136 static void count_adr_ops(ir_node *node, void *env) {
1137         graph_entry_t *graph = env;
1138         unsigned mark        = get_adr_mark(graph, node);
1139
1140         if (mark & MARK_ADDRESS_CALC)
1141                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1142         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
1143                 cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
1144         else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
1145                 cnt_inc(&graph->cnt[gcnt_all_adr_ops]);
1146 }  /* count_adr_ops */
1147
1148 /**
1149  * Called for every graph when the graph is either deleted or stat_dump_snapshot()
1150  * is called; recalculates all statistic info.
1151  *
1152  * @param global    The global entry
1153  * @param graph     The current entry
1154  */
1155 static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph)
1156 {
1157         node_entry_t *entry;
1158         int i;
1159
1160         /* first clear the alive counters in the graph */
1161         foreach_pset(graph->opcode_hash, entry) {
1162                 cnt_clr(&entry->cnt_alive);
1163         }  /* foreach_pset */
1164
1165         /* set pessimistic values */
1166         graph->is_leaf       = 1;
1167         graph->is_leaf_call  = LCS_UNKNOWN;
1168         graph->is_recursive  = 0;
1169         graph->is_chain_call = 1;
1170
1171         /* create new block counter */
1172         graph->block_hash = new_pset(block_cmp, 5);
1173
1174         /* we need dominator info */
1175         if (graph->irg != get_const_code_irg()) {
1176                 assure_doms(graph->irg);
1177
1178                 if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
1179                         /* we need extended basic blocks */
1180                         compute_extbb(graph->irg);
1181
1182                         /* create new extbb counter */
1183                         graph->extbb_hash = new_pset(block_cmp, 5);
1184                 }  /* if */
1185         }  /* if */
1186
1187         /* count the nodes in the graph */
1188         irg_walk_graph(graph->irg, update_node_stat, NULL, graph);
1189
1190 #if 0
1191         /* Uncomment this code if chain-call means exactly one call. */
1192         entry = opcode_get_entry(op_Call, graph->opcode_hash);
1193
1194         /* check if we have more than 1 call */
1195         if (cnt_gt(entry->cnt_alive, 1))
1196                 graph->is_chain_call = 0;
1197 #endif
1198
1199         /* recursive functions are never chain calls, leaf functions don't have calls */
1200         if (graph->is_recursive || graph->is_leaf)
1201                 graph->is_chain_call = 0;
1202
1203         /* we assume every graph is walked only ONCE, so we can sum up the global count here */
1204         foreach_pset(graph->opcode_hash, entry) {
1205                 node_entry_t *g_entry = opcode_get_entry(entry->op, global->opcode_hash);
1206
1207                 /* update the node counter */
1208                 cnt_add(&g_entry->cnt_alive, &entry->cnt_alive);
1209         }  /* foreach_pset */
1210
1211         /* count the number of address calculations */
1212         if (graph->irg != get_const_code_irg()) {
1213                 ir_graph *rem = current_ir_graph;
1214
1215                 assure_irg_outs(graph->irg);
1216
1217                 /* Must be done on the outs graph */
1218                 current_ir_graph = graph->irg;
1219                 irg_out_walk(get_irg_start(graph->irg), NULL, mark_address_calc, graph);
1220                 current_ir_graph = rem;
1221
1222 #ifdef DUMP_ADR_MODE
1223                 /* register the vcg hook and dump the graph for test */
1224                 set_dump_node_vcgattr_hook(stat_adr_mark_hook);
1225                 dump_ir_block_graph(graph->irg, "-adr");
1226                 set_dump_node_vcgattr_hook(NULL);
1227 #endif /* DUMP_ADR_MODE */
1228
1229                 irg_walk_graph(graph->irg, NULL, count_adr_ops, graph);
1230         }  /* if */
1231
1232         /* count the DAGs */
1233         if (status->stat_options & FIRMSTAT_COUNT_DAG)
1234                 count_dags_in_graph(global, graph);
1235
1236         /* calculate the patterns of this graph */
1237         stat_calc_pattern_history(graph->irg);
1238
1239         /* a leaf function does not call others */
1240         if (graph->is_leaf)
1241                 graph->is_leaf_call = LCS_NON_LEAF_CALL;
1242         else if (graph->is_leaf_call == LCS_UNKNOWN) {
1243                 /* we still don't know if this graph calls leaf-functions, so enqueue */
1244                 pdeq_putl(status->wait_q, graph);
1245         }  /* if */
1246
1247         /* we have analyzed this graph */
1248         graph->is_analyzed = 1;
1249
1250         /* accumulate all counters */
1251         for (i = 0; i < _gcnt_last; ++i)
1252                 cnt_add(&global->cnt[i], &graph->cnt[i]);
1253 }  /* update_graph_stat */
1254
1255 /**
1256  * Called for every graph that was on the wait_q in stat_dump_snapshot();
1257  * finishes all statistic info calculations.
1258  *
1259  * @param global    The global entry
1260  * @param graph     The current entry
1261  */
1262 static void update_graph_stat_2(graph_entry_t *global, graph_entry_t *graph)
1263 {
1264         (void) global;
1265         if (graph->is_deleted) {
1266                 /* deleted, ignore */
1267                 return;
1268         }
1269
1270         if (graph->irg) {
1271                 /* count the nodes in the graph */
1272                 irg_walk_graph(graph->irg, update_node_stat_2, NULL, graph);
1273
1274                 if (graph->is_leaf_call == LCS_UNKNOWN)
1275                         graph->is_leaf_call = LCS_LEAF_CALL;
1276         }  /* if */
1277 }  /* update_graph_stat_2 */
1278
1279 /**
1280  * Register a dumper.
1281  */
1282 static void stat_register_dumper(const dumper_t *dumper) {
1283         dumper_t *p = xmalloc(sizeof(*p));
1284
1285         if (p) {
1286                 memcpy(p, dumper, sizeof(*p));
1287
1288                 p->next        = status->dumper;
1289                 p->status      = status;
1290                 status->dumper = p;
1291         }
1292
1293         /* FIXME: memory leak */
1294 }  /* stat_register_dumper */
1295
1296 /**
1297  * Dumps the statistics of an IR graph.
1298  */
1299 static void stat_dump_graph(graph_entry_t *entry) {
1300         dumper_t *dumper;
1301
1302         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1303                 if (dumper->dump_graph)
1304                         dumper->dump_graph(dumper, entry);
1305         }  /* for */
1306 }  /* stat_dump_graph */
1307
1308 /**
1309  * Calls all registered dumper functions.
1310  */
1311 static void stat_dump_registered(graph_entry_t *entry) {
1312         dumper_t *dumper;
1313
1314         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1315                 if (dumper->func_map) {
1316                         dump_graph_FUNC func;
1317
1318                         foreach_pset(dumper->func_map, func)
1319                                 func(dumper, entry);
1320                 }  /* if */
1321         }  /* for */
1322 }  /* stat_dump_registered */
1323
1324 /**
1325  * Dumps a constant table.
1326  */
1327 static void stat_dump_consts(const constant_info_t *tbl) {
1328         dumper_t *dumper;
1329
1330         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1331                 if (dumper->dump_const_tbl)
1332                         dumper->dump_const_tbl(dumper, tbl);
1333         }  /* for */
1334 }  /* stat_dump_consts */
1335
1336 /**
1337  * Dumps the parameter distribution
1338  */
1339 static void stat_dump_param_tbl(const distrib_tbl_t *tbl, graph_entry_t *global) {
1340         dumper_t *dumper;
1341
1342         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1343                 if (dumper->dump_param_tbl)
1344                         dumper->dump_param_tbl(dumper, tbl, global);
1345         }  /* for */
1346 }  /* stat_dump_param_tbl */
1347
1348 /**
1349  * Dumps the optimization counter
1350  */
1351 static void stat_dump_opt_cnt(const counter_t *tbl, unsigned len) {
1352         dumper_t *dumper;
1353
1354         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1355                 if (dumper->dump_opt_cnt)
1356                         dumper->dump_opt_cnt(dumper, tbl, len);
1357         }  /* for */
1358 }  /* stat_dump_opt_cnt */
1359
1360 /**
1361  * Initialize the dumper.
1362  */
1363 static void stat_dump_init(const char *name) {
1364         dumper_t *dumper;
1365
1366         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1367                 if (dumper->init)
1368                         dumper->init(dumper, name);
1369         }  /* for */
1370 }  /* stat_dump_init */
1371
1372 /**
1373  * Finish the dumper.
1374  */
1375 static void stat_dump_finish(void) {
1376         dumper_t *dumper;
1377
1378         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1379                 if (dumper->finish)
1380                         dumper->finish(dumper);
1381         }  /* for */
1382 }  /* stat_dump_finish */
1383
1384 /**
1385  * Register an additional function for all dumpers.
1386  */
1387 void stat_register_dumper_func(dump_graph_FUNC func) {
1388         dumper_t *dumper;
1389
1390         for (dumper = status->dumper; dumper; dumper = dumper->next) {
1391                 if (! dumper->func_map)
1392                         dumper->func_map = pset_new_ptr(3);
1393                 pset_insert_ptr(dumper->func_map, func);
1394         }  /* for */
1395 }  /* stat_register_dumper_func */
1396
1397 /* ---------------------------------------------------------------------- */
1398
1399 /*
1400  * Helper: get an ir_op from an opcode.
1401  */
1402 ir_op *stat_get_op_from_opcode(ir_opcode code) {
1403         return opcode_find_entry(code, status->ir_op_hash);
1404 }  /* stat_get_op_from_opcode */
1405
1406 /**
1407  * Hook: A new IR op is registered.
1408  *
1409  * @param ctx  the hook context
1410  * @param op   the new IR opcode that was created.
1411  */
1412 static void stat_new_ir_op(void *ctx, ir_op *op) {
1413         (void) ctx;
1414         if (! status->stat_options)
1415                 return;
1416
1417         STAT_ENTER;
1418         {
1419                 graph_entry_t *graph = graph_get_entry(NULL, status->irg_hash);
1420
1421                 /* execute for side effect :-) */
1422                 (void)opcode_get_entry(op, graph->opcode_hash);
1423
1424                 pset_insert(status->ir_op_hash, op, op->code);
1425         }
1426         STAT_LEAVE;
1427 }  /* stat_new_ir_op */
1428
1429 /**
1430  * Hook: An IR op is freed.
1431  *
1432  * @param ctx  the hook context
1433  * @param op   the IR opcode that is freed
1434  */
1435 static void stat_free_ir_op(void *ctx, ir_op *op) {
1436         (void) ctx;
1437         (void) op;
1438         if (! status->stat_options)
1439                 return;
1440
1441         STAT_ENTER;
1442         {
1443         }
1444         STAT_LEAVE;
1445 }  /* stat_free_ir_op */
1446
1447 /**
1448  * Hook: A new node is created.
1449  *
1450  * @param ctx   the hook context
1451  * @param irg   the IR graph on which the node is created
1452  * @param node  the new IR node that was created
1453  */
1454 static void stat_new_node(void *ctx, ir_graph *irg, ir_node *node) {
1455         (void) ctx;
1456         (void) irg;
1457         if (! status->stat_options)
1458                 return;
1459
1460         /* do NOT count during dead node elimination */
1461         if (status->in_dead_node_elim)
1462                 return;
1463
1464         STAT_ENTER;
1465         {
1466                 node_entry_t *entry;
1467                 graph_entry_t *graph;
1468                 ir_op *op = stat_get_irn_op(node);
1469
1470                 /* increase global value */
1471                 graph = graph_get_entry(NULL, status->irg_hash);
1472                 entry = opcode_get_entry(op, graph->opcode_hash);
1473                 cnt_inc(&entry->new_node);
1474
1475                 /* increase local value */
1476                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1477                 entry = opcode_get_entry(op, graph->opcode_hash);
1478                 cnt_inc(&entry->new_node);
1479         }
1480         STAT_LEAVE;
1481 }  /* stat_new_node */
1482
1483 /**
1484  * Hook: A node is changed into an Id node
1485  *
1486  * @param ctx   the hook context
1487  * @param node  the IR node that will be turned into an Id
1488  */
1489 static void stat_turn_into_id(void *ctx, ir_node *node) {
1490         (void) ctx;
1491         if (! status->stat_options)
1492                 return;
1493
1494         STAT_ENTER;
1495         {
1496                 node_entry_t *entry;
1497                 graph_entry_t *graph;
1498                 ir_op *op = stat_get_irn_op(node);
1499
1500                 /* increase global value */
1501                 graph = graph_get_entry(NULL, status->irg_hash);
1502                 entry = opcode_get_entry(op, graph->opcode_hash);
1503                 cnt_inc(&entry->into_Id);
1504
1505                 /* increase local value */
1506                 graph = graph_get_entry(current_ir_graph, status->irg_hash);
1507                 entry = opcode_get_entry(op, graph->opcode_hash);
1508                 cnt_inc(&entry->into_Id);
1509         }
1510         STAT_LEAVE;
1511 }  /* stat_turn_into_id */
1512
1513 /**
1514  * Hook: A new graph was created
1515  *
1516  * @param ctx  the hook context
1517  * @param irg  the new IR graph that was created
1518  * @param ent  the entity of this graph
1519  */
1520 static void stat_new_graph(void *ctx, ir_graph *irg, ir_entity *ent) {
1521         (void) ctx;
1522         if (! status->stat_options)
1523                 return;
1524
1525         STAT_ENTER;
1526         {
1527                 /* execute for side effect :-) */
1528                 graph_entry_t * graph = graph_get_entry(irg, status->irg_hash);
1529
1530                 graph->ent           = ent;
1531                 graph->is_deleted    = 0;
1532                 graph->is_leaf       = 0;
1533                 graph->is_leaf_call  = 0;
1534                 graph->is_recursive  = 0;
1535                 graph->is_chain_call = 0;
1536                 graph->is_analyzed   = 0;
1537         }
1538         STAT_LEAVE;
1539 }  /* stat_new_graph */
1540
1541 /**
1542  * Hook: A graph will be deleted
1543  *
1544  * @param ctx  the hook context
1545  * @param irg  the IR graph that will be deleted
1546  *
1547  * Note that we still hold the information for this graph
1548  * in our hash maps; only a flag is set, which prevents this
1549  * information from being changed. It is "frozen" from now on.
1550  */
1551 static void stat_free_graph(void *ctx, ir_graph *irg) {
1552         (void) ctx;
1553         if (! status->stat_options)
1554                 return;
1555
1556         STAT_ENTER;
1557         {
1558                 graph_entry_t *graph  = graph_get_entry(irg, status->irg_hash);
1559                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
1560
1561                 graph->is_deleted = 1;
1562
1563                 if (status->stat_options & FIRMSTAT_COUNT_DELETED) {
1564                         /* count the nodes of the graph now; it will be destroyed later */
1565                         update_graph_stat(global, graph);
1566                 }  /* if */
1567         }
1568         STAT_LEAVE;
1569 }  /* stat_free_graph */
1570
1571 /**
1572  * Hook: A walk over a graph is initiated. Do not count walks from statistic code.
1573  *
1574  * @param ctx  the hook context
1575  * @param irg  the IR graph that will be walked
1576  * @param pre  the pre walker
1577  * @param post the post walker
1578  */
1579 static void stat_irg_walk(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1580 {
1581         (void) ctx;
1582         (void) pre;
1583         (void) post;
1584         if (! status->stat_options)
1585                 return;
1586
1587         STAT_ENTER_SINGLE;
1588         {
1589                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1590
1591                 cnt_inc(&graph->cnt[gcnt_acc_walked]);
1592         }
1593         STAT_LEAVE;
1594 }  /* stat_irg_walk */
1595
1596 /**
1597  * Hook: A walk over a graph in block-wise order is initiated. Do not count walks from statistics code.
1598  *
1599  * @param ctx  the hook context
1600  * @param irg  the IR graph that will be walked
1601  * @param pre  the pre walker
1602  * @param post the post walker
1603  */
1604 static void stat_irg_walk_blkwise(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post)
1605 {
1606         /* for now, do NOT differentiate between blockwise and normal */
1607         stat_irg_walk(ctx, irg, pre, post);
1608 }  /* stat_irg_walk_blkwise */
1609
1610 /**
1611  * Hook: A walk over the graph's blocks is initiated. Do not count walks from statistics code.
1612  *
1613  * @param ctx  the hook context
1614  * @param irg  the IR graph that will be walked
1615  * @param node the IR node from which the block walk starts
1616  * @param pre  the pre walker
1617  * @param post the post walker
1618  */
1619 static void stat_irg_block_walk(void *ctx, ir_graph *irg, ir_node *node, generic_func *pre, generic_func *post)
1620 {
1621         (void) ctx;
1622         (void) node;
1623         (void) pre;
1624         (void) post;
1625         if (! status->stat_options)
1626                 return;
1627
1628         STAT_ENTER_SINGLE;
1629         {
1630                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1631
1632                 cnt_inc(&graph->cnt[gcnt_acc_walked_blocks]);
1633         }
1634         STAT_LEAVE;
1635 }  /* stat_irg_block_walk */
1636
1637 /**
1638  * Called for every node that is removed due to an optimization.
1639  *
1640  * @param n     the IR node that will be removed
1641  * @param hmap  the hash map containing ir_op* -> opt_entry_t*
1642  */
1643 static void removed_due_opt(ir_node *n, hmap_opt_entry_t *hmap) {
1644         ir_op *op          = stat_get_irn_op(n);
1645         opt_entry_t *entry = opt_get_entry(op, hmap);
1646
1647         /* increase the counter for this optimization kind in the given map */
1648         cnt_inc(&entry->count);
1649 }  /* removed_due_opt */
1650
1651 /**
1652  * Hook: Some nodes were optimized into some others due to an optimization.
1653  *
1654  * @param ctx  the hook context
1655  */
1656 static void stat_merge_nodes(
1657     void *ctx,
1658     ir_node **new_node_array, int new_num_entries,
1659     ir_node **old_node_array, int old_num_entries,
1660     hook_opt_kind opt)
1661 {
1662         (void) ctx;
1663         if (! status->stat_options)
1664                 return;
1665
1666         STAT_ENTER;
1667         {
1668                 int i, j;
1669                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1670
1671                 cnt_inc(&status->num_opts[opt]);
1672                 if (status->reassoc_run)
1673                         opt = HOOK_OPT_REASSOC;
1674
1675                 for (i = 0; i < old_num_entries; ++i) {
1676                         /* a node might appear in both the new and the old array; if we
1677                            find it in both sets, it was NOT removed */
1678                         for (j = 0; j < new_num_entries; ++j) {
1679                                 if (old_node_array[i] == new_node_array[j])
1680                                         break;
1681                         }  /* for */
1682                         if (j >= new_num_entries) {
1683                                 int xopt = opt;
1684
1685                                 /* sometimes it is not detected that the node was replaced by a Const */
1686                                 if (opt == HOOK_OPT_CONFIRM && new_num_entries == 1) {
1687                                         ir_op *op = get_irn_op(new_node_array[0]);
1688
1689                                         if (op == op_Const || op == op_SymConst)
1690                                                 xopt = HOOK_OPT_CONFIRM_C;
1691                                 }  /* if */
1692
1693                                 removed_due_opt(old_node_array[i], graph->opt_hash[xopt]);
1694                         }  /* if */
1695                 }  /* for */
1696         }
1697         STAT_LEAVE;
1698 }  /* stat_merge_nodes */
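
/*
 * Caller sketch (illustrative, not part of the original code): an optimization
 * that folds a redundant node into an already existing one reports both node
 * arrays through the hook dispatcher, which then invokes stat_merge_nodes()
 * above.  Assuming the hook_merge_nodes() convenience macro from irhooks.h and
 * the HOOK_OPT_CSE kind, such a report could look like:
 *
 *     ir_node *old_arr[] = { redundant_node };
 *     ir_node *new_arr[] = { existing_node };
 *     hook_merge_nodes(new_arr, 1, old_arr, 1, HOOK_OPT_CSE);
 *
 * Every old node that does not reappear in the new array is counted as removed
 * by the given optimization kind.
 */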
1699
1700 /**
1701  * Hook: Reassociation is started/stopped.
1702  *
1703  * @param ctx   the hook context
1704  * @param flag  if non-zero, reassociation is started, otherwise it is stopped
1705  */
1706 static void stat_reassociate(void *ctx, int flag) {
1707         (void) ctx;
1708         if (! status->stat_options)
1709                 return;
1710
1711         STAT_ENTER;
1712         {
1713                 status->reassoc_run = flag;
1714         }
1715         STAT_LEAVE;
1716 }  /* stat_reassociate */
1717
1718 /**
1719  * Hook: A node was lowered into other nodes
1720  *
1721  * @param ctx  the hook context
1722  * @param node the IR node that will be lowered
1723  */
1724 static void stat_lower(void *ctx, ir_node *node) {
1725         (void) ctx;
1726         if (! status->stat_options)
1727                 return;
1728
1729         STAT_ENTER;
1730         {
1731                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1732
1733                 removed_due_opt(node, graph->opt_hash[HOOK_LOWERED]);
1734         }
1735         STAT_LEAVE;
1736 }  /* stat_lower */
1737
1738 /**
1739  * Hook: A graph was inlined.
1740  *
1741  * @param ctx  the hook context
1742  * @param call the IR call that will be changed into the body of
1743  *             the called IR graph
1744  * @param called_irg  the IR graph representing the called routine
1745  */
1746 static void stat_inline(void *ctx, ir_node *call, ir_graph *called_irg)
1747 {
1748         (void) ctx;
1749         if (! status->stat_options)
1750                 return;
1751
1752         STAT_ENTER;
1753         {
1754                 ir_graph *irg = get_irn_irg(call);
1755                 graph_entry_t *i_graph = graph_get_entry(called_irg, status->irg_hash);
1756                 graph_entry_t *graph   = graph_get_entry(irg, status->irg_hash);
1757
1758                 cnt_inc(&graph->cnt[gcnt_acc_got_inlined]);
1759                 cnt_inc(&i_graph->cnt[gcnt_acc_was_inlined]);
1760         }
1761         STAT_LEAVE;
1762 }  /* stat_inline */
1763
1764 /**
1765  * Hook: A graph with tail-recursions was optimized.
1766  *
1767  * @param ctx  the hook context
1768  */
1769 static void stat_tail_rec(void *ctx, ir_graph *irg, int n_calls) {
1770         (void) ctx;
1771         if (! status->stat_options)
1772                 return;
1773
1774         STAT_ENTER;
1775         {
1776                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1777
1778                 graph->num_tail_recursion += n_calls;
1779         }
1780         STAT_LEAVE;
1781 }  /* stat_tail_rec */
1782
1783 /**
1784  * Hook: Strength reduction was performed on an iteration variable.
1785  *
1786  * @param ctx  the hook context
1787  */
1788 static void stat_strength_red(void *ctx, ir_graph *irg, ir_node *strong) {
1789         (void) ctx;
1790         if (! status->stat_options)
1791                 return;
1792
1793         STAT_ENTER;
1794         {
1795                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1796                 cnt_inc(&graph->cnt[gcnt_acc_strength_red]);
1797
1798                 removed_due_opt(strong, graph->opt_hash[HOOK_OPT_STRENGTH_RED]);
1799         }
1800         STAT_LEAVE;
1801 }  /* stat_strength_red */
1802
1803 /**
1804  * Hook: Start/Stop the dead node elimination.
1805  *
1806  * @param ctx  the hook context
1807  */
1808 static void stat_dead_node_elim(void *ctx, ir_graph *irg, int start) {
1809         (void) ctx;
1810         (void) irg;
1811         if (! status->stat_options)
1812                 return;
1813
1814         status->in_dead_node_elim = (start != 0);
1815 }  /* stat_dead_node_elim */
1816
1817 /**
1818  * Hook: if-conversion was tried.
1819  */
1820 static void stat_if_conversion(void *context, ir_graph *irg, ir_node *phi,
1821                                int pos, ir_node *mux, if_result_t reason)
1822 {
1823         (void) context;
1824         (void) phi;
1825         (void) pos;
1826         (void) mux;
1827         if (! status->stat_options)
1828                 return;
1829
1830         STAT_ENTER;
1831         {
1832                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1833
1834                 cnt_inc(&graph->cnt[gcnt_if_conv + reason]);
1835         }
1836         STAT_LEAVE;
1837 }  /* stat_if_conversion */
1838
1839 /**
1840  * Hook: real function call was optimized.
1841  */
1842 static void stat_func_call(void *context, ir_graph *irg, ir_node *call)
1843 {
1844         (void) context;
1845         (void) call;
1846         if (! status->stat_options)
1847                 return;
1848
1849         STAT_ENTER;
1850         {
1851                 graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
1852
1853                 cnt_inc(&graph->cnt[gcnt_acc_real_func_call]);
1854         }
1855         STAT_LEAVE;
1856 }  /* stat_func_call */
1857
1858 /**
1859  * Hook: A multiply was replaced by a series of Shifts/Adds/Subs.
1860  *
1861  * @param ctx  the hook context
1862  */
1863 static void stat_arch_dep_replace_mul_with_shifts(void *ctx, ir_node *mul) {
1864         (void) ctx;
1865         if (! status->stat_options)
1866                 return;
1867
1868         STAT_ENTER;
1869         {
1870                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1871                 removed_due_opt(mul, graph->opt_hash[HOOK_OPT_ARCH_DEP]);
1872         }
1873         STAT_LEAVE;
1874 }  /* stat_arch_dep_replace_mul_with_shifts */
1875
1876 /**
1877  * Hook: A division by const was replaced.
1878  *
1879  * @param ctx   the hook context
1880  * @param node  the division node that will be optimized
1881  */
1882 static void stat_arch_dep_replace_division_by_const(void *ctx, ir_node *node) {
1883         (void) ctx;
1884         if (! status->stat_options)
1885                 return;
1886
1887         STAT_ENTER;
1888         {
1889                 graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);
1890                 removed_due_opt(node, graph->opt_hash[HOOK_OPT_ARCH_DEP]);
1891         }
1892         STAT_LEAVE;
1893 }  /* stat_arch_dep_replace_division_by_const */
1894
1895 /**
1896  * Update the register pressure of a block.
1897  *
1898  * @param irg        the irg containing the block
1899  * @param block      the block for which the reg pressure should be set
1900  * @param pressure   the pressure
1901  * @param class_name the name of the register class
1902  */
1903 void stat_be_block_regpressure(ir_graph *irg, ir_node *block, int pressure, const char *class_name)
1904 {
1905         if (! status->stat_options)
1906                 return;
1907
1908         STAT_ENTER;
1909         {
1910                 graph_entry_t        *graph = graph_get_entry(irg, status->irg_hash);
1911                 be_block_entry_t     *block_ent;
1912                 reg_pressure_entry_t *rp_ent;
1913
1914                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
1915                 rp_ent    = obstack_alloc(&status->be_data, sizeof(*rp_ent));
1916                 memset(rp_ent, 0, sizeof(*rp_ent));
1917
1918                 rp_ent->class_name = class_name;
1919                 rp_ent->pressure   = pressure;
1920
1921                 pset_insert(block_ent->reg_pressure, rp_ent, HASH_PTR(class_name));
1922         }
1923         STAT_LEAVE;
1924 }  /* stat_be_block_regpressure */
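
/*
 * Usage sketch (illustrative only, names and values are placeholders): a
 * backend could record a measured pressure of 5 for a register class called
 * "gp" like this:
 *
 *     stat_be_block_regpressure(irg, block, 5, "gp");
 *
 * The entry is stored in the per-block reg_pressure set, keyed by the pointer
 * of the class name.
 */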
1925
1926 /**
1927  * Update the distribution of ready nodes of a block
1928  *
1929  * @param irg        the irg containing the block
1930  * @param block      the block for which the ready-node distribution is updated
1931  * @param num_ready  the number of ready nodes
1932  */
1933 void stat_be_block_sched_ready(ir_graph *irg, ir_node *block, int num_ready)
1934 {
1935         if (! status->stat_options)
1936                 return;
1937
1938         STAT_ENTER;
1939         {
1940                 graph_entry_t    *graph = graph_get_entry(irg, status->irg_hash);
1941                 be_block_entry_t *block_ent;
1942
1943                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
1944
1945                 /* increase the counter of corresponding number of ready nodes */
1946                 stat_inc_int_distrib_tbl(block_ent->sched_ready, num_ready);
1947         }
1948         STAT_LEAVE;
1949 }  /* stat_be_block_sched_ready */
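
/*
 * Usage sketch (illustrative only): a list scheduler could report how many
 * nodes were ready in each scheduling step, e.g.
 *
 *     stat_be_block_sched_ready(irg, block, num_ready_nodes);
 *
 * which increments the distribution bucket for exactly that ready-count.
 */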
1950
1951 /**
1952  * Update the permutation statistic of a block.
1953  *
1954  * @param class_name the name of the register class
1955  * @param n_regs     number of registers in the register class
1956  * @param perm       the perm node
1957  * @param block      the block containing the perm
1958  * @param size       the size of the perm
1959  * @param real_size  number of pairs with different registers
1960  */
1961 void stat_be_block_stat_perm(const char *class_name, int n_regs, ir_node *perm, ir_node *block,
1962                              int size, int real_size)
1963 {
1964         if (! status->stat_options)
1965                 return;
1966
1967         STAT_ENTER;
1968         {
1969                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
1970                 be_block_entry_t   *block_ent;
1971                 perm_class_entry_t *pc_ent;
1972                 perm_stat_entry_t  *ps_ent;
1973
1974                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
1975                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
1976                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
1977
1978                 pc_ent->n_regs = n_regs;
1979
1980                 /* update information */
1981                 ps_ent->size      = size;
1982                 ps_ent->real_size = real_size;
1983         }
1984         STAT_LEAVE;
1985 }  /* stat_be_block_stat_perm */
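
/*
 * Usage sketch (illustrative only, values are made up): a Perm over a register
 * class "gp" with 16 registers that permutes 4 values, 3 of which really
 * change their register, could be recorded as:
 *
 *     stat_be_block_stat_perm("gp", 16, perm, block, 4, 3);
 */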
1986
1987 /**
1988  * Update the permutation statistic of a single perm.
1989  *
1990  * @param class_name the name of the register class
1991  * @param perm       the perm node
1992  * @param block      the block containing the perm
1993  * @param is_chain   1 if chain, 0 if cycle
1994  * @param size       length of the cycle/chain
1995  * @param n_ops      the number of ops representing this cycle/chain after lowering
1996  */
1997 void stat_be_block_stat_permcycle(const char *class_name, ir_node *perm, ir_node *block,
1998                                   int is_chain, int size, int n_ops)
1999 {
2000         if (! status->stat_options)
2001                 return;
2002
2003         STAT_ENTER;
2004         {
2005                 graph_entry_t      *graph = graph_get_entry(get_irn_irg(block), status->irg_hash);
2006                 be_block_entry_t   *block_ent;
2007                 perm_class_entry_t *pc_ent;
2008                 perm_stat_entry_t  *ps_ent;
2009
2010                 block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash);
2011                 pc_ent    = perm_class_get_entry(&status->be_data, class_name, block_ent->perm_class_stat);
2012                 ps_ent    = perm_stat_get_entry(&status->be_data, perm, pc_ent->perm_stat);
2013
2014                 if (is_chain) {
2015                         ps_ent->n_copies += n_ops;
2016                         stat_inc_int_distrib_tbl(ps_ent->chains, size);
2017                 } else {
2018                         ps_ent->n_exchg += n_ops;
2019                         stat_inc_int_distrib_tbl(ps_ent->cycles, size);
2020                 }  /* if */
2021         }
2022         STAT_LEAVE;
2023 }  /* stat_be_block_stat_permcycle */
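
/*
 * Usage sketch (illustrative only, values are made up): when the Perm above is
 * lowered, each resulting chain or cycle can be reported separately, e.g. a
 * 3-element cycle implemented by 2 exchange instructions:
 *
 *     stat_be_block_stat_permcycle("gp", perm, block, 0, 3, 2);
 *
 * With is_chain != 0 the n_ops are counted as copies, otherwise as exchanges.
 */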
2024
2025 /* Dumps a statistics snapshot. */
2026 void stat_dump_snapshot(const char *name, const char *phase)
2027 {
2028         char fname[2048];
2029         const char *p;
2030         int l;
2031
2032         if (! status->stat_options)
2033                 return;
2034
2035         STAT_ENTER;
2036         {
2037                 graph_entry_t *entry;
2038                 graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);
2039
2040                 /*
2041                  * The constant counter is only global, so we clear it here.
2042                  * Note that it does NOT contain the constants in DELETED
2043                  * graphs due to this.
2044                  */
2045                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2046                         stat_const_clear(status);
2047
2048                 /* build the name */
2049                 p = strrchr(name, '/');
2050 #ifdef _WIN32
2051                 {
2052                         const char *q;
2053
2054                         q = strrchr(name, '\\');
2055
2056                         /* NULL might not be the smallest pointer */
2057                         if (q && (!p || q > p))
2058                                 p = q;
2059                 }
2060 #endif /* _WIN32 */
2061                 if (p) {
2062                         ++p;
2063                         l = p - name;
2064
2065                         if (l > (int) (sizeof(fname) - 1))
2066                                 l = sizeof(fname) - 1;
2067
2068                         memcpy(fname, name, l);
2069                         fname[l] = '\0';
2070                 } else {
2071                         fname[0] = '\0';
2072                         p = name;
2073                 }  /* if */
2074                 strncat(fname, "firmstat-", sizeof(fname) - strlen(fname) - 1);
2075                 strncat(fname, phase, sizeof(fname) - strlen(fname) - 1);
2076                 strncat(fname, "-", sizeof(fname) - strlen(fname) - 1);
2077                 strncat(fname, p, sizeof(fname) - strlen(fname) - 1);
2078
2079                 stat_dump_init(fname);
2080
2081                 /* calculate the graph statistics */
2082                 for (entry = pset_first(status->irg_hash); entry; entry = pset_next(status->irg_hash)) {
2083                         if (entry->irg == NULL) {
2084                                 /* special entry for the global count */
2085                                 continue;
2086                         }  /* if */
2087                         if (! entry->is_deleted) {
2088                                 /* the graph is still alive, count the nodes on it */
2089                                 update_graph_stat(global, entry);
2090                         }  /* if */
2091                 }  /* for */
2092
2093                 /* some calculations are dependent, we pushed them on the wait_q */
2094                 while (! pdeq_empty(status->wait_q)) {
2095                         entry = pdeq_getr(status->wait_q);
2096
2097                         update_graph_stat_2(global, entry);
2098                 }  /* while */
2099
2100                 /* dump per graph */
2101                 for (entry = pset_first(status->irg_hash); entry; entry = pset_next(status->irg_hash)) {
2102                         if (entry->irg == NULL) {
2103                                 /* special entry for the global count */
2104                                 continue;
2105                         }  /* if */
2106
2107                         if (! entry->is_deleted || status->stat_options & FIRMSTAT_COUNT_DELETED) {
2108                                 stat_dump_graph(entry);
2109                                 stat_dump_registered(entry);
2110                         }  /* if */
2111
2112                         if (! entry->is_deleted) {
2113                                 /* clear the counters that are not accumulated */
2114                                 graph_clear_entry(entry, 0);
2115                         }  /* if */
2116                 }  /* for */
2117
2118                 /* dump global */
2119                 stat_dump_graph(global);
2120
2121                 /* dump the const info */
2122                 if (status->stat_options & FIRMSTAT_COUNT_CONSTS)
2123                         stat_dump_consts(&status->const_info);
2124
2125                 /* dump the parameter distribution */
2126                 stat_dump_param_tbl(status->dist_param_cnt, global);
2127
2128                 /* dump the optimization counter and clear them */
2129                 stat_dump_opt_cnt(status->num_opts, ARR_SIZE(status->num_opts));
2130                 clear_optimization_counter();
2131
2132                 stat_dump_finish();
2133
2134                 stat_finish_pattern_history(fname);
2135
2136                 /* clear the global counter here */
2137                 {
2138                         node_entry_t *entry;
2139
2140                         for (entry = pset_first(global->opcode_hash); entry; entry = pset_next(global->opcode_hash)) {
2141                                 opcode_clear_entry(entry);
2142                         }  /* for */
2143                         /* clear all global counters */
2144                         graph_clear_entry(global, 1);
2145                 }
2146         }
2147         STAT_LEAVE;
2148 }  /* stat_dump_snapshot */
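
/*
 * Example (assumed typical use): a snapshot taken after some phase, e.g.
 *
 *     stat_dump_snapshot("test/queens.c", "lower");
 *
 * builds the base name "test/firmstat-lower-queens.c" with the code above;
 * the registered dumpers derive their actual output file names from it.
 */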
2149
2150 /** the hook entries for the Firm statistics module */
2151 static hook_entry_t stat_hooks[hook_last];
2152
2153 /* initialize the statistics module. */
2154 void firm_init_stat(unsigned enable_options)
2155 {
2156 #define X(a)  a, sizeof(a)-1
2157 #define HOOK(h, fkt) \
2158         stat_hooks[h].hook._##h = fkt; register_hook(h, &stat_hooks[h])
2159         unsigned num = 0;
2160
2161         if (! (enable_options & FIRMSTAT_ENABLED))
2162                 return;
2163
2164         status = xmalloc(sizeof(*status));
2165         memset(status, 0, sizeof(*status));
2166
2167         /* enable statistics */
2168         status->stat_options = enable_options & FIRMSTAT_ENABLED ? enable_options : 0;
2169
2170         /* register all hooks */
2171         HOOK(hook_new_ir_op,                          stat_new_ir_op);
2172         HOOK(hook_free_ir_op,                         stat_free_ir_op);
2173         HOOK(hook_new_node,                           stat_new_node);
2174         HOOK(hook_turn_into_id,                       stat_turn_into_id);
2175         HOOK(hook_new_graph,                          stat_new_graph);
2176         HOOK(hook_free_graph,                         stat_free_graph);
2177         HOOK(hook_irg_walk,                           stat_irg_walk);
2178         HOOK(hook_irg_walk_blkwise,                   stat_irg_walk_blkwise);
2179         HOOK(hook_irg_block_walk,                     stat_irg_block_walk);
2180         HOOK(hook_merge_nodes,                        stat_merge_nodes);
2181         HOOK(hook_reassociate,                        stat_reassociate);
2182         HOOK(hook_lower,                              stat_lower);
2183         HOOK(hook_inline,                             stat_inline);
2184         HOOK(hook_tail_rec,                           stat_tail_rec);
2185         HOOK(hook_strength_red,                       stat_strength_red);
2186         HOOK(hook_dead_node_elim,                     stat_dead_node_elim);
2187         HOOK(hook_if_conversion,                      stat_if_conversion);
2188         HOOK(hook_func_call,                          stat_func_call);
2189         HOOK(hook_arch_dep_replace_mul_with_shifts,   stat_arch_dep_replace_mul_with_shifts);
2190         HOOK(hook_arch_dep_replace_division_by_const, stat_arch_dep_replace_division_by_const);
2191
2192         obstack_init(&status->cnts);
2193         obstack_init(&status->be_data);
2194
2195         /* create the hash-tables */
2196         status->irg_hash   = new_pset(graph_cmp, 8);
2197         status->ir_op_hash = new_pset(opcode_cmp_2, 1);
2198
2199         /* create the wait queue */
2200         status->wait_q     = new_pdeq();
2201
2202         if (enable_options & FIRMSTAT_COUNT_STRONG_OP) {
2203                 /* build the pseudo-ops */
2204
2205                 _op_Phi0.code    = --num;
2206                 _op_Phi0.name    = new_id_from_chars(X("Phi0"));
2207
2208                 _op_PhiM.code    = --num;
2209                 _op_PhiM.name    = new_id_from_chars(X("PhiM"));
2210
2211                 _op_ProjM.code   = --num;
2212                 _op_ProjM.name   = new_id_from_chars(X("ProjM"));
2213
2214                 _op_MulC.code    = --num;
2215                 _op_MulC.name    = new_id_from_chars(X("MulC"));
2216
2217                 _op_DivC.code    = --num;
2218                 _op_DivC.name    = new_id_from_chars(X("DivC"));
2219
2220                 _op_ModC.code    = --num;
2221                 _op_ModC.name    = new_id_from_chars(X("ModC"));
2222
2223                 _op_DivModC.code = --num;
2224                 _op_DivModC.name = new_id_from_chars(X("DivModC"));
2225
2226                 status->op_Phi0    = &_op_Phi0;
2227                 status->op_PhiM    = &_op_PhiM;
2228                 status->op_ProjM   = &_op_ProjM;
2229                 status->op_MulC    = &_op_MulC;
2230                 status->op_DivC    = &_op_DivC;
2231                 status->op_ModC    = &_op_ModC;
2232                 status->op_DivModC = &_op_DivModC;
2233         } else {
2234                 status->op_Phi0    = NULL;
2235                 status->op_PhiM    = NULL;
2236                 status->op_ProjM   = NULL;
2237                 status->op_MulC    = NULL;
2238                 status->op_DivC    = NULL;
2239                 status->op_ModC    = NULL;
2240                 status->op_DivModC = NULL;
2241         }  /* if */
2242
2243         /* for Florian: count the Sel depth */
2244         if (enable_options & FIRMSTAT_COUNT_SELS) {
2245                 _op_SelSel.code    = --num;
2246                 _op_SelSel.name    = new_id_from_chars(X("Sel(Sel)"));
2247
2248                 _op_SelSelSel.code = --num;
2249                 _op_SelSelSel.name = new_id_from_chars(X("Sel(Sel(Sel))"));
2250
2251                 status->op_SelSel    = &_op_SelSel;
2252                 status->op_SelSelSel = &_op_SelSelSel;
2253         } else {
2254                 status->op_SelSel    = NULL;
2255                 status->op_SelSelSel = NULL;
2256         }  /* if */
2257
2258         /* register the dumper */
2259         stat_register_dumper(&simple_dumper);
2260
2261         if (enable_options & FIRMSTAT_CSV_OUTPUT)
2262                 stat_register_dumper(&csv_dumper);
2263
2264         /* initialize the pattern hash */
2265         stat_init_pattern_history(enable_options & FIRMSTAT_PATTERN_ENABLED);
2266
2267         /* initialize the Const options */
2268         if (enable_options & FIRMSTAT_COUNT_CONSTS)
2269                 stat_init_const_cnt(status);
2270
2271         /* distribution table for parameter counts */
2272         status->dist_param_cnt = stat_new_int_distrib_tbl();
2273
2274         clear_optimization_counter();
2275
2276 #undef HOOK
2277 #undef X
2278 }  /* firm_init_stat */
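
/*
 * Initialization sketch (illustrative only): a driver that wants CSV output
 * and constant counting in addition to the basic statistics could call
 *
 *     firm_init_stat(FIRMSTAT_ENABLED | FIRMSTAT_CSV_OUTPUT | FIRMSTAT_COUNT_CONSTS);
 *
 * Without FIRMSTAT_ENABLED in the mask the call returns immediately and the
 * module stays disabled.
 */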
2279
2280 /**
2281  * Frees all dumper structures.
2282  */
2283 static void stat_term_dumper(void) {
2284         dumper_t *dumper, *next_dumper;
2285
2286         for (dumper = status->dumper; dumper; /* iteration done in loop body */ ) {
2287                 if (dumper->func_map)
2288                         del_pset(dumper->func_map);
2289
2290                 next_dumper = dumper->next;
2291                 free(dumper);
2292                 dumper = next_dumper;
2293         }  /* for */
2294 }  /* stat_term_dumper */
2295
2296
2297 /* Terminates the statistics module, frees all memory. */
2298 void stat_term(void) {
2299         if (status != (stat_info_t *)&status_disable) {
2300                 obstack_free(&status->be_data, NULL);
2301                 obstack_free(&status->cnts, NULL);
2302
2303                 stat_term_dumper();
2304
2305                 xfree(status);
2306                 status = (stat_info_t *)&status_disable;
2307         }
2308 }  /* stat_term */
2309
2310 /* returns 1 if statistics were initialized, 0 otherwise */
2311 int stat_is_active(void) {
2312         return status != (stat_info_t *)&status_disable;
2313 }  /* stat_is_active */
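
/*
 * Guard sketch (illustrative only): callers that trigger snapshots themselves
 * can skip the work entirely while statistics are disabled:
 *
 *     if (stat_is_active())
 *             stat_dump_snapshot(file_name, "opt");
 */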
2314
2315 #else
2316
2317 /* initialize the statistics module. */
2318 void firm_init_stat(unsigned enable_options) { (void) enable_options; }
2319
2320 /* Dumps a statistics snapshot */
2321 void stat_dump_snapshot(const char *name, const char *phase) { (void) name; (void) phase; }
2322
2323 /* terminates the statistics module, frees all memory */
2324 void stat_term(void) {}
2325
2326 #endif /* FIRM_STATISTICS */