/*
 * File name:   ir/ir/firmstat.c
 * Purpose:     Statistics for Firm.
 * Copyright:   (c) 2004 Universität Karlsruhe
 * Licence:     This file is protected by the GPL - GNU GENERAL PUBLIC LICENSE.
 */
#include <stddef.h>

# include "irnode_t.h"
# include "irgraph_t.h"
/* let the statistics obstack allocate its chunks via plain malloc/free */
#undef obstack_chunk_alloc
#undef obstack_chunk_free
#define obstack_chunk_alloc malloc
#define obstack_chunk_free free

#ifdef FIRM_STATISTICS

/*
 * just to make some things clear :-), the
 * "typed" hash maps below are all plain psets
 */
#define HASH_MAP(type) pset_##type

typedef pset pset_node_entry_t;
typedef pset pset_graph_entry_t;
typedef pset pset_opt_entry_t;

/*
 * 32 bit should be enough for now
 */
#define STAT_CNT_NUM 1

/* a multi-word counter; cnt[0] is the least significant word */
typedef struct _counter_t {
  unsigned cnt[STAT_CNT_NUM];
  /* ... (closing of _counter_t elided in this excerpt) */

/**
 * An entry for ir_nodes
 */
typedef struct _node_entry_t {
  counter_t cnt_alive;    /**< amount of nodes in this entry */
  counter_t new_node;     /**< amount of new nodes for this entry */
  counter_t into_Id;      /**< amount of nodes that turned into Id's for this entry */
  const ir_op *op;        /**< the op for this entry */
  /* ... (closing of _node_entry_t elided in this excerpt) */
/**
 * An entry for ir_graphs
 */
typedef struct _graph_entry_t {
  HASH_MAP(node_entry_t) *opcode_hash;            /**< hash map containing the opcode counter */
  counter_t cnt_walked;                           /**< walker walked over the graph */
  counter_t cnt_walked_blocks;                    /**< walker walked over the graph blocks */
  counter_t cnt_was_inlined;                      /**< number of times other graph were inlined */
  counter_t cnt_got_inlined;                      /**< number of times this graph was inlined */
  counter_t cnt_edges;                            /**< number of DF edges in this graph */
  /* NOTE(review): judging by stat_inline (callee gets cnt_was_inlined,
     caller gets cnt_got_inlined), the two comments above look swapped
     — confirm against the original file. */
  HASH_MAP(opt_entry_t) *opt_hash[STAT_OPT_MAX];  /**< hash maps containing opcode counter for optimizations */
  ir_graph *irg;                                  /**< the graph of this object */
  entity *ent;                                    /**< the entity of this graph if one exists */
  int deleted;                                    /**< set if this irg was deleted */
  /* ... (closing of _graph_entry_t elided in this excerpt) */

/**
 * An entry for optimized ir_nodes
 */
typedef struct _opt_entry_t {
  counter_t count;    /**< optimization counter */
  const ir_op *op;    /**< the op for this entry */
  /* ... (closing of _opt_entry_t elided in this excerpt) */
/* forward declaration: a dumper writes the collected statistics somewhere */
typedef struct _dumper_t dumper_t;

/**
 * handler for dumping an IRG
 *
 * @param dmp   the dumper
 * @param entry the IR-graph hash map entry
 */
typedef void (*dump_graph_FUNC)(dumper_t *dmp, graph_entry_t *entry);

/**
 * handler for dumper init
 *
 * @param dmp  the dumper
 * @param name name of the file to dump to
 */
typedef void (*dump_init_FUNC)(dumper_t *dmp, const char *name);

/**
 * handler for dumper finish
 *
 * @param dmp the dumper
 */
typedef void (*dump_finish_FUNC)(dumper_t *dmp);

/**
 * a dumper description
 * NOTE(review): the `struct _dumper_t {` opener is elided in this excerpt
 */
  dump_graph_FUNC dump_graph;   /**< handler for dumping an irg */
  dump_init_FUNC init;          /**< handler for init */
  dump_finish_FUNC finish;      /**< handler for finish */
  FILE *f;                      /**< the file to dump to */
  dumper_t *next;               /**< link to the next dumper */
/* the global statistics state */
typedef struct _statistic_info_t {
  struct obstack cnts;                  /**< obstack containing the counters */
  HASH_MAP(graph_entry_t) *irg_hash;    /**< hash map containing the counter for irgs */
  int recursive;                        /**< flag for detecting recursive hook calls */
  int in_dead_node_elim;                /**< set, if dead node elimination runs */
  ir_op *op_Phi0;                       /**< needed pseudo op */
  ir_op *op_PhiM;                       /**< needed pseudo op */
  dumper_t *dumper;                     /**< list of dumper */
  /* ... (closing of _statistic_info_t elided in this excerpt) */

/**
 * names of the optimizations
 * NOTE(review): some entries are elided in this excerpt; the order must
 * match the stat_opt_kind enumeration used to index opt_hash[].
 */
static const char *opt_names[] = {
  "straightening optimization",
  "algebraic simplification",
  "Write-After-Write optimization",
  "Write-After-Read optimization",
  "Tuple optimization",
  "Constant evaluation",

/*
 * need this to be static: the pseudo-ops Phi0/PhiM live for the whole run
 */
static ir_op _op_Phi0, _op_PhiM;

/* ---------------------------------------------------------------------------------- */

/* guards against re-entering the statistic hooks from statistic code itself */
#define STAT_ENTER        ++status->recursive
#define STAT_LEAVE        --status->recursive
#define STAT_ENTER_SINGLE do { if (status->recursive > 0) return; ++status->recursive; } while (0)

/* the module-global statistics state */
static stat_info_t _status, *status = &_status;
/**
 * increment a multi-word counter
 * (body truncated in this excerpt; presumably propagates a carry into
 * the higher words of cnt->cnt[] — confirm against the original file)
 */
static INLINE void cnt_inc(counter_t *cnt)
  for (i = 0; i < STAT_CNT_NUM; ++i) {
/**
 * decrement a multi-word counter, propagating a borrow:
 * the loop only continues while a word wrapped around
 */
static INLINE void cnt_dec(counter_t *cnt)
  for (i = 0; i < STAT_CNT_NUM; ++i) {
    /* -1 converts to UINT_MAX for the unsigned comparison: no borrow -> stop */
    if (--cnt->cnt[i] != -1)
/**
 * set a counter to zero
 */
static INLINE void cnt_clr(counter_t *cnt)
  memset(cnt->cnt, 0, sizeof(cnt->cnt));
209 * add a counter to another
211 static inline void cnt_add(counter_t *dst, const counter_t *src)
215 for (i = 0; i < STAT_CNT_NUM; ++i) {
216 unsigned a = dst->cnt[i] + src->cnt[i] + carry;
219 carry = a <= dst->cnt[i];
221 carry = a < dst->cnt[i];
231 * add an integer to an counter
233 static inline void cnt_add_i(counter_t *dst, int src)
236 unsigned a = dst->cnt[0] + src;
237 unsigned carry = a < dst->cnt[i];
243 for (i = 1; i < STAT_CNT_NUM; ++i) {
244 unsigned a = dst->cnt[i] + carry;
246 carry = a < dst->cnt[i];
/**
 * compare two elements of the opcode hash
 * (returns 0 iff both entries have the same opcode number)
 */
static int opcode_cmp(const void *elt, const void *key)
  const node_entry_t *e1 = elt;
  const node_entry_t *e2 = key;

  return e1->op->code - e2->op->code;
/**
 * compare two elements of the graph hash
 * (a pset comparator only needs equality: 0 iff same irg)
 */
static int graph_cmp(const void *elt, const void *key)
  const graph_entry_t *e1 = elt;
  const graph_entry_t *e2 = key;

  return e1->irg != e2->irg;
/**
 * compare two elements of the optimization hash
 * (0 iff both entries have the same opcode number)
 */
static int opt_cmp(const void *elt, const void *key)
  const opt_entry_t *e1 = elt;
  const opt_entry_t *e2 = key;

  return e1->op->code != e2->op->code;
/**
 * Returns the associated node_entry_t for an ir_op, creating a
 * zero-initialized entry (allocated on status->cnts) on first use.
 */
static node_entry_t *opcode_get_entry(const ir_op *op, pset *set)
  /* look up with the opcode number as hash value */
  elem = pset_find(set, &key, op->code);

  /* not found: allocate a fresh entry on the statistics obstack */
  elem = obstack_alloc(&status->cnts, sizeof(*elem));

  /* clear new counter */
  cnt_clr(&elem->cnt_alive);
  cnt_clr(&elem->new_node);
  cnt_clr(&elem->into_Id);

  /* NOTE(review): key setup, the found-check and the op assignment are
     elided in this excerpt */
  return pset_insert(set, elem, op->code);
315 * calculates a hash value for an irg
316 * Addresses are typically aligned at 32bit, so we ignore the lowest bits
318 static INLINE unsigned irg_hash(const ir_graph *irg)
320 return (unsigned)irg >> 3;
/**
 * Returns the associated graph_entry_t for an irg, creating a fresh
 * entry (with new opcode/optimization hash maps) on first use.
 * An irg of NULL addresses the special global-counter entry.
 */
static graph_entry_t *graph_get_entry(ir_graph *irg, pset *set)
  elem = pset_find(set, &key, irg_hash(irg));

  /* not found: allocate a fresh entry on the statistics obstack */
  elem = obstack_alloc(&status->cnts, sizeof(*elem));

  cnt_clr(&elem->cnt_walked);
  cnt_clr(&elem->cnt_walked_blocks);
  cnt_clr(&elem->cnt_got_inlined);
  cnt_clr(&elem->cnt_was_inlined);
  cnt_clr(&elem->cnt_edges);

  /* new hash table for opcodes here  */
  elem->opcode_hash = new_pset(opcode_cmp, 5);

  /* one optimization hash per optimization kind */
  for (i = 0; i < sizeof(elem->opt_hash)/sizeof(elem->opt_hash[0]); ++i)
    elem->opt_hash[i] = new_pset(opt_cmp, 4);

  /* NOTE(review): key setup and found-check elided in this excerpt */
  return pset_insert(set, elem, irg_hash(irg));
/**
 * Returns the associated opt_entry_t for an ir_op, creating a
 * zero-initialized entry on first use.
 */
static opt_entry_t *opt_get_entry(const ir_op *op, pset *set)
  /* look up with the opcode number as hash value */
  elem = pset_find(set, &key, op->code);

  /* not found: allocate a fresh entry on the statistics obstack */
  elem = obstack_alloc(&status->cnts, sizeof(*elem));

  /* clear new counter */
  cnt_clr(&elem->count);

  /* NOTE(review): key setup, found-check and op assignment elided in
     this excerpt */
  return pset_insert(set, elem, op->code);
/**
 * Returns the ir_op for an IR-node,
 * handles special cases and return pseudo op codes:
 * a Phi without predecessors counts as Phi0, a memory Phi as PhiM.
 */
static ir_op *stat_get_irn_op(const ir_node *node)
  ir_op *op = get_irn_op(node);

  if (op->code == iro_Phi && get_irn_arity(node) == 0) {
    /* special case, a Phi0 node, count on extra counter */
    op = status->op_Phi0;
  else if (op->code == iro_Phi && get_irn_mode(node) == mode_M) {
    /* special case, a Memory Phi node, count on extra counter */
    op = status->op_PhiM;
  /* (closing braces and return elided in this excerpt) */
/**
 * environment for the count walker
 */
typedef struct _cnt_env_t {
  pset *set;              /**< the hash map containing the ir_ops */
  counter_t *cnt_edges;   /**< the edges counter */
  /* ... (closing of _cnt_env_t elided in this excerpt) */
/**
 * walker for reachable nodes count: bumps the per-opcode alive counter
 * and accumulates the node's in-edge count (arity) into the edge counter
 */
static void count_nodes(ir_node *node, void *env)
  cnt_env_t *cenv = env;
  ir_op *op = stat_get_irn_op(node);   /* maps Phi0/PhiM onto the pseudo ops */
  int arity = get_irn_arity(node);

  entry = opcode_get_entry(op, cenv->set);

  cnt_inc(&entry->cnt_alive);
  cnt_add_i(cenv->cnt_edges, arity);
/**
 * count all alive nodes in a graph and fold the per-graph results
 * into the global entry's counters
 */
static void count_nodes_in_graph(graph_entry_t * global, graph_entry_t * graph)
  env.set       = graph->opcode_hash;
  env.cnt_edges = &graph->cnt_edges;

  irg_walk_graph(graph->irg, count_nodes, NULL, &env);

  /* assume we walk every graph only ONCE, we could sum here the global count */
  for (entry = pset_first(graph->opcode_hash); entry; entry = pset_next(graph->opcode_hash)) {
    node_entry_t *g_entry = opcode_get_entry(entry->op, global->opcode_hash);

    /* update the node counter */
    cnt_add(&g_entry->cnt_alive, &entry->cnt_alive);

  /* update the edge counter */
  cnt_add(&global->cnt_edges, &graph->cnt_edges);
/**
 * register a new dumper at the head of the dumper list and let it
 * open its output file
 */
static void stat_register_dumper(dumper_t *dumper, const char *name)
  dumper->next   = status->dumper;
  status->dumper = dumper;

  /* NOTE(review): an `if (dumper->init)` guard is presumably in the
     elided line — confirm */
  dumper->init(dumper, name);
/**
 * dump a graph entry via all registered dumpers that provide a
 * dump_graph handler
 */
static void dump_graph(graph_entry_t *entry)
  for (dumper = status->dumper; dumper; dumper = dumper->next) {
    if (dumper->dump_graph)
      dumper->dump_graph(dumper, entry);
/**
 * let all registered dumpers finish (close files etc.)
 */
static void dump_finish(void)
  for (dumper = status->dumper; dumper; dumper = dumper->next) {
    /* NOTE(review): an `if (dumper->finish)` guard is presumably in the
       elided line — confirm */
    dumper->finish(dumper);
/* ---------------------------------------------------------------------- */

/**
 * dumps an opcode hash into human readable form:
 * one row per opcode, followed by a sum row
 */
static void simple_dump_opcode_hash(dumper_t *dmp, pset *set)
  counter_t f_new_node;

  /* start the sum counters at zero (f_alive/f_Id handling elided here) */
  cnt_clr(&f_new_node);

  fprintf(dmp->f, "%-16s %-8s %-8s %-8s\n", "Opcode", "alive", "created", "->Id");
  for (entry = pset_first(set); entry; entry = pset_next(set)) {
    /* NOTE(review): the counter words are unsigned; %u would match
       better than %d */
    fprintf(dmp->f, "%-16s %8d %8d %8d\n",
      get_id_str(entry->op->name), entry->cnt_alive.cnt[0], entry->new_node.cnt[0], entry->into_Id.cnt[0]);

    cnt_add(&f_alive,    &entry->cnt_alive);
    cnt_add(&f_new_node, &entry->new_node);
    cnt_add(&f_Id,       &entry->into_Id);
  fprintf(dmp->f, "-------------------------------------------\n");
  /* (sum arguments and function close elided in this excerpt) */
  fprintf(dmp->f, "%-16s %8d %8d %8d\n", "Sum",
/**
 * dumps an optimization hash into human readable form
 */
static void simple_dump_opt_hash(dumper_t *dmp, pset *set, int index)
  opt_entry_t *entry = pset_first(set);

  /* NOTE(review): an `if (entry)` guard (skip empty sets) is presumably
     in the elided lines — confirm */
  fprintf(dmp->f, "\n%s:\n", opt_names[index]);
  fprintf(dmp->f, "%-16s %-8s\n", "Opcode", "deref");

  for (; entry; entry = pset_next(set)) {
    /* NOTE(review): count.cnt[0] is unsigned; %u would match better */
    fprintf(dmp->f, "%-16s %8d\n",
      get_id_str(entry->op->name), entry->count.cnt[0]);
538 * dumps the endges count
540 static void simple_dump_edges(dumper_t *dmp, counter_t *cnt)
542 fprintf(dmp->f, "%-16s %8d\n", "Edges", cnt->cnt[0]);
/**
 * dumps the counters of one graph entry in human readable form:
 * header line, walker/inline counters, opcode table, edge count and
 * the per-optimization tables
 */
static void simple_dump_graph(dumper_t *dmp, graph_entry_t *entry)
  ir_graph *const_irg = get_const_code_irg();

  if (entry->irg == const_irg) {
    fprintf(dmp->f, "\nConst code Irg %p", (void *)entry->irg);
    /* (branch structure elided: with entity -> named header, else bare pointer) */
    fprintf(dmp->f, "\nEntity %s, Irg %p", get_entity_name(entry->ent), (void *)entry->irg);
    fprintf(dmp->f, "\nIrg %p", (void *)entry->irg);

  /* NOTE(review): counter words are unsigned; %u would match better than %d */
  fprintf(dmp->f, " %swalked %d over blocks %d was inlined %d got inlined %d:\n",
    entry->deleted ? "DELETED " : "",
    entry->cnt_walked.cnt[0], entry->cnt_walked_blocks.cnt[0],
    entry->cnt_was_inlined.cnt[0],
    entry->cnt_got_inlined.cnt[0]
  /* (closing parenthesis of the call elided in this excerpt) */

  /* NOTE(review): in context this line presumably belongs to the
     global (irg == NULL) branch — confirm */
  fprintf(dmp->f, "\nGlobals counts:\n");

  simple_dump_opcode_hash(dmp, entry->opcode_hash);
  simple_dump_edges(dmp, &entry->cnt_edges);

  /* effects of optimizations */
  for (i = 0; i < sizeof(entry->opt_hash)/sizeof(entry->opt_hash[0]); ++i) {
    simple_dump_opt_hash(dmp, entry->opt_hash[i], i);
/**
 * initialise the simple dumper: open the text output file for writing
 * NOTE(review): the fopen result is not checked here; the dump handlers
 * write to dmp->f unconditionally — confirm a NULL check exists elsewhere
 */
static void simple_init(dumper_t *dmp, const char *name)
  dmp->f = fopen(name, "w");
/**
 * finishes the simple dumper
 * (body elided in this excerpt; presumably closes dmp->f — confirm)
 */
static void simple_finish(dumper_t *dmp)
/**
 * the simple human readable dumper: its handler table
 * (initializer list elided in this excerpt)
 */
static dumper_t simple_dumper = {
/* ---------------------------------------------------------------------- */

/**
 * count the nodes as needed for the CSV output:
 *
 * cnt[1] normal (data) Phi's
 * cnt[2] memory Phi's (per the op_PhiM branch)
 * cnt[3] Proj's
 * cnt[0] all other nodes
 * (mapping inferred from the branches below; the original legend lines
 * are elided in this excerpt — confirm)
 */
static void csv_count_nodes(graph_entry_t *graph, counter_t cnt[])
  /* reset all four output counters (cnt_clr call elided in this excerpt) */
  for (i = 0; i < 4; ++i)

  for (entry = pset_first(graph->opcode_hash); entry; entry = pset_next(graph->opcode_hash)) {
    if (entry->op == op_Phi) {
      /* normal (data) Phi */
      cnt_add(&cnt[1], &entry->cnt_alive);
    else if (entry->op == status->op_PhiM) {
      /* memory Phi */
      cnt_add(&cnt[2], &entry->cnt_alive);
    else if (entry->op == op_Proj) {
      /* Proj */
      cnt_add(&cnt[3], &entry->cnt_alive);

    /* all other nodes */
    cnt_add(&cnt[0], &entry->cnt_alive);
/**
 * dumps one CSV line per graph: name, address and the four node
 * counters produced by csv_count_nodes
 */
static void csv_dump_graph(dumper_t *dmp, graph_entry_t *entry)
  ir_graph *const_irg = get_const_code_irg();

  if (entry->irg == const_irg) {
    name = "<Const code Irg>";
    /* (branch structure elided: graphs with an entity use its name) */
    name = get_entity_name(entry->ent);
    /* fallback for graphs without an entity */
    name = "<UNKNOWN IRG>";

  csv_count_nodes(entry, cnt);

  /* NOTE(review): the counter words are unsigned; %u would match better
     than %d.  The argument list is elided in this excerpt. */
  fprintf(dmp->f, "%-40s, %p, %d, %d, %d, %d\n",
/**
 * initialise the CSV dumper: open the CSV output file for appending
 * NOTE(review): fopen result unchecked, as in simple_init — confirm a
 * NULL check exists elsewhere
 */
static void csv_init(dumper_t *dmp, const char *name)
  dmp->f = fopen(name, "a");
/**
 * finishes the CSV dumper
 * (body elided in this excerpt; presumably closes dmp->f — confirm)
 */
static void csv_finish(dumper_t *dmp)
/**
 * the CSV dumper: its handler table
 * (initializer list elided in this excerpt)
 */
static dumper_t csv_dumper = {
/* ---------------------------------------------------------------------- */

/* initialize the statistics module. */
/* helper: expands to a string literal plus its length (for id_from_str) */
#define X(a) a, sizeof(a)-1

/* NOTE(review): the function header and the pseudo_id declaration are
   elided in this excerpt */
  obstack_init(&status->cnts);

  /* build the pseudo-ops */
  _op_Phi0.code = --pseudo_id;
  _op_Phi0.name = id_from_str(X("Phi0"));

  _op_PhiM.code = --pseudo_id;
  _op_PhiM.name = id_from_str(X("PhiM"));

  /* create the hash-tables */
  status->irg_hash = new_pset(graph_cmp, 8);

  status->op_Phi0 = &_op_Phi0;
  status->op_PhiM = &_op_PhiM;

  /* register the two default dumpers */
  stat_register_dumper(&simple_dumper, "firmstat.txt");
  stat_register_dumper(&csv_dumper, "firmstat.csv");
/* A new IR op is registered: make sure a counter entry for it exists
   in the global graph entry. */
void stat_new_ir_op(const ir_op *op)
  graph_entry_t *graph = graph_get_entry(NULL, status->irg_hash);

  /* execute for side effect :-) */
  opcode_get_entry(op, graph->opcode_hash);
/* An IR op is freed. (body elided in this excerpt) */
void stat_free_ir_op(const ir_op *op)
/* A new node is created: count it both on the global entry and on the
   entry of the current graph. */
void stat_new_node(const ir_node *node)
  /* do NOT count during dead node elimination */
  if (status->in_dead_node_elim > 0)
    /* (early return elided in this excerpt) */

  graph_entry_t *graph;
  ir_op *op = stat_get_irn_op(node);   /* maps Phi0/PhiM onto pseudo ops */

  /* increase global value */
  graph = graph_get_entry(NULL, status->irg_hash);
  entry = opcode_get_entry(op, graph->opcode_hash);
  cnt_inc(&entry->new_node);

  /* increase local value */
  graph = graph_get_entry(current_ir_graph, status->irg_hash);
  entry = opcode_get_entry(op, graph->opcode_hash);
  cnt_inc(&entry->new_node);
/* A node is changed into a Id node: count it both globally and on the
   entry of the current graph. */
void stat_turn_into_id(const ir_node *node)
  graph_entry_t *graph;
  ir_op *op = stat_get_irn_op(node);

  /* increase global value */
  graph = graph_get_entry(NULL, status->irg_hash);
  entry = opcode_get_entry(op, graph->opcode_hash);
  cnt_inc(&entry->into_Id);

  /* increase local value */
  graph = graph_get_entry(current_ir_graph, status->irg_hash);
  entry = opcode_get_entry(op, graph->opcode_hash);
  cnt_inc(&entry->into_Id);
/* A new graph was created: make sure its hash-map entry exists.
   NOTE(review): storing ent into the entry is presumably done in the
   elided lines — confirm. */
void stat_new_graph(ir_graph *irg, entity *ent)
  /* execute for side effect :-) */
  graph_entry_t * graph = graph_get_entry(irg, status->irg_hash);
/*
 * A graph was deleted: fold its node counts into the global entry
 * before the graph itself is destroyed.
 */
void stat_free_graph(ir_graph *irg)
  graph_entry_t *graph  = graph_get_entry(irg, status->irg_hash);
  graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);

  /* count the nodes of the graph yet, it will be destroyed later */
  count_nodes_in_graph(global, graph);
/*
 * A walk over a graph is initiated. Do not count walks from statistic code.
 */
void stat_irg_walk(ir_graph *irg, void *pre, void *post)
  graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);

  cnt_inc(&graph->cnt_walked);
/*
 * A walk over the graph's blocks is initiated. Do not count walks from statistic code.
 */
void stat_irg_block_walk(ir_graph *irg, const ir_node *node, void *pre, void *post)
  graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);

  cnt_inc(&graph->cnt_walked_blocks);
/**
 * called for every node that is removed due to an optimization:
 * bump the per-opcode counter in the given optimization hash
 */
static void removed_due_opt(ir_node *n, pset *set)
  ir_op *op          = get_irn_op(n);
  opt_entry_t *entry = opt_get_entry(op, set);

  /* increase global value */
  cnt_inc(&entry->count);
/*
 * Some nodes were optimized into some others due to an optimization:
 * every old node that does not also appear in the new array is counted
 * as removed by optimization kind `opt`.
 */
void stat_merge_nodes(
    ir_node **new_node_array, int new_num_entries,
    ir_node **old_node_array, int old_num_entries,
  graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);

  for (i = 0; i < old_num_entries; ++i) {
    /* linear scan for survival; fine for the small arrays involved */
    for (j = 0; j < new_num_entries; ++j)
      if (old_node_array[i] == new_node_array[j])
        /* (break elided in this excerpt) */

    /* nodes might be in new and old, these are NOT removed */
    if (j >= new_num_entries) {
      removed_due_opt(old_node_array[i], graph->opt_hash[opt]);
/*
 * A node was lowered into other nodes: count it under STAT_LOWERED
 * for the current graph.
 */
void stat_lower(ir_node *node)
  graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash);

  removed_due_opt(node, graph->opt_hash[STAT_LOWERED]);
/*
 * A graph was inlined: the calling graph's "got inlined" counter and
 * the called graph's "was inlined" counter are incremented.
 */
void stat_inline(ir_node *call, ir_graph *called_irg)
  ir_graph *irg = get_irn_irg(call);
  graph_entry_t *i_graph = graph_get_entry(called_irg, status->irg_hash);
  graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);

  cnt_inc(&graph->cnt_got_inlined);
  cnt_inc(&i_graph->cnt_was_inlined);
/*
 * Start the dead node elimination: while this nesting counter is
 * positive, stat_new_node does not count (the nodes are only copied).
 */
void stat_dead_node_elim_start(ir_graph *irg)
  ++status->in_dead_node_elim;
/*
 * Stops the dead node elimination: decrement the nesting counter.
 */
void stat_dead_node_elim_stop(ir_graph *irg)
  --status->in_dead_node_elim;
/* Finish the statistics: count the nodes of every still-alive graph
   into the global entry, then run the registered dumpers. */
void stat_finish(void)
  graph_entry_t *entry;
  graph_entry_t *global = graph_get_entry(NULL, status->irg_hash);

  for (entry = pset_first(status->irg_hash); entry; entry = pset_next(status->irg_hash)) {
    if (entry->irg == NULL) {
      /* special entry for the global count */
      /* (continue elided in this excerpt) */

    if (! entry->deleted) {
      /* the graph is still alive, count the nodes on it */
      count_nodes_in_graph(global, entry);
/* NOTE(review): this is presumably the !FIRM_STATISTICS branch of the
   #ifdef above; the #else line itself is elided in this excerpt. */

/* need this for prototypes */
#define FIRM_STATISTICS
#include "firmstat.h"

/* empty stubs so callers link when statistics are compiled out */
void stat_init(void) {}

void stat_finish(void) {}

void stat_new_ir_op(const ir_op *op) {}

void stat_free_ir_op(const ir_op *op) {}

void stat_new_node(const ir_node *node) {}

void stat_turn_into_id(const ir_node *node) {}

void stat_new_graph(ir_graph *irg, entity *ent) {}

void stat_free_graph(ir_graph *irg) {}

void stat_irg_walk(ir_graph *irg, void *pre, void *post) {}

void stat_irg_block_walk(ir_graph *irg, const ir_node *node, void *pre, void *post) {}

void stat_merge_nodes(
    ir_node **new_node_array, int new_num_entries,
    ir_node **old_node_array, int old_num_entries,
    stat_opt_kind opt) {}

void stat_lower(ir_node *node) {}

void stat_inline(ir_node *call, ir_graph *irg) {}

void stat_dead_node_elim_start(ir_graph *irg) {}

void stat_dead_node_elim_stop(ir_graph *irg) {}