X-Git-Url: http://nsz.repo.hu/git/?a=blobdiff_plain;f=ir%2Fstat%2Ffirmstat.c;h=d897bbf08760cfa341424114d770ef4cde7f10e2;hb=f8cc15664f571aa7ef89d6f6bc8d5bd2b8ca7d53;hp=13bacd3743dd64d116b647d765bfeac459f38bfd;hpb=913726a12709fd431025fdd1f4bc520269cb0372;p=libfirm diff --git a/ir/stat/firmstat.c b/ir/stat/firmstat.c index 13bacd374..d897bbf08 100644 --- a/ir/stat/firmstat.c +++ b/ir/stat/firmstat.c @@ -1,38 +1,44 @@ /* - * Project: libFIRM - * File name: ir/ir/firmstat.c - * Purpose: Statistics for Firm. - * Author: Michael Beck - * Created: - * CVS-ID: $Id$ - * Copyright: (c) 2004 Universität Karlsruhe - * Licence: This file protected by GPL - GNU GENERAL PUBLIC LICENSE. - */ - -#ifdef HAVE_CONFIG_H -# include "config.h" -#endif + * Copyright (C) 1995-2010 University of Karlsruhe. All right reserved. + * + * This file is part of libFirm. + * + * This file may be distributed and/or modified under the terms of the + * GNU General Public License version 2 as published by the Free Software + * Foundation and appearing in the file LICENSE.GPL included in the + * packaging of this file. + * + * Licensees holding valid libFirm Professional Edition licenses may use + * this file in accordance with the libFirm Commercial License. + * Agreement provided with the Software. + * + * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE + * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE. + */ -#ifdef FIRM_STATISTICS +/** + * @file + * @brief Statistics for Firm. + * @author Michael Beck + */ +#include "config.h" #include - -#ifdef HAVE_STDLIB_H -# include -#endif -#ifdef HAVE_STRING_H -# include -#endif +#include +#include #include "irouts.h" #include "irdump.h" #include "hashptr.h" #include "firmstat_t.h" +#include "irpass_t.h" #include "pattern.h" #include "dags.h" #include "stat_dmp.h" #include "xmalloc.h" #include "irhooks.h" +#include "util.h" /* * need this to be static: @@ -56,9 +62,6 @@ static ir_op _op_DivC; /** The Div by Const node. */ static ir_op _op_ModC; -/** The Div by Const node. */ -static ir_op _op_DivModC; - /** The memory Proj node. */ static ir_op _op_ProjM; @@ -71,18 +74,18 @@ static ir_op _op_SelSelSel; /* ---------------------------------------------------------------------------------- */ /** Marks the begin of a statistic (hook) function. */ -#define STAT_ENTER ++status->recursive +#define STAT_ENTER ++status->recursive /** Marks the end of a statistic (hook) functions. */ -#define STAT_LEAVE --status->recursive +#define STAT_LEAVE --status->recursive /** Allows to enter a statistic function only when we are not already in a hook. */ -#define STAT_ENTER_SINGLE do { if (status->recursive > 0) return; ++status->recursive; } while (0) +#define STAT_ENTER_SINGLE do { if (status->recursive > 0) return; ++status->recursive; } while (0) /** * global status */ -static const int status_disable = 0; +static const unsigned status_disable = 0; static stat_info_t *status = (stat_info_t *)&status_disable; /** @@ -90,111 +93,113 @@ static stat_info_t *status = (stat_info_t *)&status_disable; */ static int opcode_cmp(const void *elt, const void *key) { - const node_entry_t *e1 = elt; - const node_entry_t *e2 = key; + const node_entry_t *e1 = (const node_entry_t*)elt; + const node_entry_t *e2 = (const node_entry_t*)key; return e1->op->code - e2->op->code; -} /* opcode_cmp */ +} /* opcode_cmp */ /** * Compare two elements of the graph hash. 
*/ static int graph_cmp(const void *elt, const void *key) { - const graph_entry_t *e1 = elt; - const graph_entry_t *e2 = key; + const graph_entry_t *e1 = (const graph_entry_t*)elt; + const graph_entry_t *e2 = (const graph_entry_t*)key; return e1->irg != e2->irg; -} /* graph_cmp */ +} /* graph_cmp */ /** * Compare two elements of the optimization hash. */ static int opt_cmp(const void *elt, const void *key) { - const opt_entry_t *e1 = elt; - const opt_entry_t *e2 = key; + const opt_entry_t *e1 = (const opt_entry_t*)elt; + const opt_entry_t *e2 = (const opt_entry_t*)key; return e1->op->code != e2->op->code; -} /* opt_cmp */ +} /* opt_cmp */ /** * Compare two elements of the block/extbb hash. */ static int block_cmp(const void *elt, const void *key) { - const block_entry_t *e1 = elt; - const block_entry_t *e2 = key; + const block_entry_t *e1 = (const block_entry_t*)elt; + const block_entry_t *e2 = (const block_entry_t*)key; + /* it's enough to compare the block number */ return e1->block_nr != e2->block_nr; -} /* block_cmp */ +} /* block_cmp */ /** * Compare two elements of the be_block hash. */ static int be_block_cmp(const void *elt, const void *key) { - const be_block_entry_t *e1 = elt; - const be_block_entry_t *e2 = key; + const be_block_entry_t *e1 = (const be_block_entry_t*)elt; + const be_block_entry_t *e2 = (const be_block_entry_t*)key; return e1->block_nr != e2->block_nr; -} /* be_block_cmp */ +} /* be_block_cmp */ /** * Compare two elements of reg pressure hash. */ static int reg_pressure_cmp(const void *elt, const void *key) { - const reg_pressure_entry_t *e1 = elt; - const reg_pressure_entry_t *e2 = key; + const reg_pressure_entry_t *e1 = (const reg_pressure_entry_t*)elt; + const reg_pressure_entry_t *e2 = (const reg_pressure_entry_t*)key; return e1->class_name != e2->class_name; -} /* reg_pressure_cmp */ +} /* reg_pressure_cmp */ /** * Compare two elements of the perm_stat hash. */ static int perm_stat_cmp(const void *elt, const void *key) { - const perm_stat_entry_t *e1 = elt; - const perm_stat_entry_t *e2 = key; + const perm_stat_entry_t *e1 = (const perm_stat_entry_t*)elt; + const perm_stat_entry_t *e2 = (const perm_stat_entry_t*)key; return e1->perm != e2->perm; -} /* perm_stat_cmp */ +} /* perm_stat_cmp */ /** * Compare two elements of the perm_class hash. */ static int perm_class_cmp(const void *elt, const void *key) { - const perm_class_entry_t *e1 = elt; - const perm_class_entry_t *e2 = key; + const perm_class_entry_t *e1 = (const perm_class_entry_t*)elt; + const perm_class_entry_t *e2 = (const perm_class_entry_t*)key; return e1->class_name != e2->class_name; -} /* perm_class_cmp */ +} /* perm_class_cmp */ /** * Compare two elements of the ir_op hash. */ static int opcode_cmp_2(const void *elt, const void *key) { - const ir_op *e1 = elt; - const ir_op *e2 = key; + const ir_op *e1 = (const ir_op*)elt; + const ir_op *e2 = (const ir_op*)key; return e1->code != e2->code; -} /* opcode_cmp_2 */ +} /* opcode_cmp_2 */ /** * Compare two elements of the address_mark set. */ static int address_mark_cmp(const void *elt, const void *key, size_t size) { - const address_mark_entry_t *e1 = elt; - const address_mark_entry_t *e2 = key; + const address_mark_entry_t *e1 = (const address_mark_entry_t*)elt; + const address_mark_entry_t *e2 = (const address_mark_entry_t*)key; + (void) size; /* compare only the nodes, the rest is used as data container */ return e1->node != e2->node; -} /* address_mark_cmp */ +} /* address_mark_cmp */ /** * Clear all counter in a node_entry_t. 
@@ -204,10 +209,12 @@ static void opcode_clear_entry(node_entry_t *elem) cnt_clr(&elem->cnt_alive); cnt_clr(&elem->new_node); cnt_clr(&elem->into_Id); -} /* opcode_clear_entry */ + cnt_clr(&elem->normalized); +} /* opcode_clear_entry */ /** - * Returns the associates node_entry_t for an ir_op + * Returns the associates node_entry_t for an ir_op (and allocates + * one if not yet available). * * @param op the IR operation * @param hmap a hash map containing ir_op* -> node_entry_t* @@ -219,20 +226,19 @@ static node_entry_t *opcode_get_entry(const ir_op *op, hmap_node_entry_t *hmap) key.op = op; - elem = pset_find(hmap, &key, op->code); + elem = (node_entry_t*)pset_find(hmap, &key, op->code); if (elem) return elem; - elem = obstack_alloc(&status->cnts, sizeof(*elem)); - memset(elem, 0, sizeof(*elem)); + elem = OALLOCZ(&status->cnts, node_entry_t); /* clear counter */ opcode_clear_entry(elem); elem->op = op; - return pset_insert(hmap, elem, op->code); -} /* opcode_get_entry */ + return (node_entry_t*)pset_insert(hmap, elem, op->code); +} /* opcode_get_entry */ /** * Returns the associates ir_op for an opcode @@ -240,31 +246,28 @@ static node_entry_t *opcode_get_entry(const ir_op *op, hmap_node_entry_t *hmap) * @param code the IR opcode * @param hmap the hash map containing opcode -> ir_op* */ -static ir_op *opcode_find_entry(opcode code, hmap_ir_op *hmap) +static ir_op *opcode_find_entry(ir_opcode code, hmap_ir_op *hmap) { ir_op key; key.code = code; - return pset_find(hmap, &key, code); -} /* opcode_find_entry */ + return (ir_op*)pset_find(hmap, &key, code); +} /* opcode_find_entry */ /** - * clears all counter in a graph_entry_t + * Clears all counter in a graph_entry_t. + * + * @param elem the graph entry + * @param all if non-zero, clears all counters, else leave accumulated ones */ static void graph_clear_entry(graph_entry_t *elem, int all) { - if (all) { - cnt_clr(&elem->cnt_walked); - cnt_clr(&elem->cnt_walked_blocks); - cnt_clr(&elem->cnt_was_inlined); - cnt_clr(&elem->cnt_got_inlined); - cnt_clr(&elem->cnt_strength_red); - cnt_clr(&elem->cnt_real_func_call); - } /* if */ - cnt_clr(&elem->cnt_edges); - cnt_clr(&elem->cnt_all_calls); - cnt_clr(&elem->cnt_call_with_cnst_arg); - cnt_clr(&elem->cnt_indirect_calls); + int i; + + /* clear accumulated / non-accumulated counter */ + for (i = all ? 0 : _gcnt_non_acc; i < _gcnt_last; ++i) { + cnt_clr(&elem->cnt[i]); + } /* for */ if (elem->block_hash) { del_pset(elem->block_hash); @@ -278,23 +281,23 @@ static void graph_clear_entry(graph_entry_t *elem, int all) obstack_free(&elem->recalc_cnts, NULL); obstack_init(&elem->recalc_cnts); -} /* graph_clear_entry */ +} /* graph_clear_entry */ /** * Returns the associated graph_entry_t for an IR graph. 
* - * @param irg the IR graph + * @param irg the IR graph, NULL for the global counter * @param hmap the hash map containing ir_graph* -> graph_entry_t* */ static graph_entry_t *graph_get_entry(ir_graph *irg, hmap_graph_entry_t *hmap) { graph_entry_t key; graph_entry_t *elem; - int i; + size_t i; key.irg = irg; - elem = pset_find(hmap, &key, HASH_PTR(irg)); + elem = (graph_entry_t*)pset_find(hmap, &key, HASH_PTR(irg)); if (elem) { /* create hash map backend block information */ @@ -305,8 +308,7 @@ static graph_entry_t *graph_get_entry(ir_graph *irg, hmap_graph_entry_t *hmap) } /* if */ /* allocate a new one */ - elem = obstack_alloc(&status->cnts, sizeof(*elem)); - memset(elem, 0, sizeof(*elem)); + elem = OALLOCZ(&status->cnts, graph_entry_t); obstack_init(&elem->recalc_cnts); /* clear counter */ @@ -324,8 +326,8 @@ static graph_entry_t *graph_get_entry(ir_graph *irg, hmap_graph_entry_t *hmap) for (i = 0; i < sizeof(elem->opt_hash)/sizeof(elem->opt_hash[0]); ++i) elem->opt_hash[i] = new_pset(opt_cmp, 4); - return pset_insert(hmap, elem, HASH_PTR(irg)); -} /* graph_get_entry */ + return (graph_entry_t*)pset_insert(hmap, elem, HASH_PTR(irg)); +} /* graph_get_entry */ /** * Clear all counter in an opt_entry_t. @@ -333,7 +335,7 @@ static graph_entry_t *graph_get_entry(ir_graph *irg, hmap_graph_entry_t *hmap) static void opt_clear_entry(opt_entry_t *elem) { cnt_clr(&elem->count); -} /* opt_clear_entry */ +} /* opt_clear_entry */ /** * Returns the associated opt_entry_t for an IR operation. @@ -348,32 +350,30 @@ static opt_entry_t *opt_get_entry(const ir_op *op, hmap_opt_entry_t *hmap) key.op = op; - elem = pset_find(hmap, &key, op->code); + elem = (opt_entry_t*)pset_find(hmap, &key, op->code); if (elem) return elem; - elem = obstack_alloc(&status->cnts, sizeof(*elem)); - memset(elem, 0, sizeof(*elem)); + elem = OALLOCZ(&status->cnts, opt_entry_t); /* clear new counter */ opt_clear_entry(elem); elem->op = op; - return pset_insert(hmap, elem, op->code); -} /* opt_get_entry */ + return (opt_entry_t*)pset_insert(hmap, elem, op->code); +} /* opt_get_entry */ /** * clears all counter in a block_entry_t */ static void block_clear_entry(block_entry_t *elem) { - cnt_clr(&elem->cnt_nodes); - cnt_clr(&elem->cnt_edges); - cnt_clr(&elem->cnt_in_edges); - cnt_clr(&elem->cnt_out_edges); - cnt_clr(&elem->cnt_phi_data); -} + int i; + + for (i = 0; i < _bcnt_last; ++i) + cnt_clr(&elem->cnt[i]); +} /* block_clear_entry */ /** * Returns the associated block_entry_t for an block. @@ -388,20 +388,19 @@ static block_entry_t *block_get_entry(struct obstack *obst, long block_nr, hmap_ key.block_nr = block_nr; - elem = pset_find(hmap, &key, block_nr); + elem = (block_entry_t*)pset_find(hmap, &key, block_nr); if (elem) return elem; - elem = obstack_alloc(obst, sizeof(*elem)); - memset(elem, 0, sizeof(*elem)); + elem = OALLOCZ(obst, block_entry_t); /* clear new counter */ block_clear_entry(elem); elem->block_nr = block_nr; - return pset_insert(hmap, elem, block_nr); -} /* block_get_entry */ + return (block_entry_t*)pset_insert(hmap, elem, block_nr); +} /* block_get_entry */ /** * Clear all sets in be_block_entry_t. @@ -420,7 +419,7 @@ static void be_block_clear_entry(be_block_entry_t *elem) elem->reg_pressure = new_pset(reg_pressure_cmp, 5); elem->sched_ready = stat_new_int_distrib_tbl(); elem->perm_class_stat = new_pset(perm_class_cmp, 5); -} /* be_block_clear_entry */ +} /* be_block_clear_entry */ /** * Returns the associated be_block_entry_t for an block. 
@@ -435,30 +434,30 @@ static be_block_entry_t *be_block_get_entry(struct obstack *obst, long block_nr, key.block_nr = block_nr; - elem = pset_find(hmap, &key, block_nr); + elem = (be_block_entry_t*)pset_find(hmap, &key, block_nr); if (elem) return elem; - elem = obstack_alloc(obst, sizeof(*elem)); - memset(elem, 0, sizeof(*elem)); + elem = OALLOCZ(obst, be_block_entry_t); /* clear new counter */ be_block_clear_entry(elem); elem->block_nr = block_nr; - return pset_insert(hmap, elem, block_nr); -} /* be_block_get_entry */ + return (be_block_entry_t*)pset_insert(hmap, elem, block_nr); +} /* be_block_get_entry */ /** * clears all sets in perm_class_entry_t */ -static void perm_class_clear_entry(perm_class_entry_t *elem) { +static void perm_class_clear_entry(perm_class_entry_t *elem) +{ if (elem->perm_stat) del_pset(elem->perm_stat); elem->perm_stat = new_pset(perm_stat_cmp, 5); -} +} /* perm_class_clear_entry */ /** * Returns the associated perm_class entry for a register class. @@ -474,25 +473,25 @@ static perm_class_entry_t *perm_class_get_entry(struct obstack *obst, const char key.class_name = class_name; - elem = pset_find(hmap, &key, HASH_PTR(class_name)); + elem = (perm_class_entry_t*)pset_find(hmap, &key, HASH_PTR(class_name)); if (elem) return elem; - elem = obstack_alloc(obst, sizeof(*elem)); - memset(elem, 0, sizeof(*elem)); + elem = OALLOCZ(obst, perm_class_entry_t); /* clear new counter */ perm_class_clear_entry(elem); elem->class_name = class_name; - return pset_insert(hmap, elem, HASH_PTR(class_name)); -} /* perm_class_get_entry */ + return (perm_class_entry_t*)pset_insert(hmap, elem, HASH_PTR(class_name)); +} /* perm_class_get_entry */ /** * clears all sets in perm_stat_entry_t */ -static void perm_stat_clear_entry(perm_stat_entry_t *elem) { +static void perm_stat_clear_entry(perm_stat_entry_t *elem) +{ if (elem->chains) stat_delete_distrib_tbl(elem->chains); @@ -501,7 +500,7 @@ static void perm_stat_clear_entry(perm_stat_entry_t *elem) { elem->chains = stat_new_int_distrib_tbl(); elem->cycles = stat_new_int_distrib_tbl(); -} /* perm_stat_clear_entry */ +} /* perm_stat_clear_entry */ /** * Returns the associated perm_stat entry for a perm. @@ -516,20 +515,29 @@ static perm_stat_entry_t *perm_stat_get_entry(struct obstack *obst, ir_node *per key.perm = perm; - elem = pset_find(hmap, &key, HASH_PTR(perm)); + elem = (perm_stat_entry_t*)pset_find(hmap, &key, HASH_PTR(perm)); if (elem) return elem; - elem = obstack_alloc(obst, sizeof(*elem)); - memset(elem, 0, sizeof(*elem)); + elem = OALLOCZ(obst, perm_stat_entry_t); /* clear new counter */ perm_stat_clear_entry(elem); elem->perm = perm; - return pset_insert(hmap, elem, HASH_PTR(perm)); -} /* perm_stat_get_entry */ + return (perm_stat_entry_t*)pset_insert(hmap, elem, HASH_PTR(perm)); +} /* perm_stat_get_entry */ + +/** + * Clear optimizations counter, + */ +static void clear_optimization_counter(void) +{ + int i; + for (i = 0; i < FS_OPT_MAX; ++i) + cnt_clr(&status->num_opts[i]); +} /** * Returns the ir_op for an IR-node, @@ -540,41 +548,58 @@ static perm_stat_entry_t *perm_stat_get_entry(struct obstack *obst, ir_node *per static ir_op *stat_get_irn_op(ir_node *node) { ir_op *op = get_irn_op(node); - - if (op == op_Phi && get_irn_arity(node) == 0) { - /* special case, a Phi0 node, count on extra counter */ - op = status->op_Phi0 ? status->op_Phi0 : op; - } else if (op == op_Phi && get_irn_mode(node) == mode_M) { - /* special case, a Memory Phi node, count on extra counter */ - op = status->op_PhiM ? 
status->op_PhiM : op; - } else if (op == op_Proj && get_irn_mode(node) == mode_M) { - /* special case, a Memory Proj node, count on extra counter */ - op = status->op_ProjM ? status->op_ProjM : op; - } else if (op == op_Mul && - (get_irn_op(get_Mul_left(node)) == op_Const || get_irn_op(get_Mul_right(node)) == op_Const)) { - /* special case, a Multiply by a const, count on extra counter */ - op = status->op_MulC ? status->op_MulC : op; - } else if (op == op_Div && get_irn_op(get_Div_right(node)) == op_Const) { - /* special case, a division by a const, count on extra counter */ - op = status->op_DivC ? status->op_DivC : op; - } else if (op == op_Mod && get_irn_op(get_Mod_right(node)) == op_Const) { - /* special case, a module by a const, count on extra counter */ - op = status->op_ModC ? status->op_ModC : op; - } else if (op == op_DivMod && get_irn_op(get_DivMod_right(node)) == op_Const) { - /* special case, a division/modulo by a const, count on extra counter */ - op = status->op_DivModC ? status->op_DivModC : op; - } else if (op == op_Sel && get_irn_op(get_Sel_ptr(node)) == op_Sel) { - /* special case, a Sel of a Sel, count on extra counter */ - op = status->op_SelSel ? status->op_SelSel : op; - - if (get_irn_op(get_Sel_ptr(get_Sel_ptr(node))) == op_Sel) { - /* special case, a Sel of a Sel of a Sel, count on extra counter */ - op = status->op_SelSelSel ? status->op_SelSelSel : op; + unsigned opc = op->code; + + switch (opc) { + case iro_Phi: + if (get_irn_arity(node) == 0) { + /* special case, a Phi0 node, count on extra counter */ + op = status->op_Phi0 ? status->op_Phi0 : op; + } else if (get_irn_mode(node) == mode_M) { + /* special case, a Memory Phi node, count on extra counter */ + op = status->op_PhiM ? status->op_PhiM : op; } /* if */ - } /* if */ + break; + case iro_Proj: + if (get_irn_mode(node) == mode_M) { + /* special case, a Memory Proj node, count on extra counter */ + op = status->op_ProjM ? status->op_ProjM : op; + } /* if */ + break; + case iro_Mul: + if (is_Const(get_Mul_left(node)) || is_Const(get_Mul_right(node))) { + /* special case, a Multiply by a const, count on extra counter */ + op = status->op_MulC ? status->op_MulC : op; + } /* if */ + break; + case iro_Div: + if (is_Const(get_Div_right(node))) { + /* special case, a division by a const, count on extra counter */ + op = status->op_DivC ? status->op_DivC : op; + } /* if */ + break; + case iro_Mod: + if (is_Const(get_Mod_right(node))) { + /* special case, a module by a const, count on extra counter */ + op = status->op_ModC ? status->op_ModC : op; + } /* if */ + break; + case iro_Sel: + if (is_Sel(get_Sel_ptr(node))) { + /* special case, a Sel of a Sel, count on extra counter */ + op = status->op_SelSel ? status->op_SelSel : op; + if (is_Sel(get_Sel_ptr(get_Sel_ptr(node)))) { + /* special case, a Sel of a Sel of a Sel, count on extra counter */ + op = status->op_SelSelSel ? 
status->op_SelSelSel : op; + } /* if */ + } /* if */ + break; + default: + break; + } /* switch */ return op; -} /* stat_get_irn_op */ +} /* stat_get_irn_op */ /** * update the block counter @@ -590,6 +615,11 @@ static void undate_block_info(ir_node *node, graph_entry_t *graph) if (op == op_Block) { arity = get_irn_arity(node); b_entry = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(node), graph->block_hash); + /* mark start end block to allow to filter them out */ + if (node == get_irg_start_block(graph->irg)) + b_entry->is_start = 1; + else if (node == get_irg_end_block(graph->irg)) + b_entry->is_end = 1; /* count all incoming edges */ for (i = 0; i < arity; ++i) { @@ -597,8 +627,8 @@ static void undate_block_info(ir_node *node, graph_entry_t *graph) ir_node *other_block = get_nodes_block(pred); block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash); - cnt_inc(&b_entry->cnt_in_edges); /* an edge coming from another block */ - cnt_inc(&b_entry_other->cnt_out_edges); + cnt_inc(&b_entry->cnt[bcnt_in_edges]); /* an edge coming from another block */ + cnt_inc(&b_entry_other->cnt[bcnt_out_edges]); } /* for */ return; } /* if */ @@ -608,14 +638,14 @@ static void undate_block_info(ir_node *node, graph_entry_t *graph) if (op == op_Phi && mode_is_datab(get_irn_mode(node))) { /* count data Phi per block */ - cnt_inc(&b_entry->cnt_phi_data); + cnt_inc(&b_entry->cnt[bcnt_phi_data]); } /* if */ /* we have a new node in our block */ - cnt_inc(&b_entry->cnt_nodes); + cnt_inc(&b_entry->cnt[bcnt_nodes]); /* don't count keep-alive edges */ - if (get_irn_op(node) == op_End) + if (is_End(node)) return; arity = get_irn_arity(node); @@ -627,15 +657,15 @@ static void undate_block_info(ir_node *node, graph_entry_t *graph) other_block = get_nodes_block(pred); if (other_block == block) - cnt_inc(&b_entry->cnt_edges); /* a in block edge */ + cnt_inc(&b_entry->cnt[bcnt_edges]); /* a in block edge */ else { block_entry_t *b_entry_other = block_get_entry(&graph->recalc_cnts, get_irn_node_nr(other_block), graph->block_hash); - cnt_inc(&b_entry->cnt_in_edges); /* an edge coming from another block */ - cnt_inc(&b_entry_other->cnt_out_edges); + cnt_inc(&b_entry->cnt[bcnt_in_edges]); /* an edge coming from another block */ + cnt_inc(&b_entry_other->cnt[bcnt_out_edges]); } /* if */ } /* for */ -} /* undate_block_info */ +} /* undate_block_info */ /** * Update the extended block counter. 
@@ -661,8 +691,8 @@ static void update_extbb_info(ir_node *node, graph_entry_t *graph) if (extbb != other_extbb) { extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash); - cnt_inc(&eb_entry->cnt_in_edges); /* an edge coming from another extbb */ - cnt_inc(&eb_entry_other->cnt_out_edges); + cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */ + cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]); } /* if */ } /* for */ return; @@ -673,14 +703,14 @@ static void update_extbb_info(ir_node *node, graph_entry_t *graph) if (op == op_Phi && mode_is_datab(get_irn_mode(node))) { /* count data Phi per extbb */ - cnt_inc(&eb_entry->cnt_phi_data); + cnt_inc(&eb_entry->cnt[bcnt_phi_data]); } /* if */ /* we have a new node in our block */ - cnt_inc(&eb_entry->cnt_nodes); + cnt_inc(&eb_entry->cnt[bcnt_nodes]); /* don't count keep-alive edges */ - if (get_irn_op(node) == op_End) + if (is_End(node)) return; arity = get_irn_arity(node); @@ -690,31 +720,52 @@ static void update_extbb_info(ir_node *node, graph_entry_t *graph) ir_extblk *other_extbb = get_nodes_extbb(pred); if (other_extbb == extbb) - cnt_inc(&eb_entry->cnt_edges); /* a in extbb edge */ + cnt_inc(&eb_entry->cnt[bcnt_edges]); /* a in extbb edge */ else { extbb_entry_t *eb_entry_other = block_get_entry(&graph->recalc_cnts, get_extbb_node_nr(other_extbb), graph->extbb_hash); - cnt_inc(&eb_entry->cnt_in_edges); /* an edge coming from another extbb */ - cnt_inc(&eb_entry_other->cnt_out_edges); + cnt_inc(&eb_entry->cnt[bcnt_in_edges]); /* an edge coming from another extbb */ + cnt_inc(&eb_entry_other->cnt[bcnt_out_edges]); } /* if */ } /* for */ -} /* update_extbb_info */ +} /* update_extbb_info */ -/** Calculates how many arguments of the call are const. */ -static int cnt_const_args(ir_node *call) +/** + * Calculates how many arguments of the call are const, updates + * param distribution. + */ +static void analyse_params_of_Call(graph_entry_t *graph, ir_node *call) { - int i, res = 0; + int i, num_const_args = 0, num_local_adr = 0; int n = get_Call_n_params(call); for (i = 0; i < n; ++i) { ir_node *param = get_Call_param(call, i); - ir_op *op = get_irn_op(param); - if (op == op_Const || op == op_SymConst) - ++res; + if (is_irn_constlike(param)) + ++num_const_args; + else if (is_Sel(param)) { + ir_node *base = param; + + do { + base = get_Sel_ptr(base); + } while (is_Sel(base)); + + if (base == get_irg_frame(current_ir_graph)) + ++num_local_adr; + } + } /* for */ - return res; -} /* cnt_const_args */ + + if (num_const_args > 0) + cnt_inc(&graph->cnt[gcnt_call_with_cnst_arg]); + if (num_const_args == n) + cnt_inc(&graph->cnt[gcnt_call_with_all_cnst_arg]); + if (num_local_adr > 0) + cnt_inc(&graph->cnt[gcnt_call_with_local_adr]); + + stat_inc_int_distrib_tbl(status->dist_param_cnt, n); +} /* analyse_params_of_Call */ /** * Update info on calls. 
@@ -724,11 +775,10 @@ static int cnt_const_args(ir_node *call) */ static void stat_update_call(ir_node *call, graph_entry_t *graph) { - ir_node *block = get_nodes_block(call); - ir_node *ptr = get_Call_ptr(call); - entity *ent = NULL; - ir_graph *callee = NULL; - int num_const_args; + ir_node *block = get_nodes_block(call); + ir_node *ptr = get_Call_ptr(call); + ir_entity *ent = NULL; + ir_graph *callee = NULL; /* * If the block is bad, the whole subgraph will collapse later @@ -738,12 +788,12 @@ static void stat_update_call(ir_node *call, graph_entry_t *graph) if (is_Bad(block)) return; - cnt_inc(&graph->cnt_all_calls); + cnt_inc(&graph->cnt[gcnt_all_calls]); /* found a call, this function is not a leaf */ graph->is_leaf = 0; - if (get_irn_op(ptr) == op_SymConst) { + if (is_SymConst(ptr)) { if (get_SymConst_kind(ptr) == symconst_addr_ent) { /* ok, we seems to know the entity */ ent = get_SymConst_entity(ptr); @@ -752,17 +802,19 @@ static void stat_update_call(ir_node *call, graph_entry_t *graph) /* it is recursive, if it calls at least once */ if (callee == graph->irg) graph->is_recursive = 1; + if (callee == NULL) + cnt_inc(&graph->cnt[gcnt_external_calls]); } /* if */ } else { /* indirect call, be could not predict */ - cnt_inc(&graph->cnt_indirect_calls); + cnt_inc(&graph->cnt[gcnt_indirect_calls]); /* NOT a leaf call */ graph->is_leaf_call = LCS_NON_LEAF_CALL; } /* if */ /* check, if it's a chain-call: Then, the call-block - * must dominate the end block. */ + * must dominate the end block. */ { ir_node *curr = get_irg_end_block(graph->irg); int depth = get_Block_dom_depth(block); @@ -770,7 +822,7 @@ static void stat_update_call(ir_node *call, graph_entry_t *graph) for (; curr != block && get_Block_dom_depth(curr) > depth;) { curr = get_Block_idom(curr); - if (! curr || is_no_Block(curr)) + if (! curr || !is_Block(curr)) break; } /* for */ @@ -788,22 +840,18 @@ static void stat_update_call(ir_node *call, graph_entry_t *graph) } /* if */ } /* if */ - /* check, if arguments of the call are const */ - num_const_args = cnt_const_args(call); - - if (num_const_args > 0) - cnt_inc(&graph->cnt_call_with_cnst_arg); -} /* stat_update_call */ + analyse_params_of_Call(graph, call); +} /* stat_update_call */ /** * Update info on calls for graphs on the wait queue. */ static void stat_update_call_2(ir_node *call, graph_entry_t *graph) { - ir_node *block = get_nodes_block(call); - ir_node *ptr = get_Call_ptr(call); - entity *ent = NULL; - ir_graph *callee = NULL; + ir_node *block = get_nodes_block(call); + ir_node *ptr = get_Call_ptr(call); + ir_entity *ent = NULL; + ir_graph *callee = NULL; /* * If the block is bad, the whole subgraph will collapse later @@ -813,7 +861,7 @@ static void stat_update_call_2(ir_node *call, graph_entry_t *graph) if (is_Bad(block)) return; - if (get_irn_op(ptr) == op_SymConst) { + if (is_SymConst(ptr)) { if (get_SymConst_kind(ptr) == symconst_addr_ent) { /* ok, we seems to know the entity */ ent = get_SymConst_entity(ptr); @@ -831,23 +879,91 @@ static void stat_update_call_2(ir_node *call, graph_entry_t *graph) graph->is_leaf_call = LCS_NON_LEAF_CALL; } else graph->is_leaf_call = LCS_NON_LEAF_CALL; -} /* stat_update_call_2 */ +} /* stat_update_call_2 */ + +/** + * Find the base address and entity of an Sel node. + * + * @param sel the node + * + * @return the base address. 
+ */ +static ir_node *find_base_adr(ir_node *sel) +{ + ir_node *ptr = get_Sel_ptr(sel); + + while (is_Sel(ptr)) { + sel = ptr; + ptr = get_Sel_ptr(sel); + } + return ptr; +} /* find_base_adr */ + +/** + * Update info on Load/Store address statistics. + */ +static void stat_update_address(ir_node *node, graph_entry_t *graph) +{ + unsigned opc = get_irn_opcode(node); + ir_node *base; + ir_graph *irg; + + switch (opc) { + case iro_SymConst: + /* a global address */ + cnt_inc(&graph->cnt[gcnt_global_adr]); + break; + case iro_Sel: + base = find_base_adr(node); + irg = current_ir_graph; + if (base == get_irg_frame(irg)) { + /* a local Variable. */ + cnt_inc(&graph->cnt[gcnt_local_adr]); + } else { + /* Pointer access */ + if (is_Proj(base) && skip_Proj(get_Proj_pred(base)) == get_irg_start(irg)) { + /* pointer access through parameter, check for THIS */ + ir_entity *ent = get_irg_entity(irg); + + if (ent != NULL) { + ir_type *ent_tp = get_entity_type(ent); + + if (get_method_calling_convention(ent_tp) & cc_this_call) { + if (get_Proj_proj(base) == 0) { + /* THIS pointer */ + cnt_inc(&graph->cnt[gcnt_this_adr]); + goto end_parameter; + } /* if */ + } /* if */ + } /* if */ + /* other parameter */ + cnt_inc(&graph->cnt[gcnt_param_adr]); +end_parameter: ; + } else { + /* unknown Pointer access */ + cnt_inc(&graph->cnt[gcnt_other_adr]); + } /* if */ + } /* if */ + default: + break; + } /* switch */ +} /* stat_update_address */ /** * Walker for reachable nodes count. */ static void update_node_stat(ir_node *node, void *env) { - graph_entry_t *graph = env; + graph_entry_t *graph = (graph_entry_t*)env; node_entry_t *entry; ir_op *op = stat_get_irn_op(node); - int arity = get_irn_arity(node); + int i, arity = get_irn_arity(node); entry = opcode_get_entry(op, graph->opcode_hash); cnt_inc(&entry->cnt_alive); - cnt_add_i(&graph->cnt_edges, arity); + cnt_add_i(&graph->cnt[gcnt_edges], arity); /* count block edges */ undate_block_info(node, graph); @@ -860,50 +976,83 @@ static void update_node_stat(ir_node *node, void *env) /* handle statistics for special node types */ - if (op == op_Const) { - if (status->stat_options & FIRMSTAT_COUNT_CONSTS) { - /* check properties of constants */ - stat_update_const(status, node, graph); - } /* if */ - } else if (op == op_Call) { + switch (op->code) { + case iro_Call: /* check for properties that depends on calls like recursion/leaf/indirect call */ stat_update_call(node, graph); + break; + case iro_Load: + /* check address properties */ + stat_update_address(get_Load_ptr(node), graph); + break; + case iro_Store: + /* check address properties */ + stat_update_address(get_Store_ptr(node), graph); + break; + case iro_Phi: + /* check for non-strict Phi nodes */ + for (i = arity - 1; i >= 0; --i) { + ir_node *pred = get_Phi_pred(node, i); + if (is_Unknown(pred)) { + /* found an Unknown predecessor, graph is not strict */ + graph->is_strict = 0; + break; + } + } + default: + break; + } /* switch */ + + /* we want to count the constant IN nodes, not the CSE'ed constant's itself */ + if (status->stat_options & FIRMSTAT_COUNT_CONSTS) { + int i; + + for (i = get_irn_arity(node) - 1; i >= 0; --i) { + ir_node *pred = get_irn_n(node, i); + + if (is_Const(pred)) { + /* check properties of constants */ + stat_update_const(status, pred, graph); + } /* if */ + } /* for */ } /* if */ -} /* update_node_stat */ +} /* update_node_stat */ /** * Walker for reachable nodes count for graphs on the wait_q. 
*/ static void update_node_stat_2(ir_node *node, void *env) { - graph_entry_t *graph = env; + graph_entry_t *graph = (graph_entry_t*)env; /* check for properties that depends on calls like recursion/leaf/indirect call */ if (is_Call(node)) stat_update_call_2(node, graph); -} /* update_node_stat_2 */ +} /* update_node_stat_2 */ /** * Get the current address mark. */ static unsigned get_adr_mark(graph_entry_t *graph, ir_node *node) { - address_mark_entry_t *value = set_find(graph->address_mark, &node, sizeof(*value), HASH_PTR(node)); + address_mark_entry_t *value = (address_mark_entry_t*)set_find(graph->address_mark, &node, sizeof(*value), HASH_PTR(node)); return value ? value->mark : 0; -} /* get_adr_mark */ +} /* get_adr_mark */ /** * Set the current address mark. */ static void set_adr_mark(graph_entry_t *graph, ir_node *node, unsigned val) { - address_mark_entry_t *value = set_insert(graph->address_mark, &node, sizeof(*value), HASH_PTR(node)); + address_mark_entry_t *value = (address_mark_entry_t*)set_insert(graph->address_mark, &node, sizeof(*value), HASH_PTR(node)); value->mark = val; -} /* set_adr_mark */ +} /* set_adr_mark */ -#if 0 +#undef DUMP_ADR_MODE + +#ifdef DUMP_ADR_MODE /** * a vcg attribute hook: Color a node with a different color if * it's identified as a part of an address expression or at least referenced @@ -927,24 +1076,45 @@ static int stat_adr_mark_hook(FILE *F, ir_node *node, ir_node *local) /* I know the color! */ return 1; -} /* stat_adr_mark_hook */ -#endif +} /* stat_adr_mark_hook */ +#endif /* DUMP_ADR_MODE */ + +/** + * Return the "operational" mode of a Firm node. + */ +static ir_mode *get_irn_op_mode(ir_node *node) +{ + switch (get_irn_opcode(node)) { + case iro_Load: + return get_Load_mode(node); + case iro_Store: + return get_irn_mode(get_Store_value(node)); + case iro_Div: + return get_irn_mode(get_Div_left(node)); + case iro_Mod: + return get_irn_mode(get_Mod_left(node)); + case iro_Cmp: + /* Cmp is no address calculation, or is it? */ + default: + return get_irn_mode(node); + } /* switch */ +} /* get_irn_op_mode */ /** - * walker that marks every node that is an address calculation + * Post-walker that marks every node that is an address calculation. * - * predecessor nodes must be visited first. We ensure this by - * calling in in the post of an outs walk. This should work even in cycles, - * while the pre in a normal walk will not. + * Users of a node must be visited first. We ensure this by + * calling it in the post of an outs walk. This should work even in cycles, + * while the normal pre-walk will not. */ static void mark_address_calc(ir_node *node, void *env) { - graph_entry_t *graph = env; - ir_mode *mode = get_irn_mode(node); + graph_entry_t *graph = (graph_entry_t*)env; + ir_mode *mode = get_irn_op_mode(node); int i, n; unsigned mark_preds = MARK_REF_NON_ADR; - if (! mode_is_numP(mode)) + if (! 
mode_is_data(mode)) return; if (mode_is_reference(mode)) { @@ -956,10 +1126,10 @@ static void mark_address_calc(ir_node *node, void *env) unsigned mark = get_adr_mark(graph, node); if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR) { - /* - * this node has not an reference mode, but is only - * referenced by address calculations - */ + /* + * this node has no reference mode, but is only + * referenced by address calculations + */ mark_preds = MARK_REF_ADR; } /* if */ } /* if */ @@ -968,9 +1138,33 @@ static void mark_address_calc(ir_node *node, void *env) for (i = 0, n = get_irn_arity(node); i < n; ++i) { ir_node *pred = get_irn_n(node, i); + mode = get_irn_op_mode(pred); + if (! mode_is_data(mode)) + continue; + set_adr_mark(graph, pred, get_adr_mark(graph, pred) | mark_preds); } /* for */ -} /* mark_address_calc */ +} /* mark_address_calc */ + +/** + * Post-walker that marks every node that is an address calculation. + * + * Users of a node must be visited first. We ensure this by + * calling it in the post of an outs walk. This should work even in cycles, + * while the normal pre-walk will not. + */ +static void count_adr_ops(ir_node *node, void *env) +{ + graph_entry_t *graph = (graph_entry_t*)env; + unsigned mark = get_adr_mark(graph, node); + + if (mark & MARK_ADDRESS_CALC) + cnt_inc(&graph->cnt[gcnt_pure_adr_ops]); + else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR) + cnt_inc(&graph->cnt[gcnt_pure_adr_ops]); + else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR)) + cnt_inc(&graph->cnt[gcnt_all_adr_ops]); +} /* count_adr_ops */ /** * Called for every graph when the graph is either deleted or stat_dump_snapshot() @@ -982,17 +1176,19 @@ static void mark_address_calc(ir_node *node, void *env) static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph) { node_entry_t *entry; + int i; /* clear first the alive counter in the graph */ - for (entry = pset_first(graph->opcode_hash); entry; entry = pset_next(graph->opcode_hash)) { + foreach_pset(graph->opcode_hash, node_entry_t*, entry) { cnt_clr(&entry->cnt_alive); - } /* for */ + } /* foreach_pset */ /* set pessimistic values */ graph->is_leaf = 1; graph->is_leaf_call = LCS_UNKNOWN; graph->is_recursive = 0; graph->is_chain_call = 1; + graph->is_strict = 1; /* create new block counter */ graph->block_hash = new_pset(block_cmp, 5); @@ -1014,7 +1210,7 @@ static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph) irg_walk_graph(graph->irg, update_node_stat, NULL, graph); #if 0 - /* Uncomment this code if chain-call means call exact one */ + /* Uncomment this code if chain-call means call exact one. 
*/ entry = opcode_get_entry(op_Call, graph->opcode_hash); /* check if we have more than 1 call */ @@ -1027,34 +1223,32 @@ static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph) graph->is_chain_call = 0; /* assume we walk every graph only ONCE, we could sum here the global count */ - for (entry = pset_first(graph->opcode_hash); entry; entry = pset_next(graph->opcode_hash)) { + foreach_pset(graph->opcode_hash, node_entry_t*, entry) { node_entry_t *g_entry = opcode_get_entry(entry->op, global->opcode_hash); /* update the node counter */ cnt_add(&g_entry->cnt_alive, &entry->cnt_alive); - } /* for */ - - /* update the edge counter */ - cnt_add(&global->cnt_edges, &graph->cnt_edges); + } /* foreach_pset */ /* count the number of address calculation */ if (graph->irg != get_const_code_irg()) { ir_graph *rem = current_ir_graph; - if (get_irg_outs_state(graph->irg) != outs_consistent) - compute_irg_outs(graph->irg); + assure_irg_outs(graph->irg); /* Must be done an the outs graph */ current_ir_graph = graph->irg; irg_out_walk(get_irg_start(graph->irg), NULL, mark_address_calc, graph); current_ir_graph = rem; -#if 0 +#ifdef DUMP_ADR_MODE /* register the vcg hook and dump the graph for test */ set_dump_node_vcgattr_hook(stat_adr_mark_hook); dump_ir_block_graph(graph->irg, "-adr"); set_dump_node_vcgattr_hook(NULL); -#endif +#endif /* DUMP_ADR_MODE */ + + irg_walk_graph(graph->irg, NULL, count_adr_ops, graph); } /* if */ /* count the DAG's */ @@ -1074,7 +1268,11 @@ static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph) /* we have analyzed this graph */ graph->is_analyzed = 1; -} /* update_graph_stat */ + + /* accumulate all counter's */ + for (i = 0; i < _gcnt_last; ++i) + cnt_add(&global->cnt[i], &graph->cnt[i]); +} /* update_graph_stat */ /** * Called for every graph that was on the wait_q in stat_dump_snapshot() @@ -1085,6 +1283,7 @@ static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph) */ static void update_graph_stat_2(graph_entry_t *global, graph_entry_t *graph) { + (void) global; if (graph->is_deleted) { /* deleted, ignore */ return; @@ -1097,25 +1296,22 @@ static void update_graph_stat_2(graph_entry_t *global, graph_entry_t *graph) if (graph->is_leaf_call == LCS_UNKNOWN) graph->is_leaf_call = LCS_LEAF_CALL; } /* if */ -} /* update_graph_stat_2 */ +} /* update_graph_stat_2 */ /** * Register a dumper. */ static void stat_register_dumper(const dumper_t *dumper) { - dumper_t *p = xmalloc(sizeof(*p)); - - if (p) { - memcpy(p, dumper, sizeof(*p)); + dumper_t *p = XMALLOC(dumper_t); + *p = *dumper; - p->next = status->dumper; - p->status = status; - status->dumper = p; - } + p->next = status->dumper; + p->status = status; + status->dumper = p; /* FIXME: memory leak */ -} /* stat_register_dumper */ +} /* stat_register_dumper */ /** * Dumps the statistics of an IR graph. @@ -1128,7 +1324,7 @@ static void stat_dump_graph(graph_entry_t *entry) if (dumper->dump_graph) dumper->dump_graph(dumper, entry); } /* for */ -} /* stat_dump_graph */ +} /* stat_dump_graph */ /** * Calls all registered dumper functions. @@ -1141,11 +1337,11 @@ static void stat_dump_registered(graph_entry_t *entry) if (dumper->func_map) { dump_graph_FUNC func; - foreach_pset(dumper->func_map, func) + foreach_pset(dumper->func_map, dump_graph_FUNC, func) func(dumper, entry); } /* if */ } /* for */ -} /* stat_dump_registered */ +} /* stat_dump_registered */ /** * Dumps a constant table. 
@@ -1158,7 +1354,33 @@ static void stat_dump_consts(const constant_info_t *tbl) if (dumper->dump_const_tbl) dumper->dump_const_tbl(dumper, tbl); } /* for */ -} /* stat_dump_consts */ +} /* stat_dump_consts */ + +/** + * Dumps the parameter distribution + */ +static void stat_dump_param_tbl(const distrib_tbl_t *tbl, graph_entry_t *global) +{ + dumper_t *dumper; + + for (dumper = status->dumper; dumper; dumper = dumper->next) { + if (dumper->dump_param_tbl) + dumper->dump_param_tbl(dumper, tbl, global); + } /* for */ +} /* stat_dump_param_tbl */ + +/** + * Dumps the optimization counter + */ +static void stat_dump_opt_cnt(const counter_t *tbl, unsigned len) +{ + dumper_t *dumper; + + for (dumper = status->dumper; dumper; dumper = dumper->next) { + if (dumper->dump_opt_cnt) + dumper->dump_opt_cnt(dumper, tbl, len); + } /* for */ +} /* stat_dump_opt_cnt */ /** * Initialize the dumper. @@ -1171,7 +1393,7 @@ static void stat_dump_init(const char *name) if (dumper->init) dumper->init(dumper, name); } /* for */ -} /* stat_dump_init */ +} /* stat_dump_init */ /** * Finish the dumper. @@ -1184,30 +1406,31 @@ static void stat_dump_finish(void) if (dumper->finish) dumper->finish(dumper); } /* for */ -} /* stat_dump_finish */ +} /* stat_dump_finish */ /** * Register an additional function for all dumper. */ -void stat_register_dumper_func(dump_graph_FUNC func) { +void stat_register_dumper_func(dump_graph_FUNC func) +{ dumper_t *dumper; for (dumper = status->dumper; dumper; dumper = dumper->next) { if (! dumper->func_map) dumper->func_map = pset_new_ptr(3); - pset_insert_ptr(dumper->func_map, func); + pset_insert_ptr(dumper->func_map, (void*)func); } /* for */ -} /* stat_register_dumper_func */ +} /* stat_register_dumper_func */ /* ---------------------------------------------------------------------- */ /* * Helper: get an ir_op from an opcode. */ -ir_op *stat_get_op_from_opcode(opcode code) +ir_op *stat_get_op_from_opcode(unsigned code) { return opcode_find_entry(code, status->ir_op_hash); -} /* stat_get_op_from_opcode */ +} /* stat_get_op_from_opcode */ /** * Hook: A new IR op is registered. @@ -1217,6 +1440,7 @@ ir_op *stat_get_op_from_opcode(opcode code) */ static void stat_new_ir_op(void *ctx, ir_op *op) { + (void) ctx; if (! status->stat_options) return; @@ -1230,7 +1454,7 @@ static void stat_new_ir_op(void *ctx, ir_op *op) pset_insert(status->ir_op_hash, op, op->code); } STAT_LEAVE; -} /* stat_new_ir_op */ +} /* stat_new_ir_op */ /** * Hook: An IR op is freed. @@ -1240,6 +1464,8 @@ static void stat_new_ir_op(void *ctx, ir_op *op) */ static void stat_free_ir_op(void *ctx, ir_op *op) { + (void) ctx; + (void) op; if (! status->stat_options) return; @@ -1247,7 +1473,7 @@ static void stat_free_ir_op(void *ctx, ir_op *op) { } STAT_LEAVE; -} /* stat_free_ir_op */ +} /* stat_free_ir_op */ /** * Hook: A new node is created. @@ -1258,11 +1484,13 @@ static void stat_free_ir_op(void *ctx, ir_op *op) */ static void stat_new_node(void *ctx, ir_graph *irg, ir_node *node) { + (void) ctx; + (void) irg; if (! 
status->stat_options) return; /* do NOT count during dead node elimination */ - if (status->in_dead_node_elim > 0) + if (status->in_dead_node_elim) return; STAT_ENTER; @@ -1282,7 +1510,7 @@ static void stat_new_node(void *ctx, ir_graph *irg, ir_node *node) cnt_inc(&entry->new_node); } STAT_LEAVE; -} /* stat_new_node */ +} /* stat_new_node */ /** * Hook: A node is changed into a Id node @@ -1292,6 +1520,7 @@ static void stat_new_node(void *ctx, ir_graph *irg, ir_node *node) */ static void stat_turn_into_id(void *ctx, ir_node *node) { + (void) ctx; if (! status->stat_options) return; @@ -1312,7 +1541,38 @@ static void stat_turn_into_id(void *ctx, ir_node *node) cnt_inc(&entry->into_Id); } STAT_LEAVE; -} /* stat_turn_into_id */ +} /* stat_turn_into_id */ + +/** + * Hook: A node is normalized + * + * @param ctx the hook context + * @param node the IR node that was normalized + */ +static void stat_normalize(void *ctx, ir_node *node) +{ + (void) ctx; + if (! status->stat_options) + return; + + STAT_ENTER; + { + node_entry_t *entry; + graph_entry_t *graph; + ir_op *op = stat_get_irn_op(node); + + /* increase global value */ + graph = graph_get_entry(NULL, status->irg_hash); + entry = opcode_get_entry(op, graph->opcode_hash); + cnt_inc(&entry->normalized); + + /* increase local value */ + graph = graph_get_entry(current_ir_graph, status->irg_hash); + entry = opcode_get_entry(op, graph->opcode_hash); + cnt_inc(&entry->normalized); + } + STAT_LEAVE; +} /* stat_normalize */ /** * Hook: A new graph was created @@ -1321,8 +1581,9 @@ static void stat_turn_into_id(void *ctx, ir_node *node) * @param irg the new IR graph that was created * @param ent the entity of this graph */ -static void stat_new_graph(void *ctx, ir_graph *irg, entity *ent) +static void stat_new_graph(void *ctx, ir_graph *irg, ir_entity *ent) { + (void) ctx; if (! status->stat_options) return; @@ -1337,10 +1598,11 @@ static void stat_new_graph(void *ctx, ir_graph *irg, entity *ent) graph->is_leaf_call = 0; graph->is_recursive = 0; graph->is_chain_call = 0; + graph->is_strict = 1; graph->is_analyzed = 0; } STAT_LEAVE; -} /* stat_new_graph */ +} /* stat_new_graph */ /** * Hook: A graph will be deleted @@ -1354,6 +1616,7 @@ static void stat_new_graph(void *ctx, ir_graph *irg, entity *ent) */ static void stat_free_graph(void *ctx, ir_graph *irg) { + (void) ctx; if (! status->stat_options) return; @@ -1370,7 +1633,7 @@ static void stat_free_graph(void *ctx, ir_graph *irg) } /* if */ } STAT_LEAVE; -} /* stat_free_graph */ +} /* stat_free_graph */ /** * Hook: A walk over a graph is initiated. Do not count walks from statistic code. @@ -1382,6 +1645,9 @@ static void stat_free_graph(void *ctx, ir_graph *irg) */ static void stat_irg_walk(void *ctx, ir_graph *irg, generic_func *pre, generic_func *post) { + (void) ctx; + (void) pre; + (void) post; if (! status->stat_options) return; @@ -1389,10 +1655,10 @@ static void stat_irg_walk(void *ctx, ir_graph *irg, generic_func *pre, generic_f { graph_entry_t *graph = graph_get_entry(irg, status->irg_hash); - cnt_inc(&graph->cnt_walked); + cnt_inc(&graph->cnt[gcnt_acc_walked]); } STAT_LEAVE; -} /* stat_irg_walk */ +} /* stat_irg_walk */ /** * Hook: A walk over a graph in block-wise order is initiated. Do not count walks from statistic code. 
@@ -1406,7 +1672,7 @@ static void stat_irg_walk_blkwise(void *ctx, ir_graph *irg, generic_func *pre, g { /* for now, do NOT differentiate between blockwise and normal */ stat_irg_walk(ctx, irg, pre, post); -} /* stat_irg_walk_blkwise */ +} /* stat_irg_walk_blkwise */ /** * Hook: A walk over the graph's blocks is initiated. Do not count walks from statistic code. @@ -1419,6 +1685,10 @@ static void stat_irg_walk_blkwise(void *ctx, ir_graph *irg, generic_func *pre, g */ static void stat_irg_block_walk(void *ctx, ir_graph *irg, ir_node *node, generic_func *pre, generic_func *post) { + (void) ctx; + (void) node; + (void) pre; + (void) post; if (! status->stat_options) return; @@ -1426,25 +1696,31 @@ static void stat_irg_block_walk(void *ctx, ir_graph *irg, ir_node *node, generic { graph_entry_t *graph = graph_get_entry(irg, status->irg_hash); - cnt_inc(&graph->cnt_walked_blocks); + cnt_inc(&graph->cnt[gcnt_acc_walked_blocks]); } STAT_LEAVE; -} /* stat_irg_block_walk */ +} /* stat_irg_block_walk */ /** * Called for every node that is removed due to an optimization. * * @param n the IR node that will be removed * @param hmap the hash map containing ir_op* -> opt_entry_t* + * @param kind the optimization kind */ -static void removed_due_opt(ir_node *n, hmap_opt_entry_t *hmap) +static void removed_due_opt(ir_node *n, hmap_opt_entry_t *hmap, hook_opt_kind kind) { - ir_op *op = stat_get_irn_op(n); - opt_entry_t *entry = opt_get_entry(op, hmap); + opt_entry_t *entry; + ir_op *op = stat_get_irn_op(n); + + /* ignore CSE for Constants */ + if (kind == HOOK_OPT_CSE && (is_Const(n) || is_SymConst(n))) + return; /* increase global value */ + entry = opt_get_entry(op, hmap); cnt_inc(&entry->count); -} /* removed_due_opt */ +} /* removed_due_opt */ /** * Hook: Some nodes were optimized into some others due to an optimization. @@ -1457,6 +1733,7 @@ static void stat_merge_nodes( ir_node **old_node_array, int old_num_entries, hook_opt_kind opt) { + (void) ctx; if (! status->stat_options) return; @@ -1465,33 +1742,34 @@ static void stat_merge_nodes( int i, j; graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash); + cnt_inc(&status->num_opts[opt]); if (status->reassoc_run) opt = HOOK_OPT_REASSOC; for (i = 0; i < old_num_entries; ++i) { + /* nodes might be in new and old, so if we found a node + in both sets, this one is NOT removed */ for (j = 0; j < new_num_entries; ++j) { if (old_node_array[i] == new_node_array[j]) break; + } /* for */ + if (j >= new_num_entries) { + int xopt = opt; - /* nodes might be in new and old, these are NOT removed */ - if (j >= new_num_entries) { - int xopt = opt; - - /* sometimes we did not detect, that it is replaced by a Const */ - if (opt == HOOK_OPT_CONFIRM && new_num_entries == 1) { - ir_op *op = get_irn_op(new_node_array[0]); - - if (op == op_Const || op == op_SymConst) - xopt = HOOK_OPT_CONFIRM_C; - } /* if */ + /* sometimes we did not detect, that it is replaced by a Const */ + if (opt == HOOK_OPT_CONFIRM && new_num_entries == 1) { + ir_op *op = get_irn_op(new_node_array[0]); - removed_due_opt(old_node_array[i], graph->opt_hash[xopt]); + if (op == op_Const || op == op_SymConst) + xopt = HOOK_OPT_CONFIRM_C; } /* if */ - } /* for */ + + removed_due_opt(old_node_array[i], graph->opt_hash[xopt], (hook_opt_kind)xopt); + } /* if */ } /* for */ } STAT_LEAVE; -} /* stat_merge_nodes */ +} /* stat_merge_nodes */ /** * Hook: Reassociation is started/stopped. 
@@ -1501,6 +1779,7 @@ static void stat_merge_nodes( */ static void stat_reassociate(void *ctx, int flag) { + (void) ctx; if (! status->stat_options) return; @@ -1509,7 +1788,7 @@ static void stat_reassociate(void *ctx, int flag) status->reassoc_run = flag; } STAT_LEAVE; -} /* stat_reassociate */ +} /* stat_reassociate */ /** * Hook: A node was lowered into other nodes @@ -1519,6 +1798,7 @@ static void stat_reassociate(void *ctx, int flag) */ static void stat_lower(void *ctx, ir_node *node) { + (void) ctx; if (! status->stat_options) return; @@ -1526,10 +1806,10 @@ static void stat_lower(void *ctx, ir_node *node) { graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash); - removed_due_opt(node, graph->opt_hash[HOOK_LOWERED]); + removed_due_opt(node, graph->opt_hash[HOOK_LOWERED], HOOK_LOWERED); } STAT_LEAVE; -} /* stat_lower */ +} /* stat_lower */ /** * Hook: A graph was inlined. @@ -1541,6 +1821,7 @@ static void stat_lower(void *ctx, ir_node *node) */ static void stat_inline(void *ctx, ir_node *call, ir_graph *called_irg) { + (void) ctx; if (! status->stat_options) return; @@ -1550,11 +1831,11 @@ static void stat_inline(void *ctx, ir_node *call, ir_graph *called_irg) graph_entry_t *i_graph = graph_get_entry(called_irg, status->irg_hash); graph_entry_t *graph = graph_get_entry(irg, status->irg_hash); - cnt_inc(&graph->cnt_got_inlined); - cnt_inc(&i_graph->cnt_was_inlined); + cnt_inc(&graph->cnt[gcnt_acc_got_inlined]); + cnt_inc(&i_graph->cnt[gcnt_acc_was_inlined]); } STAT_LEAVE; -} /* stat_inline */ +} /* stat_inline */ /** * Hook: A graph with tail-recursions was optimized. @@ -1563,6 +1844,7 @@ static void stat_inline(void *ctx, ir_node *call, ir_graph *called_irg) */ static void stat_tail_rec(void *ctx, ir_graph *irg, int n_calls) { + (void) ctx; if (! status->stat_options) return; @@ -1573,7 +1855,7 @@ static void stat_tail_rec(void *ctx, ir_graph *irg, int n_calls) graph->num_tail_recursion += n_calls; } STAT_LEAVE; -} /* stat_tail_rec */ +} /* stat_tail_rec */ /** * Strength reduction was performed on an iteration variable. @@ -1582,18 +1864,19 @@ static void stat_tail_rec(void *ctx, ir_graph *irg, int n_calls) */ static void stat_strength_red(void *ctx, ir_graph *irg, ir_node *strong) { + (void) ctx; if (! status->stat_options) return; STAT_ENTER; { graph_entry_t *graph = graph_get_entry(irg, status->irg_hash); - cnt_inc(&graph->cnt_strength_red); + cnt_inc(&graph->cnt[gcnt_acc_strength_red]); - removed_due_opt(strong, graph->opt_hash[HOOK_OPT_STRENGTH_RED]); + removed_due_opt(strong, graph->opt_hash[HOOK_OPT_STRENGTH_RED], HOOK_OPT_STRENGTH_RED); } STAT_LEAVE; -} /* stat_strength_red */ +} /* stat_strength_red */ /** * Hook: Start/Stop the dead node elimination. @@ -1602,14 +1885,13 @@ static void stat_strength_red(void *ctx, ir_graph *irg, ir_node *strong) */ static void stat_dead_node_elim(void *ctx, ir_graph *irg, int start) { + (void) ctx; + (void) irg; if (! status->stat_options) return; - if (start) - ++status->in_dead_node_elim; - else - --status->in_dead_node_elim; -} /* stat_dead_node_elim */ + status->in_dead_node_elim = (start != 0); +} /* stat_dead_node_elim */ /** * Hook: if-conversion was tried. @@ -1617,6 +1899,10 @@ static void stat_dead_node_elim(void *ctx, ir_graph *irg, int start) static void stat_if_conversion(void *context, ir_graph *irg, ir_node *phi, int pos, ir_node *mux, if_result_t reason) { + (void) context; + (void) phi; + (void) pos; + (void) mux; if (! 
status->stat_options) return; @@ -1624,16 +1910,18 @@ static void stat_if_conversion(void *context, ir_graph *irg, ir_node *phi, { graph_entry_t *graph = graph_get_entry(irg, status->irg_hash); - cnt_inc(&graph->cnt_if_conv[reason]); + cnt_inc(&graph->cnt[gcnt_if_conv + reason]); } STAT_LEAVE; -} /* stat_if_conversion */ +} /* stat_if_conversion */ /** * Hook: real function call was optimized. */ static void stat_func_call(void *context, ir_graph *irg, ir_node *call) { + (void) context; + (void) call; if (! status->stat_options) return; @@ -1641,10 +1929,10 @@ static void stat_func_call(void *context, ir_graph *irg, ir_node *call) { graph_entry_t *graph = graph_get_entry(irg, status->irg_hash); - cnt_inc(&graph->cnt_real_func_call); + cnt_inc(&graph->cnt[gcnt_acc_real_func_call]); } STAT_LEAVE; -} /* stat_func_call */ +} /* stat_func_call */ /** * Hook: A multiply was replaced by a series of Shifts/Adds/Subs. @@ -1653,16 +1941,17 @@ static void stat_func_call(void *context, ir_graph *irg, ir_node *call) */ static void stat_arch_dep_replace_mul_with_shifts(void *ctx, ir_node *mul) { + (void) ctx; if (! status->stat_options) return; STAT_ENTER; { graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash); - removed_due_opt(mul, graph->opt_hash[HOOK_OPT_ARCH_DEP]); + removed_due_opt(mul, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP); } STAT_LEAVE; -} /* stat_arch_dep_replace_mul_with_shifts */ +} /* stat_arch_dep_replace_mul_with_shifts */ /** * Hook: A division by const was replaced. @@ -1672,16 +1961,17 @@ static void stat_arch_dep_replace_mul_with_shifts(void *ctx, ir_node *mul) */ static void stat_arch_dep_replace_division_by_const(void *ctx, ir_node *node) { + (void) ctx; if (! status->stat_options) return; STAT_ENTER; { graph_entry_t *graph = graph_get_entry(current_ir_graph, status->irg_hash); - removed_due_opt(node, graph->opt_hash[HOOK_OPT_ARCH_DEP]); + removed_due_opt(node, graph->opt_hash[HOOK_OPT_ARCH_DEP], HOOK_OPT_ARCH_DEP); } STAT_LEAVE; -} /* stat_arch_dep_replace_division_by_const */ +} /* stat_arch_dep_replace_division_by_const */ /* * Update the register pressure of a block. @@ -1703,8 +1993,7 @@ void stat_be_block_regpressure(ir_graph *irg, ir_node *block, int pressure, cons reg_pressure_entry_t *rp_ent; block_ent = be_block_get_entry(&status->be_data, get_irn_node_nr(block), graph->be_block_hash); - rp_ent = obstack_alloc(&status->be_data, sizeof(*rp_ent)); - memset(rp_ent, 0, sizeof(*rp_ent)); + rp_ent = OALLOCZ(&status->be_data, reg_pressure_entry_t); rp_ent->class_name = class_name; rp_ent->pressure = pressure; @@ -1712,7 +2001,7 @@ void stat_be_block_regpressure(ir_graph *irg, ir_node *block, int pressure, cons pset_insert(block_ent->reg_pressure, rp_ent, HASH_PTR(class_name)); } STAT_LEAVE; -} /* stat_be_block_regpressure */ +} /* stat_be_block_regpressure */ /** * Update the distribution of ready nodes of a block @@ -1737,7 +2026,7 @@ void stat_be_block_sched_ready(ir_graph *irg, ir_node *block, int num_ready) stat_inc_int_distrib_tbl(block_ent->sched_ready, num_ready); } STAT_LEAVE; -} /* stat_be_block_sched_ready */ +} /* stat_be_block_sched_ready */ /** * Update the permutation statistic of a block. @@ -1773,7 +2062,7 @@ void stat_be_block_stat_perm(const char *class_name, int n_regs, ir_node *perm, ps_ent->real_size = real_size; } STAT_LEAVE; -} /* stat_be_block_stat_perm */ +} /* stat_be_block_stat_perm */ /** * Update the permutation statistic of a single perm. 
@@ -1811,14 +2100,14 @@ void stat_be_block_stat_permcycle(const char *class_name, ir_node *perm, ir_node } /* if */ } STAT_LEAVE; -} /* stat_be_block_stat_permcycle */ +} /* stat_be_block_stat_permcycle */ /* Dumps a statistics snapshot. */ void stat_dump_snapshot(const char *name, const char *phase) { char fname[2048]; const char *p; - int l; + size_t l; if (! status->stat_options) return; @@ -1853,7 +2142,7 @@ void stat_dump_snapshot(const char *name, const char *phase) ++p; l = p - name; - if (l > sizeof(fname) - 1) + if (l > (int) (sizeof(fname) - 1)) l = sizeof(fname) - 1; memcpy(fname, name, l); @@ -1862,15 +2151,16 @@ void stat_dump_snapshot(const char *name, const char *phase) fname[0] = '\0'; p = name; } /* if */ - strncat(fname, "firmstat-", sizeof(fname)); - strncat(fname, phase, sizeof(fname)); - strncat(fname, "-", sizeof(fname)); - strncat(fname, p, sizeof(fname)); + strncat(fname, "firmstat-", sizeof(fname)-1); + strncat(fname, phase, sizeof(fname)-1); + strncat(fname, "-", sizeof(fname)-1); + strncat(fname, p, sizeof(fname)-1); stat_dump_init(fname); /* calculate the graph statistics */ - for (entry = pset_first(status->irg_hash); entry; entry = pset_next(status->irg_hash)) { + for (entry = (graph_entry_t*)pset_first(status->irg_hash); + entry != NULL; entry = (graph_entry_t*)pset_next(status->irg_hash)) { if (entry->irg == NULL) { /* special entry for the global count */ continue; @@ -1883,13 +2173,14 @@ void stat_dump_snapshot(const char *name, const char *phase) /* some calculations are dependent, we pushed them on the wait_q */ while (! pdeq_empty(status->wait_q)) { - entry = pdeq_getr(status->wait_q); + entry = (graph_entry_t*)pdeq_getr(status->wait_q); update_graph_stat_2(global, entry); } /* while */ /* dump per graph */ - for (entry = pset_first(status->irg_hash); entry; entry = pset_next(status->irg_hash)) { + for (entry = (graph_entry_t*)pset_first(status->irg_hash); + entry != NULL; entry = (graph_entry_t*)pset_next(status->irg_hash)) { if (entry->irg == NULL) { /* special entry for the global count */ continue; @@ -1913,23 +2204,87 @@ void stat_dump_snapshot(const char *name, const char *phase) if (status->stat_options & FIRMSTAT_COUNT_CONSTS) stat_dump_consts(&status->const_info); + /* dump the parameter distribution */ + stat_dump_param_tbl(status->dist_param_cnt, global); + + /* dump the optimization counter and clear them */ + stat_dump_opt_cnt(status->num_opts, ARRAY_SIZE(status->num_opts)); + clear_optimization_counter(); + stat_dump_finish(); - stat_finish_pattern_history(); + stat_finish_pattern_history(fname); - /* clear the global counter here */ + /* clear the global counters here */ { node_entry_t *entry; - for (entry = pset_first(global->opcode_hash); entry; entry = pset_next(global->opcode_hash)) { + for (entry = (node_entry_t*)pset_first(global->opcode_hash); + entry != NULL; entry = (node_entry_t*)pset_next(global->opcode_hash)) { opcode_clear_entry(entry); } /* for */ /* clear all global counter */ - graph_clear_entry(global, 1); + graph_clear_entry(global, /*all=*/1); } } STAT_LEAVE; -} /* stat_dump_snapshot */ +} /* stat_dump_snapshot */ + +typedef struct pass_t { + ir_prog_pass_t pass; + const char *fname; + const char *phase; +} pass_t; + +/** + * Wrapper to run stat_dump_snapshot() as a ir_prog wrapper. 
+ */
+static int stat_dump_snapshot_wrapper(ir_prog *irp, void *context)
+{
+	pass_t *pass = (pass_t*)context;
+
+	(void)irp;
+	stat_dump_snapshot(pass->fname, pass->phase);
+	return 0;
+} /* stat_dump_snapshot_wrapper */
+
+/**
+ * Ensure that no verifier is run from the wrapper.
+ */
+static int no_verify(ir_prog *prog, void *ctx)
+{
+	(void)prog;
+	(void)ctx;
+	return 0;
+}
+
+/**
+ * Ensure that no dumper is run from the wrapper.
+ */
+static void no_dump(ir_prog *prog, void *ctx, unsigned idx)
+{
+	(void)prog;
+	(void)ctx;
+	(void)idx;
+}
+
+/* create an ir_prog pass */
+ir_prog_pass_t *stat_dump_snapshot_pass(
+	const char *name, const char *fname, const char *phase)
+{
+	pass_t *pass = XMALLOCZ(pass_t);
+
+	def_prog_pass_constructor(
+		&pass->pass, name ? name : "stat_snapshot", stat_dump_snapshot_wrapper);
+	pass->fname = fname;
+	pass->phase = phase;
+
+	/* no dump/verify */
+	pass->pass.dump_irprog = no_dump;
+	pass->pass.verify_irprog = no_verify;
+
+	return &pass->pass;
+} /* stat_dump_snapshot_pass */
 
 /** the hook entries for the Firm statistics module */
 static hook_entry_t stat_hooks[hook_last];
@@ -1945,8 +2300,7 @@ void firm_init_stat(unsigned enable_options)
 	if (! (enable_options & FIRMSTAT_ENABLED))
 		return;
 
-	status = xmalloc(sizeof(*status));
-	memset(status, 0, sizeof(*status));
+	status = XMALLOCZ(stat_info_t);
 
 	/* enable statistics */
 	status->stat_options = enable_options & FIRMSTAT_ENABLED ? enable_options : 0;
@@ -1956,6 +2310,7 @@ void firm_init_stat(unsigned enable_options)
 	HOOK(hook_free_ir_op, stat_free_ir_op);
 	HOOK(hook_new_node, stat_new_node);
 	HOOK(hook_turn_into_id, stat_turn_into_id);
+	HOOK(hook_normalize, stat_normalize);
 	HOOK(hook_new_graph, stat_new_graph);
 	HOOK(hook_free_graph, stat_free_graph);
 	HOOK(hook_irg_walk, stat_irg_walk);
@@ -2004,16 +2359,12 @@ void firm_init_stat(unsigned enable_options)
 		_op_ModC.code = --num;
 		_op_ModC.name = new_id_from_chars(X("ModC"));
 
-		_op_DivModC.code = --num;
-		_op_DivModC.name = new_id_from_chars(X("DivModC"));
-
 		status->op_Phi0 = &_op_Phi0;
 		status->op_PhiM = &_op_PhiM;
 		status->op_ProjM = &_op_ProjM;
 		status->op_MulC = &_op_MulC;
 		status->op_DivC = &_op_DivC;
 		status->op_ModC = &_op_ModC;
-		status->op_DivModC = &_op_DivModC;
 	} else {
 		status->op_Phi0 = NULL;
 		status->op_PhiM = NULL;
@@ -2021,7 +2372,6 @@ void firm_init_stat(unsigned enable_options)
 		status->op_MulC = NULL;
 		status->op_DivC = NULL;
 		status->op_ModC = NULL;
-		status->op_DivModC = NULL;
 	} /* if */
 
 	/* for Florian: count the Sel depth */
@@ -2052,14 +2402,20 @@ void firm_init_stat(unsigned enable_options)
 	if (enable_options & FIRMSTAT_COUNT_CONSTS)
 		stat_init_const_cnt(status);
 
+	/* distribution table for parameter counts */
+	status->dist_param_cnt = stat_new_int_distrib_tbl();
+
+	clear_optimization_counter();
+
 #undef HOOK
 #undef X
-} /* firm_init_stat */
+} /* firm_init_stat */
 
 /**
 * Frees all dumper structures.
 */
-static void stat_term_dumper(void) {
+static void stat_term_dumper(void)
+{
 	dumper_t *dumper, *next_dumper;
 
 	for (dumper = status->dumper; dumper; /* iteration done in loop body */ ) {
@@ -2070,11 +2426,12 @@ static void stat_term_dumper(void) {
 		free(dumper);
 		dumper = next_dumper;
 	} /* for */
-} /* stat_term_dumper */
+} /* stat_term_dumper */
 
 /* Terminates the statistics module, frees all memory.
*/ -void stat_term(void) { +void stat_term(void) +{ if (status != (stat_info_t *)&status_disable) { obstack_free(&status->be_data, NULL); obstack_free(&status->cnts, NULL); @@ -2084,22 +2441,10 @@ void stat_term(void) { xfree(status); status = (stat_info_t *)&status_disable; } -} /* stat_term */ +} /* stat_term */ /* returns 1 if statistics were initialized, 0 otherwise */ -int stat_is_active(void) { +int stat_is_active(void) +{ return status != (stat_info_t *)&status_disable; -} /* stat_is_active */ - -#else - -/* initialize the statistics module. */ -void firm_init_stat(unsigned enable_options) {} - -/* Dumps a statistics snapshot */ -void stat_dump_snapshot(const char *name, const char *phase) {} - -/* terminates the statistics module, frees all memory */ -void stat_term(void); - -#endif /* FIRM_STATISTICS */ +} /* stat_is_active */
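
Usage sketch (not part of the patch above): the statistics entry points touched in this file can be driven directly from a frontend. The umbrella header name and the driver function are assumptions; only the firmstat calls themselves are taken from this file.

#include <firm.h>  /* assumed umbrella header; the firmstat prototypes live in firmstat.h */

static void compile_with_stats(const char *srcfile)
{
	/* register the statistic hooks before any ir_graph is built,
	 * otherwise node/graph creation events are not counted */
	firm_init_stat(FIRMSTAT_ENABLED | FIRMSTAT_COUNT_CONSTS);

	/* ... construct ir_graphs and run optimizations here ... */

	if (stat_is_active()) {
		/* writes a "firmstat-<phase>-<basename>" dump, see stat_dump_snapshot() */
		stat_dump_snapshot(srcfile, "opt");
	}

	/* alternatively, stat_dump_snapshot_pass(NULL, srcfile, "opt") wraps the
	 * same dump as an ir_prog pass for pass-manager driven compilation */

	stat_term();  /* frees all statistic data */
}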