+ return value ? value->mark : 0;
+} /* get_adr_mark */
+
+/**
+ * Set the current address mark.
+ */
+static void set_adr_mark(graph_entry_t *graph, ir_node *node, unsigned val)
+{
+ address_mark_entry_t *value = (address_mark_entry_t*)set_insert(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
+
+ value->mark = val;
+} /* set_adr_mark */
+
/* DUMP_ADR_MODE is deliberately undefined: the hook below is debug-only
 * code, compiled in only when this #undef is removed/changed. */
#undef DUMP_ADR_MODE

#ifdef DUMP_ADR_MODE
/**
 * a vcg attribute hook: Color a node with a different color if
 * it's identified as a part of an address expression or at least referenced
 * by an address expression.
 *
 * Color scheme: purple = address calculation itself, pink = referenced
 * only by address calculations, lightblue = referenced by both address
 * and non-address users.
 *
 * @return 1 if a color was emitted to @p F, 0 to fall back to the default.
 */
static int stat_adr_mark_hook(FILE *F, ir_node *node, ir_node *local)
{
	/* a non-NULL local overrides node (vcg dumper passes both) */
	ir_node *n = local ? local : node;
	ir_graph *irg = get_irn_irg(n);
	graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
	unsigned mark = get_adr_mark(graph, n);

	if (mark & MARK_ADDRESS_CALC)
		fprintf(F, "color: purple");
	else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
		fprintf(F, "color: pink");
	else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
		fprintf(F, "color: lightblue");
	else
		return 0;

	/* I know the color! */
	return 1;
}  /* stat_adr_mark_hook */
#endif /* DUMP_ADR_MODE */
+
+/**
+ * Return the "operational" mode of a Firm node.
+ */
+static ir_mode *get_irn_op_mode(ir_node *node)
+{
+ switch (get_irn_opcode(node)) {
+ case iro_Load:
+ return get_Load_mode(node);
+ case iro_Store:
+ return get_irn_mode(get_Store_value(node));
+ case iro_Div:
+ return get_irn_mode(get_Div_left(node));
+ case iro_Mod:
+ return get_irn_mode(get_Mod_left(node));
+ case iro_Cmp:
+ /* Cmp is no address calculation, or is it? */
+ default:
+ return get_irn_mode(node);
+ } /* switch */
+} /* get_irn_op_mode */
+
+/**
+ * Post-walker that marks every node that is an address calculation.
+ *
+ * Users of a node must be visited first. We ensure this by
+ * calling it in the post of an outs walk. This should work even in cycles,
+ * while the normal pre-walk will not.
+ */
+static void mark_address_calc(ir_node *node, void *env)
+{
+ graph_entry_t *graph = (graph_entry_t*)env;
+ ir_mode *mode = get_irn_op_mode(node);
+ int i, n;
+ unsigned mark_preds = MARK_REF_NON_ADR;
+
+ if (! mode_is_data(mode))
+ return;
+
+ if (mode_is_reference(mode)) {
+ /* a reference is calculated here, we are sure */
+ set_adr_mark(graph, node, MARK_ADDRESS_CALC);
+
+ mark_preds = MARK_REF_ADR;
+ } else {
+ unsigned mark = get_adr_mark(graph, node);
+
+ if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR) {
+ /*
+ * this node has no reference mode, but is only
+ * referenced by address calculations
+ */
+ mark_preds = MARK_REF_ADR;
+ } /* if */
+ } /* if */
+
+ /* mark all predecessors */
+ for (i = 0, n = get_irn_arity(node); i < n; ++i) {
+ ir_node *pred = get_irn_n(node, i);
+
+ mode = get_irn_op_mode(pred);
+ if (! mode_is_data(mode))
+ continue;
+
+ set_adr_mark(graph, pred, get_adr_mark(graph, pred) | mark_preds);
+ } /* for */
+} /* mark_address_calc */
+
+/**
+ * Post-walker that marks every node that is an address calculation.
+ *
+ * Users of a node must be visited first. We ensure this by
+ * calling it in the post of an outs walk. This should work even in cycles,
+ * while the normal pre-walk will not.
+ */
+static void count_adr_ops(ir_node *node, void *env)
+{
+ graph_entry_t *graph = (graph_entry_t*)env;
+ unsigned mark = get_adr_mark(graph, node);
+
+ if (mark & MARK_ADDRESS_CALC)
+ cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
+ else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
+ cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
+ else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
+ cnt_inc(&graph->cnt[gcnt_all_adr_ops]);
+} /* count_adr_ops */
+
/**
 * Called for every graph when the graph is either deleted or stat_dump_snapshot()
 * is called, must recalculate all statistic info.
 *
 * Recomputes per-graph counters (alive nodes, blocks, address ops, DAGs,
 * patterns) and accumulates them into @p global.
 *
 * @param global The global entry
 * @param graph The current entry
 */
static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph)
{
	node_entry_t *entry;
	int i;

	/* clear first the alive counter in the graph */
	foreach_pset(graph->opcode_hash, node_entry_t*, entry) {
		cnt_clr(&entry->cnt_alive);
	}  /* foreach_pset */

	/* set pessimistic values; the walkers below refine them */
	graph->is_leaf = 1;
	graph->is_leaf_call = LCS_UNKNOWN;
	graph->is_recursive = 0;
	graph->is_chain_call = 1;
	graph->is_strict = 1;

	/* create new block counter */
	graph->block_hash = new_pset(block_cmp, 5);

	/* we need dominator info (skipped for the pseudo const-code irg) */
	if (graph->irg != get_const_code_irg()) {
		assure_doms(graph->irg);

		if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
			/* we need extended basic blocks */
			compute_extbb(graph->irg);

			/* create new extbb counter */
			graph->extbb_hash = new_pset(block_cmp, 5);
		}  /* if */
	}  /* if */

	/* count the nodes in the graph (pre-walker only) */
	irg_walk_graph(graph->irg, update_node_stat, NULL, graph);

#if 0
	/* Uncomment this code if chain-call means call exact one. */
	entry = opcode_get_entry(op_Call, graph->opcode_hash);

	/* check if we have more than 1 call */
	if (cnt_gt(entry->cnt_alive, 1))
		graph->is_chain_call = 0;
#endif

	/* recursive functions are never chain calls, leafs don't have calls */
	if (graph->is_recursive || graph->is_leaf)
		graph->is_chain_call = 0;

	/* assume we walk every graph only ONCE, we could sum here the global count */
	foreach_pset(graph->opcode_hash, node_entry_t*, entry) {
		node_entry_t *g_entry = opcode_get_entry(entry->op, global->opcode_hash);

		/* update the node counter */
		cnt_add(&g_entry->cnt_alive, &entry->cnt_alive);
	}  /* foreach_pset */

	/* count the number of address calculation */
	if (graph->irg != get_const_code_irg()) {
		ir_graph *rem = current_ir_graph;

		assure_irg_outs(graph->irg);

		/* Must be done an the outs graph; temporarily switch
		 * current_ir_graph and restore it afterwards */
		current_ir_graph = graph->irg;
		irg_out_walk(get_irg_start(graph->irg), NULL, mark_address_calc, graph);
		current_ir_graph = rem;

#ifdef DUMP_ADR_MODE
		/* register the vcg hook and dump the graph for test */
		set_dump_node_vcgattr_hook(stat_adr_mark_hook);
		dump_ir_block_graph(graph->irg, "-adr");
		set_dump_node_vcgattr_hook(NULL);
#endif /* DUMP_ADR_MODE */

		/* counting must see the marks set by the outs walk above */
		irg_walk_graph(graph->irg, NULL, count_adr_ops, graph);
	}  /* if */

	/* count the DAG's */
	if (status->stat_options & FIRMSTAT_COUNT_DAG)
		count_dags_in_graph(global, graph);

	/* calculate the patterns of this graph */
	stat_calc_pattern_history(graph->irg);

	/* leaf function did not call others */
	if (graph->is_leaf)
		graph->is_leaf_call = LCS_NON_LEAF_CALL;
	else if (graph->is_leaf_call == LCS_UNKNOWN) {
		/* we still don't know if this graph calls leaf-functions, so enqueue */
		pdeq_putl(status->wait_q, graph);
	}  /* if */

	/* we have analyzed this graph */
	graph->is_analyzed = 1;

	/* accumulate all counter's */
	for (i = 0; i < _gcnt_last; ++i)
		cnt_add(&global->cnt[i], &graph->cnt[i]);
}  /* update_graph_stat */
+
+/**
+ * Called for every graph that was on the wait_q in stat_dump_snapshot()
+ * must finish all statistic info calculations.
+ *
+ * @param global The global entry
+ * @param graph The current entry
+ */
+static void update_graph_stat_2(graph_entry_t *global, graph_entry_t *graph)
+{
+ (void) global;
+ if (graph->is_deleted) {
+ /* deleted, ignore */
+ return;
+ }
+
+ if (graph->irg) {
+ /* count the nodes in the graph */
+ irg_walk_graph(graph->irg, update_node_stat_2, NULL, graph);
+
+ if (graph->is_leaf_call == LCS_UNKNOWN)
+ graph->is_leaf_call = LCS_LEAF_CALL;
+ } /* if */
+} /* update_graph_stat_2 */
+
+/**
+ * Register a dumper.
+ */
+static void stat_register_dumper(const dumper_t *dumper)
+{
+ dumper_t *p = XMALLOC(dumper_t);
+
+ memcpy(p, dumper, sizeof(*p));
+
+ p->next = status->dumper;
+ p->status = status;
+ status->dumper = p;
+
+ /* FIXME: memory leak */
+} /* stat_register_dumper */
+
+/**
+ * Dumps the statistics of an IR graph.
+ */
+static void stat_dump_graph(graph_entry_t *entry)
+{
+ dumper_t *dumper;
+
+ for (dumper = status->dumper; dumper; dumper = dumper->next) {
+ if (dumper->dump_graph)
+ dumper->dump_graph(dumper, entry);
+ } /* for */
+} /* stat_dump_graph */
+
+/**
+ * Calls all registered dumper functions.
+ */
+static void stat_dump_registered(graph_entry_t *entry)
+{
+ dumper_t *dumper;
+
+ for (dumper = status->dumper; dumper; dumper = dumper->next) {
+ if (dumper->func_map) {
+ dump_graph_FUNC func;
+
+ foreach_pset(dumper->func_map, dump_graph_FUNC, func)
+ func(dumper, entry);
+ } /* if */
+ } /* for */
+} /* stat_dump_registered */
+
+/**
+ * Dumps a constant table.
+ */
+static void stat_dump_consts(const constant_info_t *tbl)
+{
+ dumper_t *dumper;
+
+ for (dumper = status->dumper; dumper; dumper = dumper->next) {
+ if (dumper->dump_const_tbl)
+ dumper->dump_const_tbl(dumper, tbl);
+ } /* for */
+} /* stat_dump_consts */
+
+/**
+ * Dumps the parameter distribution
+ */
+static void stat_dump_param_tbl(const distrib_tbl_t *tbl, graph_entry_t *global)
+{
+ dumper_t *dumper;
+
+ for (dumper = status->dumper; dumper; dumper = dumper->next) {
+ if (dumper->dump_param_tbl)
+ dumper->dump_param_tbl(dumper, tbl, global);
+ } /* for */
+} /* stat_dump_param_tbl */
+
+/**
+ * Dumps the optimization counter
+ */
+static void stat_dump_opt_cnt(const counter_t *tbl, unsigned len)
+{
+ dumper_t *dumper;
+
+ for (dumper = status->dumper; dumper; dumper = dumper->next) {
+ if (dumper->dump_opt_cnt)
+ dumper->dump_opt_cnt(dumper, tbl, len);
+ } /* for */
+} /* stat_dump_opt_cnt */
+
+/**
+ * Initialize the dumper.
+ */
+static void stat_dump_init(const char *name)
+{
+ dumper_t *dumper;
+
+ for (dumper = status->dumper; dumper; dumper = dumper->next) {
+ if (dumper->init)
+ dumper->init(dumper, name);
+ } /* for */
+} /* stat_dump_init */
+
+/**
+ * Finish the dumper.
+ */
+static void stat_dump_finish(void)
+{
+ dumper_t *dumper;
+
+ for (dumper = status->dumper; dumper; dumper = dumper->next) {
+ if (dumper->finish)
+ dumper->finish(dumper);
+ } /* for */
+} /* stat_dump_finish */
+
+/**
+ * Register an additional function for all dumper.
+ */
+void stat_register_dumper_func(dump_graph_FUNC func)
+{
+ dumper_t *dumper;
+
+ for (dumper = status->dumper; dumper; dumper = dumper->next) {
+ if (! dumper->func_map)
+ dumper->func_map = pset_new_ptr(3);
+ pset_insert_ptr(dumper->func_map, (void*)func);
+ } /* for */
+} /* stat_register_dumper_func */
+
+/* ---------------------------------------------------------------------- */