+/**
+ * Calculates how many arguments of the call are const, updates
+ * param distribution.
+ */
+static void analyse_params_of_Call(graph_entry_t *graph, ir_node *call)
+{
+ int i, num_const_args = 0, num_local_adr = 0;
+ int n = get_Call_n_params(call);
+
+ for (i = 0; i < n; ++i) {
+ ir_node *param = get_Call_param(call, i);
+
+ if (is_irn_constlike(param))
+ ++num_const_args;
+ else if (is_Sel(param)) {
+ ir_node *base = param;
+
+ do {
+ base = get_Sel_ptr(base);
+ } while (is_Sel(base));
+
+ if (base == get_irg_frame(current_ir_graph))
+ ++num_local_adr;
+ }
+
+ } /* for */
+
+ if (num_const_args > 0)
+ cnt_inc(&graph->cnt[gcnt_call_with_cnst_arg]);
+ if (num_const_args == n)
+ cnt_inc(&graph->cnt[gcnt_call_with_all_cnst_arg]);
+ if (num_local_adr > 0)
+ cnt_inc(&graph->cnt[gcnt_call_with_local_adr]);
+
+ stat_inc_int_distrib_tbl(status->dist_param_cnt, n);
+} /* analyse_params_of_Call */
+
/**
 * Update info on calls.
 *
 * Counts the call, classifies it (external / recursive / indirect),
 * checks the chain-call property and the leaf state of the callee.
 *
 * @param call   The call
 * @param graph  The graph entry containing the call
 */
static void stat_update_call(ir_node *call, graph_entry_t *graph)
{
	ir_node   *block  = get_nodes_block(call);
	ir_node   *ptr    = get_Call_ptr(call);
	ir_entity *ent    = NULL;
	ir_graph  *callee = NULL;

	/*
	 * If the block is Bad, the whole subgraph will collapse later,
	 * so do not count this call.
	 * This happens in dead code.
	 */
	if (is_Bad(block))
		return;

	cnt_inc(&graph->cnt[gcnt_all_calls]);

	/* found a call, this function is not a leaf */
	graph->is_leaf = 0;

	if (is_SymConst(ptr)) {
		if (get_SymConst_kind(ptr) == symconst_addr_ent) {
			/* ok, we seem to know the called entity */
			ent    = get_SymConst_entity(ptr);
			callee = get_entity_irg(ent);

			/* the graph is recursive if it calls itself at least once */
			if (callee == graph->irg)
				graph->is_recursive = 1;
			/* no graph for the entity: call to an external function */
			if (callee == NULL)
				cnt_inc(&graph->cnt[gcnt_external_calls]);
		} /* if */
	} else {
		/* indirect call: we cannot predict the callee */
		cnt_inc(&graph->cnt[gcnt_indirect_calls]);

		/* NOT a leaf call */
		graph->is_leaf_call = LCS_NON_LEAF_CALL;
	} /* if */

	/* check, if it's a chain-call: Then, the call-block
	 * must dominate the end block. */
	{
		ir_node *curr = get_irg_end_block(graph->irg);
		int depth = get_Block_dom_depth(block);

		/* climb the idom chain from the end block; stop once we hit the
		 * call block or descend below its dominator depth */
		for (; curr != block && get_Block_dom_depth(curr) > depth;) {
			curr = get_Block_idom(curr);

			if (! curr || !is_Block(curr))
				break;
		} /* for */

		/* end block not dominated by the call block: not a chain call */
		if (curr != block)
			graph->is_chain_call = 0;
	}

	/* check, if the callee is a leaf */
	if (callee) {
		graph_entry_t *called = graph_get_entry(callee, status->irg_hash);

		if (called->is_analyzed) {
			if (! called->is_leaf)
				graph->is_leaf_call = LCS_NON_LEAF_CALL;
		} /* if */
	} /* if */

	analyse_params_of_Call(graph, call);
} /* stat_update_call */
+
+/**
+ * Update info on calls for graphs on the wait queue.
+ */
+static void stat_update_call_2(ir_node *call, graph_entry_t *graph)
+{
+ ir_node *block = get_nodes_block(call);
+ ir_node *ptr = get_Call_ptr(call);
+ ir_entity *ent = NULL;
+ ir_graph *callee = NULL;
+
+ /*
+ * If the block is bad, the whole subgraph will collapse later
+ * so do not count this call.
+ * This happens in dead code.
+ */
+ if (is_Bad(block))
+ return;
+
+ if (is_SymConst(ptr)) {
+ if (get_SymConst_kind(ptr) == symconst_addr_ent) {
+ /* ok, we seems to know the entity */
+ ent = get_SymConst_entity(ptr);
+ callee = get_entity_irg(ent);
+ } /* if */
+ } /* if */
+
+ /* check, if the callee is a leaf */
+ if (callee) {
+ graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
+
+ assert(called->is_analyzed);
+
+ if (! called->is_leaf)
+ graph->is_leaf_call = LCS_NON_LEAF_CALL;
+ } else
+ graph->is_leaf_call = LCS_NON_LEAF_CALL;
+} /* stat_update_call_2 */
+
+/**
+ * Find the base address and entity of an Sel node.
+ *
+ * @param sel the node
+ *
+ * @return the base address.
+ */
+static ir_node *find_base_adr(ir_node *sel)
+{
+ ir_node *ptr = get_Sel_ptr(sel);
+
+ while (is_Sel(ptr)) {
+ sel = ptr;
+ ptr = get_Sel_ptr(sel);
+ }
+ return ptr;
+} /* find_base_adr */
+
+/**
+ * Update info on Load/Store address statistics.
+ */
+static void stat_update_address(ir_node *node, graph_entry_t *graph)
+{
+ unsigned opc = get_irn_opcode(node);
+ ir_node *base;
+ ir_graph *irg;
+
+ switch (opc) {
+ case iro_SymConst:
+ /* a global address */
+ cnt_inc(&graph->cnt[gcnt_global_adr]);
+ break;
+ case iro_Sel:
+ base = find_base_adr(node);
+ irg = current_ir_graph;
+ if (base == get_irg_frame(irg)) {
+ /* a local Variable. */
+ cnt_inc(&graph->cnt[gcnt_local_adr]);
+ } else {
+ /* Pointer access */
+ if (is_Proj(base) && skip_Proj(get_Proj_pred(base)) == get_irg_start(irg)) {
+ /* pointer access through parameter, check for THIS */
+ ir_entity *ent = get_irg_entity(irg);
+
+ if (ent != NULL) {
+ ir_type *ent_tp = get_entity_type(ent);
+
+ if (get_method_calling_convention(ent_tp) & cc_this_call) {
+ if (get_Proj_proj(base) == 0) {
+ /* THIS pointer */
+ cnt_inc(&graph->cnt[gcnt_this_adr]);
+ goto end_parameter;
+ } /* if */
+ } /* if */
+ } /* if */
+ /* other parameter */
+ cnt_inc(&graph->cnt[gcnt_param_adr]);
+end_parameter: ;
+ } else {
+ /* unknown Pointer access */
+ cnt_inc(&graph->cnt[gcnt_other_adr]);
+ } /* if */
+ } /* if */
+ default:
+ ;
+ } /* switch */
+} /* stat_update_address */
+
+/**
+ * Walker for reachable nodes count.
+ */
+static void update_node_stat(ir_node *node, void *env)
+{
+ graph_entry_t *graph = (graph_entry_t*)env;
+ node_entry_t *entry;
+
+ ir_op *op = stat_get_irn_op(node);
+ int i, arity = get_irn_arity(node);
+
+ entry = opcode_get_entry(op, graph->opcode_hash);
+
+ cnt_inc(&entry->cnt_alive);
+ cnt_add_i(&graph->cnt[gcnt_edges], arity);
+
+ /* count block edges */
+ undate_block_info(node, graph);
+
+ /* count extended block edges */
+ if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
+ if (graph->irg != get_const_code_irg())
+ update_extbb_info(node, graph);
+ } /* if */
+
+ /* handle statistics for special node types */
+
+ switch (op->code) {
+ case iro_Call:
+ /* check for properties that depends on calls like recursion/leaf/indirect call */
+ stat_update_call(node, graph);
+ break;
+ case iro_Load:
+ /* check address properties */
+ stat_update_address(get_Load_ptr(node), graph);
+ break;
+ case iro_Store:
+ /* check address properties */
+ stat_update_address(get_Store_ptr(node), graph);
+ break;
+ case iro_Phi:
+ /* check for non-strict Phi nodes */
+ for (i = arity - 1; i >= 0; --i) {
+ ir_node *pred = get_Phi_pred(node, i);
+ if (is_Unknown(pred)) {
+ /* found an Unknown predecessor, graph is not strict */
+ graph->is_strict = 0;
+ break;
+ }
+ }
+ default:
+ ;
+ } /* switch */
+
+ /* we want to count the constant IN nodes, not the CSE'ed constant's itself */
+ if (status->stat_options & FIRMSTAT_COUNT_CONSTS) {
+ int i;
+
+ for (i = get_irn_arity(node) - 1; i >= 0; --i) {
+ ir_node *pred = get_irn_n(node, i);
+
+ if (is_Const(pred)) {
+ /* check properties of constants */
+ stat_update_const(status, pred, graph);
+ } /* if */
+ } /* for */
+ } /* if */
+} /* update_node_stat */
+
+/**
+ * Walker for reachable nodes count for graphs on the wait_q.
+ */
+static void update_node_stat_2(ir_node *node, void *env)
+{
+ graph_entry_t *graph = (graph_entry_t*)env;
+
+ /* check for properties that depends on calls like recursion/leaf/indirect call */
+ if (is_Call(node))
+ stat_update_call_2(node, graph);
+} /* update_node_stat_2 */
+
+/**
+ * Get the current address mark.
+ */
+static unsigned get_adr_mark(graph_entry_t *graph, ir_node *node)
+{
+ address_mark_entry_t *value = (address_mark_entry_t*)set_find(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
+
+ return value ? value->mark : 0;
+} /* get_adr_mark */
+
+/**
+ * Set the current address mark.
+ */
+static void set_adr_mark(graph_entry_t *graph, ir_node *node, unsigned val)
+{
+ address_mark_entry_t *value = (address_mark_entry_t*)set_insert(graph->address_mark, &node, sizeof(*value), HASH_PTR(node));
+
+ value->mark = val;
+} /* set_adr_mark */
+
+#undef DUMP_ADR_MODE
+
+#ifdef DUMP_ADR_MODE
+/**
+ * a vcg attribute hook: Color a node with a different color if
+ * it's identified as a part of an address expression or at least referenced
+ * by an address expression.
+ */
static int stat_adr_mark_hook(FILE *F, ir_node *node, ir_node *local)
{
	ir_node *n = local ? local : node;
	ir_graph *irg = get_irn_irg(n);
	graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
	unsigned mark = get_adr_mark(graph, n);

	/* pure address calculation */
	if (mark & MARK_ADDRESS_CALC)
		fprintf(F, "color: purple");
	/* referenced ONLY by address calculations */
	else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
		fprintf(F, "color: pink");
	/* referenced by both address and non-address users */
	else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
		fprintf(F, "color: lightblue");
	else
		return 0;

	/* I know the color! */
	return 1;
} /* stat_adr_mark_hook */
+#endif /* DUMP_ADR_MODE */
+
+/**
+ * Return the "operational" mode of a Firm node.
+ */
+static ir_mode *get_irn_op_mode(ir_node *node)
+{
+ switch (get_irn_opcode(node)) {
+ case iro_Load:
+ return get_Load_mode(node);
+ case iro_Store:
+ return get_irn_mode(get_Store_value(node));
+ case iro_Div:
+ return get_irn_mode(get_Div_left(node));
+ case iro_Mod:
+ return get_irn_mode(get_Mod_left(node));
+ case iro_Cmp:
+ /* Cmp is no address calculation, or is it? */
+ default:
+ return get_irn_mode(node);
+ } /* switch */
+} /* get_irn_op_mode */
+
/**
 * Post-walker that marks every node that is an address calculation.
 *
 * Users of a node must be visited first. We ensure this by
 * calling it in the post of an outs walk. This should work even in cycles,
 * while the normal pre-walk will not.
 */
static void mark_address_calc(ir_node *node, void *env)
{
	graph_entry_t *graph = (graph_entry_t*)env;
	ir_mode *mode = get_irn_op_mode(node);
	int i, n;
	unsigned mark_preds = MARK_REF_NON_ADR;

	/* nodes without a data mode never take part in an address computation */
	if (! mode_is_data(mode))
		return;

	if (mode_is_reference(mode)) {
		/* a reference is calculated here, we are sure */
		set_adr_mark(graph, node, MARK_ADDRESS_CALC);

		mark_preds = MARK_REF_ADR;
	} else {
		unsigned mark = get_adr_mark(graph, node);

		if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR) {
			/*
			 * this node has no reference mode, but is only
			 * referenced by address calculations: propagate the
			 * address property to its predecessors as well
			 */
			mark_preds = MARK_REF_ADR;
		} /* if */
	} /* if */

	/* mark all data predecessors as referenced by an address (or non-address) user */
	for (i = 0, n = get_irn_arity(node); i < n; ++i) {
		ir_node *pred = get_irn_n(node, i);

		mode = get_irn_op_mode(pred);
		if (! mode_is_data(mode))
			continue;

		set_adr_mark(graph, pred, get_adr_mark(graph, pred) | mark_preds);
	} /* for */
} /* mark_address_calc */
+
/**
 * Walker that counts the address-calculation nodes previously marked by
 * mark_address_calc(): nodes used purely for address computation go into
 * gcnt_pure_adr_ops, nodes used for both address and non-address purposes
 * into gcnt_all_adr_ops.
 *
 * (The former header comment here was a copy-paste of mark_address_calc's;
 * this function only counts, it does not mark.)
 */
static void count_adr_ops(ir_node *node, void *env)
{
	graph_entry_t *graph = (graph_entry_t*)env;
	unsigned mark = get_adr_mark(graph, node);

	if (mark & MARK_ADDRESS_CALC)
		cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
	else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
		cnt_inc(&graph->cnt[gcnt_pure_adr_ops]);
	else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
		cnt_inc(&graph->cnt[gcnt_all_adr_ops]);
} /* count_adr_ops */
+
/**
 * Called for every graph when the graph is either deleted or stat_dump_snapshot()
 * is called, must recalculate all statistic info.
 *
 * @param global The global entry
 * @param graph  The current entry
 */
static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph)
{
	node_entry_t *entry;
	int i;

	/* clear first the alive counter in the graph */
	foreach_pset(graph->opcode_hash, node_entry_t*, entry) {
		cnt_clr(&entry->cnt_alive);
	} /* foreach_pset */

	/* set pessimistic values; the node walk below may clear them */
	graph->is_leaf       = 1;
	graph->is_leaf_call  = LCS_UNKNOWN;
	graph->is_recursive  = 0;
	graph->is_chain_call = 1;
	graph->is_strict     = 1;

	/* create new block counter */
	graph->block_hash = new_pset(block_cmp, 5);

	/* we need dominator info */
	if (graph->irg != get_const_code_irg()) {
		assure_doms(graph->irg);

		if (status->stat_options & FIRMSTAT_COUNT_EXTBB) {
			/* we need extended basic blocks */
			compute_extbb(graph->irg);

			/* create new extbb counter */
			graph->extbb_hash = new_pset(block_cmp, 5);
		} /* if */
	} /* if */

	/* count the nodes in the graph */
	irg_walk_graph(graph->irg, update_node_stat, NULL, graph);

#if 0
	/* Uncomment this code if chain-call means call exactly once. */
	entry = opcode_get_entry(op_Call, graph->opcode_hash);

	/* check if we have more than 1 call */
	if (cnt_gt(entry->cnt_alive, 1))
		graph->is_chain_call = 0;
#endif

	/* recursive functions are never chain calls, leafs don't have calls */
	if (graph->is_recursive || graph->is_leaf)
		graph->is_chain_call = 0;

	/* assume we walk every graph only ONCE, we could sum here the global count */
	foreach_pset(graph->opcode_hash, node_entry_t*, entry) {
		node_entry_t *g_entry = opcode_get_entry(entry->op, global->opcode_hash);

		/* update the node counter */
		cnt_add(&g_entry->cnt_alive, &entry->cnt_alive);
	} /* foreach_pset */

	/* count the number of address calculations */
	if (graph->irg != get_const_code_irg()) {
		ir_graph *rem = current_ir_graph;

		assure_irg_outs(graph->irg);

		/* Must be done on the outs graph */
		current_ir_graph = graph->irg;
		irg_out_walk(get_irg_start(graph->irg), NULL, mark_address_calc, graph);
		current_ir_graph = rem;

#ifdef DUMP_ADR_MODE
		/* register the vcg hook and dump the graph for test */
		set_dump_node_vcgattr_hook(stat_adr_mark_hook);
		dump_ir_block_graph(graph->irg, "-adr");
		set_dump_node_vcgattr_hook(NULL);
#endif /* DUMP_ADR_MODE */

		irg_walk_graph(graph->irg, NULL, count_adr_ops, graph);
	} /* if */

	/* count the DAG's */
	if (status->stat_options & FIRMSTAT_COUNT_DAG)
		count_dags_in_graph(global, graph);

	/* calculate the patterns of this graph */
	stat_calc_pattern_history(graph->irg);

	/* a leaf function did not call others */
	if (graph->is_leaf)
		graph->is_leaf_call = LCS_NON_LEAF_CALL;
	else if (graph->is_leaf_call == LCS_UNKNOWN) {
		/* we still don't know if this graph calls leaf-functions, so enqueue */
		pdeq_putl(status->wait_q, graph);
	} /* if */

	/* we have analyzed this graph */
	graph->is_analyzed = 1;

	/* accumulate all counters into the global entry */
	for (i = 0; i < _gcnt_last; ++i)
		cnt_add(&global->cnt[i], &graph->cnt[i]);
} /* update_graph_stat */
+
+/**
+ * Called for every graph that was on the wait_q in stat_dump_snapshot()
+ * must finish all statistic info calculations.
+ *
+ * @param global The global entry
+ * @param graph The current entry
+ */
+static void update_graph_stat_2(graph_entry_t *global, graph_entry_t *graph)
+{
+ (void) global;
+ if (graph->is_deleted) {
+ /* deleted, ignore */
+ return;
+ }
+
+ if (graph->irg) {
+ /* count the nodes in the graph */
+ irg_walk_graph(graph->irg, update_node_stat_2, NULL, graph);
+
+ if (graph->is_leaf_call == LCS_UNKNOWN)
+ graph->is_leaf_call = LCS_LEAF_CALL;
+ } /* if */
+} /* update_graph_stat_2 */
+
+/**
+ * Register a dumper.
+ */
+static void stat_register_dumper(const dumper_t *dumper)
+{
+ dumper_t *p = XMALLOC(dumper_t);
+
+ memcpy(p, dumper, sizeof(*p));
+
+ p->next = status->dumper;
+ p->status = status;
+ status->dumper = p;
+
+ /* FIXME: memory leak */
+} /* stat_register_dumper */
+
+/**
+ * Dumps the statistics of an IR graph.
+ */
+static void stat_dump_graph(graph_entry_t *entry)
+{
+ dumper_t *dumper;
+
+ for (dumper = status->dumper; dumper; dumper = dumper->next) {
+ if (dumper->dump_graph)
+ dumper->dump_graph(dumper, entry);
+ } /* for */
+} /* stat_dump_graph */
+
+/**
+ * Calls all registered dumper functions.
+ */
+static void stat_dump_registered(graph_entry_t *entry)
+{
+ dumper_t *dumper;
+
+ for (dumper = status->dumper; dumper; dumper = dumper->next) {
+ if (dumper->func_map) {
+ dump_graph_FUNC func;
+
+ foreach_pset(dumper->func_map, dump_graph_FUNC, func)
+ func(dumper, entry);
+ } /* if */
+ } /* for */
+} /* stat_dump_registered */
+
+/**
+ * Dumps a constant table.
+ */
+static void stat_dump_consts(const constant_info_t *tbl)
+{
+ dumper_t *dumper;
+
+ for (dumper = status->dumper; dumper; dumper = dumper->next) {
+ if (dumper->dump_const_tbl)
+ dumper->dump_const_tbl(dumper, tbl);
+ } /* for */
+} /* stat_dump_consts */
+
+/**
+ * Dumps the parameter distribution
+ */
+static void stat_dump_param_tbl(const distrib_tbl_t *tbl, graph_entry_t *global)
+{
+ dumper_t *dumper;
+
+ for (dumper = status->dumper; dumper; dumper = dumper->next) {
+ if (dumper->dump_param_tbl)
+ dumper->dump_param_tbl(dumper, tbl, global);
+ } /* for */
+} /* stat_dump_param_tbl */
+
+/**
+ * Dumps the optimization counter
+ */
+static void stat_dump_opt_cnt(const counter_t *tbl, unsigned len)
+{
+ dumper_t *dumper;
+
+ for (dumper = status->dumper; dumper; dumper = dumper->next) {
+ if (dumper->dump_opt_cnt)
+ dumper->dump_opt_cnt(dumper, tbl, len);
+ } /* for */
+} /* stat_dump_opt_cnt */
+
+/**
+ * Initialize the dumper.
+ */
+static void stat_dump_init(const char *name)
+{
+ dumper_t *dumper;
+
+ for (dumper = status->dumper; dumper; dumper = dumper->next) {
+ if (dumper->init)
+ dumper->init(dumper, name);
+ } /* for */
+} /* stat_dump_init */
+
+/**
+ * Finish the dumper.
+ */
+static void stat_dump_finish(void)
+{
+ dumper_t *dumper;
+
+ for (dumper = status->dumper; dumper; dumper = dumper->next) {
+ if (dumper->finish)
+ dumper->finish(dumper);
+ } /* for */
+} /* stat_dump_finish */
+
+/**
+ * Register an additional function for all dumper.
+ */
+void stat_register_dumper_func(dump_graph_FUNC func)
+{
+ dumper_t *dumper;
+
+ for (dumper = status->dumper; dumper; dumper = dumper->next) {
+ if (! dumper->func_map)
+ dumper->func_map = pset_new_ptr(3);
+ pset_insert_ptr(dumper->func_map, (void*)func);
+ } /* for */
+} /* stat_register_dumper_func */
+
+/* ---------------------------------------------------------------------- */