+ opt_clear_entry(elem);
+
+ elem->op = op;
+
+ return pset_insert(hmap, elem, op->code);
+}
+
+/**
+ * Clears all counters in a block_entry_t.
+ */
+static void block_clear_entry(block_entry_t *elem)
+{
+ cnt_clr(&elem->cnt_nodes);
+ cnt_clr(&elem->cnt_edges);
+ cnt_clr(&elem->cnt_in_edges);
+ cnt_clr(&elem->cnt_out_edges);
+ cnt_clr(&elem->cnt_phi_data);
+}
+
+/**
+ * Returns the associated block_entry_t for a block,
+ * creating (and zero-initializing) a new entry if none exists yet.
+ *
+ * @param block_nr an IR block number
+ * @param hmap a hash map containing long -> block_entry_t
+ *
+ * @return the (possibly new) entry; new entries are allocated on
+ * status->cnts and live as long as that obstack
+ */
+static block_entry_t *block_get_entry(long block_nr, hmap_block_entry_t *hmap)
+{
+ block_entry_t key;
+ block_entry_t *elem;
+
+ key.block_nr = block_nr;
+
+ /* the block number itself serves as the hash value */
+ elem = pset_find(hmap, &key, block_nr);
+ if (elem)
+ return elem;
+
+ elem = obstack_alloc(&status->cnts, sizeof(*elem));
+ memset(elem, 0, sizeof(*elem));
+
+ /* initialize the counters of the new entry */
+ block_clear_entry(elem);
+
+ elem->block_nr = block_nr;
+
+ return pset_insert(hmap, elem, block_nr);
+}
+
+
+/**
+ * Returns the ir_op for an IR-node,
+ * handles special cases and returns pseudo op codes
+ * so that interesting node shapes are counted on extra counters.
+ *
+ * @param node an IR node
+ */
+static ir_op *stat_get_irn_op(ir_node *node)
+{
+ ir_op *op = get_irn_op(node);
+
+ /* NOTE(review): the Phi0 check precedes the PhiM check, so an
+ arity-0 memory Phi (if one can occur here) counts as Phi0 */
+ if (op == op_Phi && get_irn_arity(node) == 0) {
+ /* special case, a Phi0 node, count on extra counter */
+ op = status->op_Phi0 ? status->op_Phi0 : op;
+ }
+ else if (op == op_Phi && get_irn_mode(node) == mode_M) {
+ /* special case, a Memory Phi node, count on extra counter */
+ op = status->op_PhiM ? status->op_PhiM : op;
+ }
+ else if (op == op_Proj && get_irn_mode(node) == mode_M) {
+ /* special case, a Memory Proj node, count on extra counter */
+ op = status->op_ProjM ? status->op_ProjM : op;
+ }
+ else if (op == op_Mul &&
+ (get_irn_op(get_Mul_left(node)) == op_Const || get_irn_op(get_Mul_right(node)) == op_Const)) {
+ /* special case, a Multiply by a const, count on extra counter */
+ op = status->op_MulC ? status->op_MulC : op;
+ }
+ else if (op == op_Div && get_irn_op(get_Div_right(node)) == op_Const) {
+ /* special case, a division by a const, count on extra counter */
+ op = status->op_DivC ? status->op_DivC : op;
+ }
+ else if (op == op_Mod && get_irn_op(get_Mod_right(node)) == op_Const) {
+ /* special case, a modulo by a const, count on extra counter */
+ op = status->op_ModC ? status->op_ModC : op;
+ }
+ else if (op == op_DivMod && get_irn_op(get_DivMod_right(node)) == op_Const) {
+ /* special case, a division/modulo by a const, count on extra counter */
+ op = status->op_DivModC ? status->op_DivModC : op;
+ }
+ else if (op == op_Sel && get_irn_op(get_Sel_ptr(node)) == op_Sel) {
+ /* special case, a Sel of a Sel, count on extra counter */
+ op = status->op_SelSel ? status->op_SelSel : op;
+
+ if (get_irn_op(get_Sel_ptr(get_Sel_ptr(node))) == op_Sel) {
+ /* special case, a Sel of a Sel of a Sel, count on extra counter */
+ op = status->op_SelSelSel ? status->op_SelSelSel : op;
+ }
+ }
+
+ return op;
+}
+
+/**
+ * Updates the per-block counters for a node.
+ *
+ * NOTE(review): "undate" is presumably a typo for "update"; the name
+ * is kept because callers (see update_node_stat) use this spelling.
+ *
+ * @param node the IR node to account for
+ * @param graph the graph entry whose block hash is updated
+ */
+static void undate_block_info(ir_node *node, graph_entry_t *graph)
+{
+ ir_op *op = get_irn_op(node);
+ ir_node *block;
+ block_entry_t *b_entry;
+ int i, arity;
+
+ /* check for block */
+ if (op == op_Block) {
+ arity = get_irn_arity(node);
+ b_entry = block_get_entry(get_irn_node_nr(node), graph->block_hash);
+
+ /* count all incoming control-flow edges */
+ for (i = 0; i < arity; ++i) {
+ ir_node *pred = get_irn_n(node, i);
+ ir_node *other_block = get_nodes_block(pred);
+ block_entry_t *b_entry_other = block_get_entry(get_irn_node_nr(other_block), graph->block_hash);
+
+ cnt_inc(&b_entry->cnt_in_edges); /* an edge coming from another block */
+ cnt_inc(&b_entry_other->cnt_out_edges);
+ }
+ return;
+ }
+ else if (op == op_Phi && mode_is_datab(get_irn_mode(node))) {
+ /* count data Phi nodes per block; this branch deliberately falls
+ through so the Phi is also counted as a normal node below */
+ ir_node *block = get_nodes_block(node);
+ block_entry_t *b_entry = block_get_entry(get_irn_node_nr(block), graph->block_hash);
+
+ cnt_inc(&b_entry->cnt_phi_data);
+ }
+
+ block = get_nodes_block(node);
+ b_entry = block_get_entry(get_irn_node_nr(block), graph->block_hash);
+
+ /* we saw a new node in this block */
+ cnt_inc(&b_entry->cnt_nodes);
+
+ arity = get_irn_arity(node);
+
+ /* classify every data/memory dependency edge as intra- or inter-block */
+ for (i = 0; i < arity; ++i) {
+ ir_node *pred = get_irn_n(node, i);
+ ir_node *other_block;
+
+ if (get_irn_op(pred) == op_Block)
+ continue;
+
+ other_block = get_nodes_block(pred);
+
+ if (other_block == block)
+ cnt_inc(&b_entry->cnt_edges); /* an intra-block edge */
+ else {
+ block_entry_t *b_entry_other = block_get_entry(get_irn_node_nr(other_block), graph->block_hash);
+
+ cnt_inc(&b_entry->cnt_in_edges); /* an edge coming from another block */
+ cnt_inc(&b_entry_other->cnt_out_edges);
+ }
+ }
+}
+
+/**
+ * Counts how many arguments of a Call node are constants
+ * (Const or SymConst nodes).
+ *
+ * @param call the Call node to inspect
+ *
+ * @return the number of constant parameters
+ */
+static int cnt_const_args(ir_node *call)
+{
+ int const_args = 0;
+ int n_params = get_Call_n_params(call);
+ int idx;
+
+ for (idx = 0; idx < n_params; ++idx) {
+ ir_op *param_op = get_irn_op(get_Call_param(call, idx));
+
+ if (param_op == op_Const || param_op == op_SymConst)
+ ++const_args;
+ }
+ return const_args;
+}
+
+/**
+ * update info on calls
+ *
+ * Counts the call, updates the recursion/leaf/indirect-call and
+ * chain-call properties of the surrounding graph, and counts calls
+ * with constant arguments.
+ *
+ * @param call The call
+ * @param graph The graph entry containing the call
+ */
+static void stat_update_call(ir_node *call, graph_entry_t *graph)
+{
+ ir_node *block = get_nodes_block(call);
+ ir_node *ptr = get_Call_ptr(call);
+ entity *ent = NULL;
+ ir_graph *callee = NULL;
+ int num_const_args;
+
+ /*
+ * If the block is bad, the whole subgraph will collapse later
+ * so do not count this call.
+ * This happens in dead code.
+ */
+ if (is_Bad(block))
+ return;
+
+ cnt_inc(&graph->cnt_all_calls);
+
+ /* found a call, this function is not a leaf */
+ graph->is_leaf = 0;
+
+ if (get_irn_op(ptr) == op_SymConst) {
+ if (get_SymConst_kind(ptr) == symconst_addr_ent) {
+ /* ok, we seem to know the called entity */
+ ent = get_SymConst_entity(ptr);
+ callee = get_entity_irg(ent);
+
+ /* the graph is recursive if it calls itself at least once */
+ if (callee == graph->irg)
+ graph->is_recursive = 1;
+ }
+ }
+ else {
+ /* indirect call, we cannot predict the callee */
+ cnt_inc(&graph->cnt_indirect_calls);
+
+ /* NOT a leaf call */
+ graph->is_leaf_call = LCS_NON_LEAF_CALL;
+ }
+
+ /* check, if it's a chain-call: Then, the call-block
+ * must dominate the end block.
+ * Walk the idom chain upwards from the end block; if we never
+ * reach the call block, it does not dominate the end block. */
+ {
+ ir_node *curr = get_irg_end_block(graph->irg);
+ int depth = get_Block_dom_depth(block);
+
+ for (; curr != block && get_Block_dom_depth(curr) > depth;) {
+ curr = get_Block_idom(curr);
+
+ if (! curr || is_no_Block(curr))
+ break;
+ }
+
+ if (curr != block)
+ graph->is_chain_call = 0;
+ }
+
+ /* check, if the callee is a leaf */
+ if (callee) {
+ graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
+
+ if (called->is_analyzed) {
+ if (! called->is_leaf)
+ graph->is_leaf_call = LCS_NON_LEAF_CALL;
+ }
+ }
+
+ /* check, if arguments of the call are const */
+ num_const_args = cnt_const_args(call);
+
+ if (num_const_args > 0)
+ cnt_inc(&graph->cnt_call_with_cnst_arg);
+}
+
+/**
+ * Updates call info for graphs that were put on the wait queue:
+ * only the leaf-call property is resolved here. All callees must
+ * have been analyzed already.
+ *
+ * @param call the Call node
+ * @param graph the graph entry containing the call
+ */
+static void stat_update_call_2(ir_node *call, graph_entry_t *graph)
+{
+ ir_node *block = get_nodes_block(call);
+ ir_node *adr = get_Call_ptr(call);
+ ir_graph *callee = NULL;
+
+ /* calls inside a Bad block belong to dead code that will collapse
+ * later: do not count them */
+ if (is_Bad(block))
+ return;
+
+ /* the callee is only known for direct calls through a SymConst address */
+ if (get_irn_op(adr) == op_SymConst &&
+ get_SymConst_kind(adr) == symconst_addr_ent)
+ callee = get_entity_irg(get_SymConst_entity(adr));
+
+ if (callee) {
+ graph_entry_t *called = graph_get_entry(callee, status->irg_hash);
+
+ /* every graph must have been analyzed before the wait queue is processed */
+ assert(called->is_analyzed);
+
+ if (! called->is_leaf)
+ graph->is_leaf_call = LCS_NON_LEAF_CALL;
+ }
+ else {
+ /* could not determine the callee: assume the worst */
+ graph->is_leaf_call = LCS_NON_LEAF_CALL;
+ }
+}
+
+/**
+ * Walker for counting reachable nodes: updates the opcode, edge and
+ * block counters of the graph entry passed via env, and gathers
+ * const/call specific statistics.
+ *
+ * @param node the visited IR node
+ * @param env the graph_entry_t of the graph being walked
+ */
+static void update_node_stat(ir_node *node, void *env)
+{
+ graph_entry_t *graph = env;
+ node_entry_t *entry;
+
+ ir_op *op = stat_get_irn_op(node);
+ int arity = get_irn_arity(node);
+
+ entry = opcode_get_entry(op, graph->opcode_hash);
+
+ cnt_inc(&entry->cnt_alive);
+ cnt_add_i(&graph->cnt_edges, arity);
+
+ /* count block edges */
+ undate_block_info(node, graph);
+
+ /* handle statistics for special node types */
+
+ if (op == op_Const) {
+ if (status->stat_options & FIRMSTAT_COUNT_CONSTS) {
+ /* check properties of constants */
+ stat_update_const(status, node, graph);
+ }
+ }
+ else if (op == op_Call) {
+ /* check for properties that depend on calls like recursion/leaf/indirect call */
+ stat_update_call(node, graph);
+ }
+}
+
+/**
+ * Walker for graphs on the wait_q: only call-related properties
+ * (the leaf-call state) still need to be recomputed.
+ *
+ * @param node the visited IR node
+ * @param env the graph_entry_t of the graph being walked
+ */
+static void update_node_stat_2(ir_node *node, void *env)
+{
+ graph_entry_t *graph = env;
+
+ if (get_irn_op(node) != op_Call)
+ return;
+
+ /* recursion/leaf/indirect-call properties depend on Call nodes only */
+ stat_update_call_2(node, graph);
+}
+
+/**
+ * Returns the current address mark stored for a node,
+ * or 0 if no mark was set yet.
+ */
+static unsigned get_adr_mark(graph_entry_t *graph, ir_node *node)
+{
+ address_mark_entry_t *entry;
+
+ entry = set_find(graph->address_mark, &node, sizeof(*entry), HASH_PTR(node));
+ if (! entry)
+ return 0;
+ return entry->mark;
+}
+
+/**
+ * Stores the address mark for a node, creating the set entry
+ * if it does not exist yet.
+ */
+static void set_adr_mark(graph_entry_t *graph, ir_node *node, unsigned val)
+{
+ address_mark_entry_t *entry;
+
+ entry = set_insert(graph->address_mark, &node, sizeof(*entry), HASH_PTR(node));
+ entry->mark = val;
+}
+
+/**
+ * a vcg attribute hook: Color a node with a different color if
+ * it's identified as a part of an address expression or at least referenced
+ * by an address expression.
+ *
+ * @param F the vcg output file
+ * @param node the node to color
+ * @param local if non-NULL, the node actually dumped (takes precedence)
+ *
+ * @return 1 if a color attribute was written, 0 to let the caller
+ * pick the default color
+ */
+static int stat_adr_mark_hook(FILE *F, ir_node *node, ir_node *local)
+{
+ ir_node *n = local ? local : node;
+ ir_graph *irg = get_irn_irg(n);
+ graph_entry_t *graph = graph_get_entry(irg, status->irg_hash);
+ unsigned mark = get_adr_mark(graph, n);
+
+ if (mark & MARK_ADDRESS_CALC)
+ fprintf(F, "color: purple");
+ else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR)
+ fprintf(F, "color: pink");
+ else if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == (MARK_REF_ADR|MARK_REF_NON_ADR))
+ fprintf(F, "color: lightblue");
+ else
+ return 0;
+
+ /* I know the color! */
+ return 1;
+}
+
+/**
+ * walker that marks every node that is an address calculation
+ *
+ * predecessor nodes must be visited first. We ensure this by
+ * calling it in the post of an outs walk. This should work even in cycles,
+ * while the pre in a normal walk will not.
+ *
+ * @param node the visited IR node
+ * @param env the graph_entry_t of the graph being walked
+ */
+static void mark_address_calc(ir_node *node, void *env)
+{
+ graph_entry_t *graph = env;
+ ir_mode *mode = get_irn_mode(node);
+ int i, n;
+ unsigned mark_preds = MARK_REF_NON_ADR;
+
+ /* only numeric and pointer values take part in address calculations */
+ if (! mode_is_numP(mode))
+ return;
+
+ if (mode_is_reference(mode)) {
+ /* a reference is calculated here, we are sure */
+ set_adr_mark(graph, node, MARK_ADDRESS_CALC);
+
+ mark_preds = MARK_REF_ADR;
+ }
+ else {
+ unsigned mark = get_adr_mark(graph, node);
+
+ if ((mark & (MARK_REF_ADR | MARK_REF_NON_ADR)) == MARK_REF_ADR) {
+ /*
+ * this node does not have a reference mode, but is only
+ * referenced by address calculations
+ */
+ mark_preds = MARK_REF_ADR;
+ }
+ }
+
+ /* mark all predecessors */
+ for (i = 0, n = get_irn_arity(node); i < n; ++i) {
+ ir_node *pred = get_irn_n(node, i);
+
+ set_adr_mark(graph, pred, get_adr_mark(graph, pred) | mark_preds);
+ }
+}
+
+/**
+ * Called for every graph when the graph is either deleted or stat_dump_snapshot()
+ * is called, must recalculate all statistic info.
+ *
+ * @param global The global entry
+ * @param graph The current entry
+ */
+static void update_graph_stat(graph_entry_t *global, graph_entry_t *graph)
+{
+ node_entry_t *entry;
+
+ /* clear first the alive counter in the graph */
+ for (entry = pset_first(graph->opcode_hash); entry; entry = pset_next(graph->opcode_hash)) {
+ cnt_clr(&entry->cnt_alive);
+ }
+
+ /* set pessimistic values */
+ graph->is_leaf = 1;
+ graph->is_leaf_call = LCS_UNKNOWN;
+ graph->is_recursive = 0;
+ graph->is_chain_call = 1;
+
+ /* we need dominator info (the const-code irg has no blocks to dominate) */
+ if (graph->irg != get_const_code_irg())
+ if (get_irg_dom_state(graph->irg) != dom_consistent)
+ compute_doms(graph->irg);
+
+ /* count the nodes in the graph */
+ irg_walk_graph(graph->irg, update_node_stat, NULL, graph);
+
+#if 0
+ entry = opcode_get_entry(op_Call, graph->opcode_hash);
+
+ /* check if we have more than 1 call */
+ if (cnt_gt(entry->cnt_alive, 1))
+ graph->is_chain_call = 0;
+#endif
+
+ /* recursive functions are never chain calls, leafs don't have calls */
+ if (graph->is_recursive || graph->is_leaf)
+ graph->is_chain_call = 0;
+
+ /* assume we walk every graph only ONCE, we could sum here the global count */
+ for (entry = pset_first(graph->opcode_hash); entry; entry = pset_next(graph->opcode_hash)) {
+ node_entry_t *g_entry = opcode_get_entry(entry->op, global->opcode_hash);
+
+ /* update the node counter */
+ cnt_add(&g_entry->cnt_alive, &entry->cnt_alive);
+ }
+
+ /* update the edge counter */
+ cnt_add(&global->cnt_edges, &graph->cnt_edges);
+
+ /* count the number of address calculations */
+ if (graph->irg != get_const_code_irg()) {
+ ir_graph *rem = current_ir_graph;
+
+ if (get_irg_outs_state(graph->irg) != outs_consistent)
+ compute_irg_outs(graph->irg);
+
+ /* Must be done on the outs graph */
+ current_ir_graph = graph->irg;
+ irg_out_walk(get_irg_start(graph->irg), NULL, mark_address_calc, graph);
+ current_ir_graph = rem;
+
+#if 0
+ set_dump_node_vcgattr_hook(stat_adr_mark_hook);
+ dump_ir_block_graph(graph->irg, "-adr");
+ set_dump_node_vcgattr_hook(NULL);
+#endif
+ }
+
+ /* count the DAG's */
+ if (status->stat_options & FIRMSTAT_COUNT_DAG)
+ count_dags_in_graph(global, graph);
+
+ /* calculate the patterns of this graph */
+ stat_calc_pattern_history(graph->irg);
+
+ /* leaf function did not call others */
+ if (graph->is_leaf)
+ graph->is_leaf_call = LCS_NON_LEAF_CALL;
+ else if (graph->is_leaf_call == LCS_UNKNOWN) {
+ /* we still don't know if this graph calls leaf-functions, so enqueue */
+ pdeq_putl(status->wait_q, graph);
+ }
+
+ /* we have analyzed this graph */
+ graph->is_analyzed = 1;
+}
+
+/**
+ * Called for every graph that was on the wait_q in stat_dump_snapshot():
+ * finishes the statistic info by resolving the leaf-call state.
+ *
+ * @param global The global entry
+ * @param graph The current entry
+ */
+static void update_graph_stat_2(graph_entry_t *global, graph_entry_t *graph)
+{
+ /* deleted graphs are ignored */
+ if (graph->is_deleted)
+ return;
+
+ if (! graph->irg)
+ return;
+
+ /* revisit all nodes, updating only call-related information */
+ irg_walk_graph(graph->irg, update_node_stat_2, NULL, graph);
+
+ /* if nothing proved otherwise, all callees turned out to be leafs */
+ if (graph->is_leaf_call == LCS_UNKNOWN)
+ graph->is_leaf_call = LCS_LEAF_CALL;
+}
+
+/**
+ * Registers a dumper: a copy of the given dumper is pushed onto the
+ * status dumper list. The copy is never freed (see FIXME below).
+ */
+static void stat_register_dumper(const dumper_t *dumper)
+{
+ dumper_t *copy = xmalloc(sizeof(*copy));
+
+ if (copy != NULL) {
+ *copy = *dumper;
+
+ /* link the copy at the head of the singly linked dumper list */
+ copy->next = status->dumper;
+ copy->status = status;
+ status->dumper = copy;
+ }
+
+ /* FIXME: memory leak */
+}