+
+
+
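+/** Environment for the block walker that estimates the execution costs of a graph. */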
+typedef struct _estimate_irg_costs_env_t {
+ const arch_env_t *arch_env;
+ ir_exec_freq *execfreqs;
+ double costs;
+} estimate_irg_costs_env_t;
+
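+/** Block walker: sum the estimated costs of all nodes scheduled in the block, weighted by the block's execution frequency. */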
+static void estimate_block_costs(ir_node *block, void *data)
+{
+ estimate_irg_costs_env_t *env = data;
+ ir_node *node;
+ double costs = 0.0;
+
+ sched_foreach(block, node) {
+ costs += arch_get_op_estimated_cost(env->arch_env, node);
+ }
+
+ env->costs += costs * get_block_execfreq(env->execfreqs, block);
+}
+
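+/** Returns the estimated execution cost of the whole graph: the per-block node costs accumulated over all blocks. */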
+double be_estimate_irg_costs(ir_graph *irg, const arch_env_t *arch_env, ir_exec_freq *execfreqs)
+{
+ estimate_irg_costs_env_t env;
+
+ env.arch_env = arch_env;
+ env.execfreqs = execfreqs;
+ env.costs = 0.0;
+
+ irg_block_walk_graph(irg, estimate_block_costs, NULL, &env);
+
+ return env.costs;
+}
+
+
+
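+/* Walker state used while collecting node statistics. */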
+static const arch_env_t *arch_env;
+static be_node_stats_t *stats;
+
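+/** Walker: update the statistics counters for one node (Phis and backend node classes such as spills, reloads, remats, copies and perms). */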
+static void node_stat_walker(ir_node *irn, void *data)
+{
+ (void) data;
+
+	/* Phi nodes: count memory Phis and data Phis separately */
+	if (is_Phi(irn)) {
+ if (get_irn_mode(irn) == mode_M) {
+ (*stats)[BE_STAT_MEM_PHIS]++;
+ } else {
+ (*stats)[BE_STAT_PHIS]++;
+ }
+ } else {
+ arch_irn_class_t classify = arch_irn_classify(arch_env, irn);
+
+		if (classify & arch_irn_class_spill)
+			(*stats)[BE_STAT_SPILLS]++;
+		if (classify & arch_irn_class_reload)
+			(*stats)[BE_STAT_RELOADS]++;
+		if (classify & arch_irn_class_remat)
+			(*stats)[BE_STAT_REMATS]++;
+		if (classify & arch_irn_class_copy)
+			(*stats)[BE_STAT_COPIES]++;
+		if (classify & arch_irn_class_perm)
+			(*stats)[BE_STAT_PERMS]++;
+ }
+}
+
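+/** Reset @p new_stats and count the Phi and backend nodes of the graph of @p birg. */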
+void be_collect_node_stats(be_node_stats_t *new_stats, be_irg_t *birg)
+{
+ arch_env = birg->main_env->arch_env;
+ stats = new_stats;
+
+ memset(stats, 0, sizeof(*stats));
+ irg_walk_graph(birg->irg, NULL, node_stat_walker, NULL);
+}
+
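+/** Subtract the counters in @p sub from those in @p stats. */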
+void be_subtract_node_stats(be_node_stats_t *stats, be_node_stats_t *sub)
+{
+ int i;
+ for (i = 0; i < BE_STAT_COUNT; ++i) {
+ (*stats)[i] -= (*sub)[i];
+	}
+}