/**
 * Collect per-register-class node statistics for the graph of @p birg.
 *
 * Zeroes @p stat, then walks the whole graph with node_stat_walker, which
 * fills in the counters (the callers below read n_phis, n_mem_phis,
 * n_reloads and n_spills from it).
 *
 * @param birg  The backend irg whose graph is walked.
 * @param cls   Register class the statistics are restricted to.
 * @param stat  Out-parameter receiving the counters.
 */
static void node_stats(be_irg_t *birg, const arch_register_class_t *cls, node_stat_t *stat)
{
	struct node_stat_walker env;

	memset(stat, 0, sizeof(stat[0]));
	env.arch_env = birg->main_env->arch_env;
	/* scratch bitset for the walker; owned here and freed after the walk */
	env.mem_phis = bitset_irg_malloc(birg->irg);
	env.stat = stat;
	env.cls = cls;
	irg_walk_graph(birg->irg, NULL, node_stat_walker, &env);
	bitset_free(env.mem_phis);
}
+
+static void insn_count_walker(ir_node *irn, void *data)
+{
+ int *cnt = data;
+
+ switch(get_irn_opcode(irn)) {
+ case iro_Proj:
+ case iro_Phi:
+ case iro_Start:
+ case iro_End:
+ break;
+ default:
+ (*cnt)++;
+ }
+}
+
+static unsigned int count_insns(ir_graph *irg)
+{
+ int cnt = 0;
+ irg_walk_graph(irg, insn_count_walker, NULL, &cnt);
+ return cnt;
+}
+
+/**
+ * Initialize all timers.
+ */
+static void be_init_timer(be_options_t *main_opts)
+{
+ if (main_opts->timing == BE_TIME_ON) {
+ ra_timer.t_prolog = lc_timer_register("ra_prolog", "regalloc prolog");
+ ra_timer.t_epilog = lc_timer_register("ra_epilog", "regalloc epilog");
+ ra_timer.t_live = lc_timer_register("ra_liveness", "be liveness");
+ ra_timer.t_spill = lc_timer_register("ra_spill", "spiller");
+ ra_timer.t_spillslots = lc_timer_register("ra_spillslots", "spillslots");
+ ra_timer.t_color = lc_timer_register("ra_color", "graph coloring");
+ ra_timer.t_ifg = lc_timer_register("ra_ifg", "interference graph");
+ ra_timer.t_copymin = lc_timer_register("ra_copymin", "copy minimization");
+ ra_timer.t_ssa = lc_timer_register("ra_ssadestr", "ssa destruction");
+ ra_timer.t_verify = lc_timer_register("ra_verify", "graph verification");
+ ra_timer.t_other = lc_timer_register("ra_other", "other time");
+
+ LC_STOP_AND_RESET_TIMER(ra_timer.t_prolog);
+ LC_STOP_AND_RESET_TIMER(ra_timer.t_epilog);
+ LC_STOP_AND_RESET_TIMER(ra_timer.t_live);
+ LC_STOP_AND_RESET_TIMER(ra_timer.t_spill);
+ LC_STOP_AND_RESET_TIMER(ra_timer.t_spillslots);
+ LC_STOP_AND_RESET_TIMER(ra_timer.t_color);
+ LC_STOP_AND_RESET_TIMER(ra_timer.t_ifg);
+ LC_STOP_AND_RESET_TIMER(ra_timer.t_copymin);
+ LC_STOP_AND_RESET_TIMER(ra_timer.t_ssa);
+ LC_STOP_AND_RESET_TIMER(ra_timer.t_verify);
+ LC_STOP_AND_RESET_TIMER(ra_timer.t_other);
+
+ global_ra_timer = &ra_timer;
+ }
+}
+
#define BE_TIMER_INIT(main_opts) be_init_timer(main_opts)

/*
 * BE_TIMER_PUSH/BE_TIMER_POP are wrapped in do { ... } while (0) so each
 * expands to exactly one statement.  The previous bare-if form silently
 * captured a following `else` at the use site (dangling-else) and broke
 * inside unbraced if/else bodies.
 *
 * Both macros rely on `main_opts` and `options` being visible at the
 * expansion site, as the existing callers in this file do.
 */
#define BE_TIMER_PUSH(timer) \
	do { \
		if (main_opts->timing == BE_TIME_ON) { \
			if (! lc_timer_push(timer)) { \
				if (options.vrfy_option == BE_CH_VRFY_ASSERT) \
					assert(!"Timer already on stack, cannot be pushed twice."); \
				else if (options.vrfy_option == BE_CH_VRFY_WARN) \
					fprintf(stderr, "Timer %s already on stack, cannot be pushed twice.\n", \
						lc_timer_get_name(timer)); \
			} \
		} \
	} while (0)

#define BE_TIMER_POP(timer) \
	do { \
		if (main_opts->timing == BE_TIME_ON) { \
			lc_timer_t *tmp = lc_timer_pop(); \
			if (options.vrfy_option == BE_CH_VRFY_ASSERT) \
				assert(tmp == timer && "Attempt to pop wrong timer."); \
			else if (options.vrfy_option == BE_CH_VRFY_WARN && tmp != timer) \
				fprintf(stderr, "Attempt to pop wrong timer. %s is on stack, trying to pop %s.\n", \
					lc_timer_get_name(tmp), lc_timer_get_name(timer)); \
			timer = tmp; \
		} \
	} while (0)
+
/**
 * Perform things which need to be done per register class before spilling.
 *
 * Selects the register class for this iteration, sets up the chordal
 * environment (border-heads map, ignore-color set), prepares spill
 * constraints and, when statistics are active, records the pre-spill phi
 * count and an estimate of the graph's costs.  post_spill later subtracts
 * pse->pre_spill_cost from a fresh estimate to report the spill costs.
 *
 * @param isa      The current ISA, used to look up the register class.
 * @param cls_idx  Index of the register class to process.
 * @param pse      Per-class state shared between pre_spill and post_spill.
 */
static void pre_spill(const arch_isa_t *isa, int cls_idx, post_spill_env_t *pse) {
	be_chordal_env_t *chordal_env = &pse->cenv;
	be_irg_t *birg = pse->birg;
	node_stat_t node_stat;

	pse->cls = arch_isa_get_reg_class(isa, cls_idx);
	chordal_env->cls = pse->cls;
	chordal_env->border_heads = pmap_create();
	chordal_env->ignore_colors = bitset_malloc(chordal_env->cls->n_regs);

#ifdef FIRM_STATISTICS
	if (be_stat_ev_is_active()) {
		/* tag all following stat events with the class name; the matching
		 * be_stat_ev_pop() is at the end of this function */
		be_stat_tags[STAT_TAG_CLS] = pse->cls->name;
		be_stat_ev_push(be_stat_tags, STAT_TAG_LAST, be_stat_file);

		/* perform some node statistics. */
		node_stats(birg, pse->cls, &node_stat);
		be_stat_ev("phis_before_spill", node_stat.n_phis);
	}
#endif /* FIRM_STATISTICS */

	/* put all ignore registers into the ignore register set. */
	be_put_ignore_regs(birg, pse->cls, chordal_env->ignore_colors);

	be_pre_spill_prepare_constr(chordal_env);
	dump(BE_CH_DUMP_CONSTR, birg->irg, pse->cls, "-constr-pre", dump_ir_block_graph_sched);

#ifdef FIRM_STATISTICS
	if (be_stat_ev_is_active()) {
		/* baseline cost estimate; post_spill computes
		 * spillcosts = estimate_after_spill - pre_spill_cost */
		pse->pre_spill_cost = be_estimate_irg_costs(birg->irg,
			birg->main_env->arch_env, birg->exec_freq);
		be_stat_ev_pop();
	}
#endif /* FIRM_STATISTICS */
}
+
+/**
+ * Perform things which need to be done per register class after spilling.
+ */
+static void post_spill(post_spill_env_t *pse, int iteration) {
+ be_chordal_env_t *chordal_env = &pse->cenv;
+ be_irg_t *birg = pse->birg;
+ ir_graph *irg = birg->irg;
+ const be_main_env_t *main_env = birg->main_env;
+ be_options_t *main_opts = main_env->options;
+ node_stat_t node_stat;
+ int colors_n = arch_register_class_n_regs(chordal_env->cls);
+ int allocatable_regs = colors_n - be_put_ignore_regs(birg, chordal_env->cls, NULL);
+
+ /* some special classes contain only ignore regs, no work to be done */
+ if(allocatable_regs == 0)
+ return;
+
+#ifdef FIRM_STATISTICS
+ if (be_stat_ev_is_active()) {
+ double spillcosts = be_estimate_irg_costs(irg, main_env->arch_env, birg->exec_freq) - pse->pre_spill_cost;
+
+ be_stat_tags[STAT_TAG_CLS] = pse->cls->name;
+ be_stat_ev_push(be_stat_tags, STAT_TAG_LAST, be_stat_file);
+
+ be_stat_ev_l("spillcosts", (long) spillcosts);
+
+ node_stats(birg, pse->cls, &node_stat);
+ be_stat_ev("phis_after_spill", node_stat.n_phis);
+ be_stat_ev("mem_phis", node_stat.n_mem_phis);
+ be_stat_ev("reloads", node_stat.n_reloads);
+ be_stat_ev("spills", node_stat.n_spills);
+ }
+#endif /* FIRM_STATISTICS */
+
+ /*
+ If we have a backend provided spiller, post spill is
+ called in a loop after spilling for each register class.
+ But we only need to fix stack nodes once in this case.
+ */
+ if (iteration == 0) {
+ check_for_memory_operands(chordal_env);
+ be_abi_fix_stack_nodes(birg->abi);
+ }
+
+ BE_TIMER_PUSH(ra_timer.t_verify);
+
+ /* verify schedule and register pressure */
+ if (chordal_env->opts->vrfy_option == BE_CH_VRFY_WARN) {
+ be_verify_schedule(birg);
+ be_verify_register_pressure(birg, pse->cls, irg);
+ }
+ else if (chordal_env->opts->vrfy_option == BE_CH_VRFY_ASSERT) {
+ assert(be_verify_schedule(birg) && "Schedule verification failed");
+ assert(be_verify_register_pressure(birg, pse->cls, irg)
+ && "Register pressure verification failed");
+ }
+ BE_TIMER_POP(ra_timer.t_verify);
+
+ /* Color the graph. */
+ BE_TIMER_PUSH(ra_timer.t_color);
+ be_ra_chordal_color(chordal_env);
+ BE_TIMER_POP(ra_timer.t_color);
+
+ dump(BE_CH_DUMP_CONSTR, irg, pse->cls, "-color", dump_ir_block_graph_sched);