+
+ /* Per-register-class statistics context: tag all following stat events
+  * with the class name.  (Popped again at the bottom of this loop body;
+  * the matching loop header is above this hunk.) */
+ stat_ev_ctx_push_str("bechordal_cls", cls->name);
+
+ /* Only when statistics events are enabled: record register pressure
+  * for this class (presumably per-block pressure -- name-based, confirm). */
+ stat_ev_if {
+ be_do_stat_reg_pressure(irg, cls);
+ }
+
+ /* Set up the per-class post-spill environment from the shared chordal
+  * environment, then run the class-specific pre-spill preparation. */
+ memcpy(&pse.cenv, &chordal_env, sizeof(chordal_env));
+ pse.irg = irg;
+ pre_spill(&pse, cls);
+
+ /* Spill phase for this class, wrapped in the RA-spill timer. */
+ be_timer_push(T_RA_SPILL);
+ be_do_spill(irg, cls);
+ be_timer_pop(T_RA_SPILL);
+
+ dump(BE_CH_DUMP_SPILL, irg, pse.cls, "spill");
+
+ post_spill(&pse, 0);
+
+ /* Statistics: emit the node-count delta accumulated since the last
+  * class (subtract, emit, then remember the new totals), and close the
+  * per-class stat context pushed above.  NOTE(review): the ctx pop is
+  * inside the stat_ev_if block, matching the conditional push semantics
+  * of the stat_ev machinery -- confirm stat_ev_ctx_push_str is likewise
+  * a no-op when stat events are disabled. */
+ stat_ev_if {
+ be_node_stats_t node_stats;
+
+ be_collect_node_stats(&node_stats, irg);
+ be_subtract_node_stats(&node_stats, &last_node_stats);
+ be_emit_node_stats(&node_stats, "bechordal_");
+
+ be_copy_node_stats(&last_node_stats, &node_stats);
+ stat_ev_ctx_pop("bechordal_cls");
+ }
+ }
+ } else {
+ post_spill_env_t *pse;
+
+ /* the backend has its own spiller */
+ m = arch_env_get_n_reg_class(arch_env);
+
+ /* One post-spill environment per register class, stack-allocated. */
+ pse = ALLOCAN(post_spill_env_t, m);
+
+ /* Prepare every class first ... */
+ for (j = 0; j < m; ++j) {
+ memcpy(&pse[j].cenv, &chordal_env, sizeof(chordal_env));
+ pse[j].irg = irg;
+ pre_spill(&pse[j], pse[j].cls);
+ }
+
+ /* ... then let the backend spill all classes in a single pass ... */
+ be_timer_push(T_RA_SPILL);
+ arch_code_generator_spill(be_get_irg_cg(irg), irg);
+ be_timer_pop(T_RA_SPILL);
+ dump(BE_CH_DUMP_SPILL, irg, NULL, "spill");
+
+ /* ... and finish each class with its post-spill step. */
+ for (j = 0; j < m; ++j) {
+ post_spill(&pse[j], j);
+ }
+ }
+
+ /* Verify the finished allocation according to the configured policy:
+  * WARN just runs the checker, ASSERT aborts on an invalid allocation.
+  * NOTE(review): under ASSERT the check sits inside assert(), so it is
+  * compiled out entirely in NDEBUG builds. */
+ be_timer_push(T_VERIFY);
+ if (chordal_env.opts->vrfy_option == BE_CH_VRFY_WARN) {
+ be_verify_register_allocation(irg);
+ } else if (chordal_env.opts->vrfy_option == BE_CH_VRFY_ASSERT) {
+ assert(be_verify_register_allocation(irg)
+ && "Register allocation invalid");
+ }
+ be_timer_pop(T_VERIFY);
+
+ /* Epilog: lower the Perm nodes introduced during allocation (the flag
+  * selects the copy-based lowering variant), dump, release the local
+  * obstack, and invalidate liveness info which is stale after RA. */
+ be_timer_push(T_RA_EPILOG);
+ lower_nodes_after_ra(irg,
+ options.lower_perm_opt&BE_CH_LOWER_PERM_COPY ? 1 : 0);
+ dump(BE_CH_DUMP_LOWER, irg, NULL, "belower-after-ra");
+
+ obstack_free(&obst, NULL);
+ be_liveness_invalidate(be_get_irg_liveness(irg));
+ be_timer_pop(T_RA_EPILOG);
+
+ be_timer_pop(T_RA_OTHER);