be_remove_empty_blocks() now returns non-zero if the graph was changed
[libfirm] / ir / be / bechordal_main.c
1 /**
2  * @file   bechordal_main.c
3  * @date   29.11.2005
4  * @author Sebastian Hack
5  * @cvs-id $Id$
6  *
7  * Copyright (C) 2005-2006 Universitaet Karlsruhe
8  * Released under the GPL
9  *
10  * Driver for the chordal register allocator.
11  */
12 #ifdef HAVE_CONFIG_H
13 #include "config.h"
14 #endif
15
16 #include <time.h>
17
18 #include "obst.h"
19 #include "pset.h"
20 #include "list.h"
21 #include "bitset.h"
22 #include "iterator.h"
23 #include "firm_config.h"
24
25 #ifdef WITH_LIBCORE
26 #include <libcore/lc_opts.h>
27 #include <libcore/lc_opts_enum.h>
28 #include <libcore/lc_timing.h>
29 #endif /* WITH_LIBCORE */
30
31 #include "ircons_t.h"
32 #include "irmode_t.h"
33 #include "irgraph_t.h"
34 #include "irprintf_t.h"
35 #include "irgwalk.h"
36 #include "ircons.h"
37 #include "irdump.h"
38 #include "irdom.h"
39 #include "ircons.h"
40 #include "irbitset.h"
41 #include "irnode.h"
42 #include "ircons.h"
43 #include "debug.h"
44 #include "xmalloc.h"
45 #include "execfreq.h"
46
47 #include "bechordal_t.h"
48 #include "beabi.h"
49 #include "bejavacoal.h"
50 #include "beutil.h"
51 #include "besched.h"
52 #include "besched_t.h"
53 #include "belive_t.h"
54 #include "bearch.h"
55 #include "beifg_t.h"
56 #include "beifg_impl.h"
57 #include "benode_t.h"
58 #include "bestatevent.h"
59 #include "bestat.h"
60 #include "bemodule.h"
61
62 #include "bespillbelady.h"
63 #include "bespillmorgan.h"
64 #include "bespillslots.h"
65 #include "bespilloptions.h"
66 #include "belower.h"
67
68 #ifdef WITH_ILP
69 #include "bespillremat.h"
70 #endif /* WITH_ILP */
71
72 #include "bejavacoal.h"
73 #include "becopystat.h"
74 #include "becopyopt.h"
75 #include "bessadestr.h"
76 #include "beverify.h"
77 #include "benode_t.h"
78
79 static be_ra_chordal_opts_t options = {
80         BE_CH_DUMP_NONE,
81         BE_CH_LOWER_PERM_SWAP,
82         BE_CH_VRFY_WARN,
83 };
84
85 /** Enable extreme live range splitting. */
86 static int be_elr_split = 0;
87
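/**
 * Per-register-class state handed from pre_spill() to post_spill().
 * pre_spill_cost holds the cost estimate used for the spill cost statistics.
 */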
88 typedef struct _post_spill_env_t {
89         be_chordal_env_t            cenv;
90         be_irg_t                    *birg;
91         const arch_register_class_t *cls;
92         double                      pre_spill_cost;
93 } post_spill_env_t;
94
95 static be_ra_timer_t ra_timer = {
96         NULL,
97         NULL,
98         NULL,
99         NULL,
100         NULL,
101         NULL,
102         NULL,
103         NULL,
104         NULL,
105         NULL,
106         NULL,
107 };
108
109 static const lc_opt_enum_int_items_t lower_perm_items[] = {
110         { "copy", BE_CH_LOWER_PERM_COPY },
111         { "swap", BE_CH_LOWER_PERM_SWAP },
112         { NULL, 0 }
113 };
114
115 static const lc_opt_enum_int_items_t lower_perm_stat_items[] = {
116         { NULL, 0 }
117 };
118
119 static const lc_opt_enum_int_items_t dump_items[] = {
120         { "none",       BE_CH_DUMP_NONE       },
121         { "spill",      BE_CH_DUMP_SPILL      },
122         { "live",       BE_CH_DUMP_LIVE       },
123         { "color",      BE_CH_DUMP_COLOR      },
124         { "copymin",    BE_CH_DUMP_COPYMIN    },
125         { "ssadestr",   BE_CH_DUMP_SSADESTR   },
126         { "tree",       BE_CH_DUMP_TREE_INTV  },
127         { "constr",     BE_CH_DUMP_CONSTR     },
128         { "lower",      BE_CH_DUMP_LOWER      },
129         { "spillslots", BE_CH_DUMP_SPILLSLOTS },
130         { "appel",      BE_CH_DUMP_APPEL      },
131         { "all",        BE_CH_DUMP_ALL        },
132         { NULL, 0 }
133 };
134
135 static const lc_opt_enum_int_items_t be_ch_vrfy_items[] = {
136         { "off",    BE_CH_VRFY_OFF    },
137         { "warn",   BE_CH_VRFY_WARN   },
138         { "assert", BE_CH_VRFY_ASSERT },
139         { NULL, 0 }
140 };
141
142 static lc_opt_enum_int_var_t lower_perm_var = {
143         &options.lower_perm_opt, lower_perm_items
144 };
145
146 static lc_opt_enum_int_var_t dump_var = {
147         &options.dump_flags, dump_items
148 };
149
150 static lc_opt_enum_int_var_t be_ch_vrfy_var = {
151         &options.vrfy_option, be_ch_vrfy_items
152 };
153
154 static const lc_opt_table_entry_t be_chordal_options[] = {
155         LC_OPT_ENT_ENUM_PTR ("perm",          "perm lowering options", &lower_perm_var),
156         LC_OPT_ENT_ENUM_MASK("dump",          "select dump phases", &dump_var),
157         LC_OPT_ENT_ENUM_PTR ("vrfy",          "verify options", &be_ch_vrfy_var),
158         LC_OPT_ENT_BOOL     ("elrsplit",      "enable extreme live range splitting", &be_elr_split),
159         { NULL }
160 };
161
162 static void dump(unsigned mask, ir_graph *irg,
163                                  const arch_register_class_t *cls,
164                                  const char *suffix,
165                                  void (*dump_func)(ir_graph *, const char *))
166 {
167         if((options.dump_flags & mask) == mask) {
168                 if (cls) {
169                         char buf[256];
170                         snprintf(buf, sizeof(buf), "-%s%s", cls->name, suffix);
171                         be_dump(irg, buf, dump_func);
172                 }
173                 else
174                         be_dump(irg, suffix, dump_func);
175         }
176 }
177
178 /**
179  * Checks, for every Reload, whether its user can perform the load itself.
180  */
181 static void memory_operand_walker(ir_node *irn, void *env) {
182         be_chordal_env_t *cenv = env;
183         const arch_env_t *aenv = cenv->birg->main_env->arch_env;
184         const ir_edge_t  *edge, *ne;
185         ir_node          *block;
186         ir_node          *spill;
187
188         if (! be_is_Reload(irn))
189                 return;
190
191         /* only use memory operands if the Reload is used by exactly one node */
192         if(get_irn_n_edges(irn) > 1)
193                 return;
194
195         spill = be_get_Reload_mem(irn);
196         block = get_nodes_block(irn);
197
198         foreach_out_edge_safe(irn, edge, ne) {
199                 ir_node *src = get_edge_src_irn(edge);
200                 int     pos  = get_edge_src_pos(edge);
201
202                 assert(src && "outedges broken!");
203
204                 if (get_nodes_block(src) == block && arch_possible_memory_operand(aenv, src, pos)) {
205                         DBG((cenv->dbg, LEVEL_3, "performing memory operand %+F at %+F\n", irn, src));
206                         arch_perform_memory_operand(aenv, src, spill, pos);
207                 }
208         }
209
210         /* kill the Reload */
211         if (get_irn_n_edges(irn) == 0) {
212                 sched_remove(irn);
213                 set_irn_n(irn, be_pos_Reload_mem, new_Bad());
214         }
215 }
216
217 /**
218  * Walks the graph and folds Reloads into memory operands where the backend supports it.
219  */
220 static INLINE void check_for_memory_operands(be_chordal_env_t *chordal_env) {
221         irg_walk_graph(chordal_env->irg, NULL, memory_operand_walker, chordal_env);
222 }
223
224 /**
225  * Sorry for doing stats again: node counts of interest to the register allocator.
226  */
227 typedef struct _node_stat_t {
228         unsigned int n_phis;      /**< Phis of the current register class. */
229         unsigned int n_mem_phis;  /**< Memory Phis (Phis with spill operands). */
230         unsigned int n_copies;    /**< Copies */
231         unsigned int n_perms;     /**< Perms */
232         unsigned int n_spills;    /**< Spill nodes */
233         unsigned int n_reloads;   /**< Reloads */
234 } node_stat_t;
235
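/**
 * Walker environment for node_stat_walker(). mem_phis records Phis already
 * classified as memory Phis, so Phis built on top of them are found as well.
 */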
236 struct node_stat_walker {
237         node_stat_t      *stat;
238         const arch_env_t *arch_env;
239         bitset_t         *mem_phis;
240         const arch_register_class_t *cls;
241 };
242
243 static void node_stat_walker(ir_node *irn, void *data)
244 {
245         struct node_stat_walker *env  = data;
246         const arch_env_t        *aenv = env->arch_env;
247
248         if (arch_irn_consider_in_reg_alloc(aenv, env->cls, irn)) {
249
250                 /* if the node is a normal phi */
251                 if(is_Phi(irn))
252                         env->stat->n_phis++;
253
254                 else if(arch_irn_classify(aenv, irn) & arch_irn_class_spill)
255                         ++env->stat->n_spills;
256
257                 else if(arch_irn_classify(aenv, irn) & arch_irn_class_reload)
258                         ++env->stat->n_reloads;
259
260                 else if(arch_irn_classify(aenv, irn) & arch_irn_class_copy)
261                         ++env->stat->n_copies;
262
263                 else if(arch_irn_classify(aenv, irn) & arch_irn_class_perm)
264                         ++env->stat->n_perms;
265         }
266
267         /* a mem phi is a PhiM with a mem phi operand or a Spill operand */
268         else if(is_Phi(irn) && get_irn_mode(irn) == mode_M) {
269                 int i;
270
271                 for(i = get_irn_arity(irn) - 1; i >= 0; --i) {
272                         ir_node *op = get_irn_n(irn, i);
273
274                         if((is_Phi(op) && bitset_contains_irn(env->mem_phis, op)) || (arch_irn_classify(aenv, op) & arch_irn_class_spill)) {
275                                 bitset_add_irn(env->mem_phis, irn);
276                                 env->stat->n_mem_phis++;
277                                 break;
278                         }
279                 }
280         }
281 }
282
283 static void node_stats(be_irg_t *birg, const arch_register_class_t *cls, node_stat_t *stat)
284 {
285         struct node_stat_walker env;
286
287         memset(stat, 0, sizeof(stat[0]));
288         env.arch_env = birg->main_env->arch_env;
289         env.mem_phis = bitset_irg_malloc(birg->irg);
290         env.stat     = stat;
291         env.cls      = cls;
292         irg_walk_graph(birg->irg, NULL, node_stat_walker, &env);
293         bitset_free(env.mem_phis);
294 }
295
296 static void insn_count_walker(ir_node *irn, void *data)
297 {
298         int *cnt = data;
299
300         switch(get_irn_opcode(irn)) {
301         case iro_Proj:
302         case iro_Phi:
303         case iro_Start:
304         case iro_End:
305                 break;
306         default:
307                 (*cnt)++;
308         }
309 }
310
311 static unsigned int count_insns(ir_graph *irg)
312 {
313         int cnt = 0;
314         irg_walk_graph(irg, insn_count_walker, NULL, &cnt);
315         return cnt;
316 }
317
318 #ifdef WITH_LIBCORE
319 /**
320  * Initialize all timers.
321  */
322 static void be_init_timer(be_options_t *main_opts)
323 {
324         if (main_opts->timing == BE_TIME_ON) {
325                 ra_timer.t_prolog     = lc_timer_register("ra_prolog",     "regalloc prolog");
326                 ra_timer.t_epilog     = lc_timer_register("ra_epilog",     "regalloc epilog");
327                 ra_timer.t_live       = lc_timer_register("ra_liveness",   "be liveness");
328                 ra_timer.t_spill      = lc_timer_register("ra_spill",      "spiller");
329                 ra_timer.t_spillslots = lc_timer_register("ra_spillslots", "spillslots");
330                 ra_timer.t_color      = lc_timer_register("ra_color",      "graph coloring");
331                 ra_timer.t_ifg        = lc_timer_register("ra_ifg",        "interference graph");
332                 ra_timer.t_copymin    = lc_timer_register("ra_copymin",    "copy minimization");
333                 ra_timer.t_ssa        = lc_timer_register("ra_ssadestr",   "ssa destruction");
334                 ra_timer.t_verify     = lc_timer_register("ra_verify",     "graph verification");
335                 ra_timer.t_other      = lc_timer_register("ra_other",      "other time");
336
337                 LC_STOP_AND_RESET_TIMER(ra_timer.t_prolog);
338                 LC_STOP_AND_RESET_TIMER(ra_timer.t_epilog);
339                 LC_STOP_AND_RESET_TIMER(ra_timer.t_live);
340                 LC_STOP_AND_RESET_TIMER(ra_timer.t_spill);
341                 LC_STOP_AND_RESET_TIMER(ra_timer.t_spillslots);
342                 LC_STOP_AND_RESET_TIMER(ra_timer.t_color);
343                 LC_STOP_AND_RESET_TIMER(ra_timer.t_ifg);
344                 LC_STOP_AND_RESET_TIMER(ra_timer.t_copymin);
345                 LC_STOP_AND_RESET_TIMER(ra_timer.t_ssa);
346                 LC_STOP_AND_RESET_TIMER(ra_timer.t_verify);
347                 LC_STOP_AND_RESET_TIMER(ra_timer.t_other);
348         }
349 }
350
351 #define BE_TIMER_INIT(main_opts)        be_init_timer(main_opts)
352
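/* Timer push/pop honour the verify option: a mismatched push or pop either
 * triggers an assertion (BE_CH_VRFY_ASSERT) or prints a warning (BE_CH_VRFY_WARN). */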
353 #define BE_TIMER_PUSH(timer)                                                            \
354         if (main_opts->timing == BE_TIME_ON) {                                              \
355                 if (! lc_timer_push(timer)) {                                                   \
356                         if (options.vrfy_option == BE_CH_VRFY_ASSERT)                               \
357                                 assert(!"Timer already on stack, cannot be pushed twice.");             \
358                         else if (options.vrfy_option == BE_CH_VRFY_WARN)                            \
359                                 fprintf(stderr, "Timer %s already on stack, cannot be pushed twice.\n", \
360                                         lc_timer_get_name(timer));                                          \
361                 }                                                                               \
362         }
363 #define BE_TIMER_POP(timer)                                                                    \
364         if (main_opts->timing == BE_TIME_ON) {                                                     \
365                 lc_timer_t *tmp = lc_timer_pop();                                                      \
366                 if (options.vrfy_option == BE_CH_VRFY_ASSERT)                                          \
367                         assert(tmp == timer && "Attempt to pop wrong timer.");                             \
368                 else if (options.vrfy_option == BE_CH_VRFY_WARN && tmp != timer)                       \
369                         fprintf(stderr, "Attempt to pop wrong timer. %s is on stack, trying to pop %s.\n", \
370                                 lc_timer_get_name(tmp), lc_timer_get_name(timer));                             \
371                 timer = tmp;                                                                           \
372         }
373 #else
374
375 #define BE_TIMER_INIT(main_opts)
376 #define BE_TIMER_PUSH(timer)
377 #define BE_TIMER_POP(timer)
378
379 #endif /* WITH_LIBCORE */
380
381 /**
382  * Perform things which need to be done per register class before spilling.
383  */
384 static void pre_spill(const arch_isa_t *isa, int cls_idx, post_spill_env_t *pse) {
385         be_chordal_env_t *chordal_env = &pse->cenv;
386         be_irg_t         *birg        = pse->birg;
387         node_stat_t      node_stat;
388
389         pse->cls                   = arch_isa_get_reg_class(isa, cls_idx);
390         chordal_env->cls           = pse->cls;
391         chordal_env->border_heads  = pmap_create();
392         chordal_env->ignore_colors = bitset_malloc(chordal_env->cls->n_regs);
393
394 #ifdef FIRM_STATISTICS
395         if (be_stat_ev_is_active()) {
396                 be_stat_tags[STAT_TAG_CLS] = pse->cls->name;
397                 be_stat_ev_push(be_stat_tags, STAT_TAG_LAST, be_stat_file);
398
399                 /* perform some node statistics. */
400                 node_stats(birg, pse->cls, &node_stat);
401                 be_stat_ev("phis_before_spill", node_stat.n_phis);
402         }
403 #endif /* FIRM_STATISTICS */
404
405         /* put all ignore registers into the ignore register set. */
406         be_put_ignore_regs(birg, pse->cls, chordal_env->ignore_colors);
407
408         be_pre_spill_prepare_constr(chordal_env);
409         dump(BE_CH_DUMP_CONSTR, birg->irg, pse->cls, "-constr-pre", dump_ir_block_graph_sched);
410
411 #ifdef FIRM_STATISTICS
412         if (be_stat_ev_is_active()) {
413                 pse->pre_spill_cost = be_estimate_irg_costs(birg->irg,
414                         birg->main_env->arch_env, birg->exec_freq);
415         }
416 #endif /* FIRM_STATISTICS */
417 }
418
419 /**
420  * Perform things which need to be done per register class after spilling.
421  */
422 static void post_spill(post_spill_env_t *pse, int iteration) {
423         be_chordal_env_t    *chordal_env = &pse->cenv;
424         be_irg_t            *birg        = pse->birg;
425         ir_graph            *irg         = birg->irg;
426         const be_main_env_t *main_env    = birg->main_env;
427         be_options_t        *main_opts   = main_env->options;
428         static int          splitted     = 0;
429         node_stat_t         node_stat;
430
431 #ifdef FIRM_STATISTICS
432         if (be_stat_ev_is_active()) {
433                 double spillcosts = be_estimate_irg_costs(irg, main_env->arch_env, birg->exec_freq) - pse->pre_spill_cost;
434
435                 be_stat_ev_l("spillcosts", (long) spillcosts);
436
437                 node_stats(birg, pse->cls, &node_stat);
438                 be_stat_ev("phis_after_spill", node_stat.n_phis);
439                 be_stat_ev("mem_phis", node_stat.n_mem_phis);
440                 be_stat_ev("reloads", node_stat.n_reloads);
441                 be_stat_ev("spills", node_stat.n_spills);
442         }
443 #endif /* FIRM_STATISTICS */
444
445         /*
446                 If the backend provides its own spiller, post_spill() is
447                 called in a loop after spilling, once for each register class.
448                 Stack nodes, however, only need to be fixed once.
449         */
450         if (iteration == 0) {
451                 check_for_memory_operands(chordal_env);
452                 be_abi_fix_stack_nodes(birg->abi, birg->lv);
453         }
454
455         BE_TIMER_PUSH(ra_timer.t_verify);
456
457         /* verify schedule and register pressure */
458         if (chordal_env->opts->vrfy_option == BE_CH_VRFY_WARN) {
459                 be_verify_schedule(irg);
460                 be_verify_register_pressure(birg, pse->cls, irg);
461         }
462         else if (chordal_env->opts->vrfy_option == BE_CH_VRFY_ASSERT) {
463                 assert(be_verify_schedule(irg) && "Schedule verification failed");
464                 assert(be_verify_register_pressure(birg, pse->cls, irg)
465                         && "Register pressure verification failed");
466         }
467         BE_TIMER_POP(ra_timer.t_verify);
468
469         if (be_elr_split && ! splitted) {
470                 extreme_liverange_splitting(chordal_env);
471                 splitted = 1;
472         }
473
474         /* Color the graph. */
475         BE_TIMER_PUSH(ra_timer.t_color);
476         be_ra_chordal_color(chordal_env);
477         BE_TIMER_POP(ra_timer.t_color);
478
479         dump(BE_CH_DUMP_CONSTR, irg, pse->cls, "-color", dump_ir_block_graph_sched);
480
481         /* Create the ifg with the selected flavor */
482         BE_TIMER_PUSH(ra_timer.t_ifg);
483         chordal_env->ifg = be_create_ifg(chordal_env);
484         BE_TIMER_POP(ra_timer.t_ifg);
485
486 #ifdef FIRM_STATISTICS
487         if (be_stat_ev_is_active()) {
488                 be_ifg_stat_t stat;
489
490                 be_ifg_stat(birg, chordal_env->ifg, &stat);
491                 be_stat_ev("ifg_nodes", stat.n_nodes);
492                 be_stat_ev("ifg_edges", stat.n_edges);
493                 be_stat_ev("ifg_comps", stat.n_comps);
494
495                 node_stats(birg, pse->cls, &node_stat);
496                 be_stat_ev("perms_before_coal", node_stat.n_perms);
497                 be_stat_ev("copies_before_coal", node_stat.n_copies);
498         }
499 #endif /* FIRM_STATISTICS */
500
501         /* copy minimization */
502         BE_TIMER_PUSH(ra_timer.t_copymin);
503         co_driver(chordal_env);
504         BE_TIMER_POP(ra_timer.t_copymin);
505
506         dump(BE_CH_DUMP_COPYMIN, irg, pse->cls, "-copymin", dump_ir_block_graph_sched);
507
508         BE_TIMER_PUSH(ra_timer.t_ssa);
509
510         /* ssa destruction */
511         be_ssa_destruction(chordal_env);
512
513         BE_TIMER_POP(ra_timer.t_ssa);
514
515         dump(BE_CH_DUMP_SSADESTR, irg, pse->cls, "-ssadestr", dump_ir_block_graph_sched);
516
517         BE_TIMER_PUSH(ra_timer.t_verify);
518         if (chordal_env->opts->vrfy_option != BE_CH_VRFY_OFF) {
519                 be_ssa_destruction_check(chordal_env);
520         }
521         BE_TIMER_POP(ra_timer.t_verify);
522
523         /* free some data structures */
524         be_ifg_free(chordal_env->ifg);
525         pmap_destroy(chordal_env->border_heads);
526         bitset_free(chordal_env->ignore_colors);
527
528 #ifdef FIRM_STATISTICS
529         if (be_stat_ev_is_active()) {
530                 node_stats(birg, pse->cls, &node_stat);
531                 be_stat_ev("perms_after_coal", node_stat.n_perms);
532                 be_stat_ev("copies_after_coal", node_stat.n_copies);
533                 be_stat_ev_pop();
534         }
535 #endif /* FIRM_STATISTICS */
536 }
537
538 /**
539  * Performs chordal register allocation for each register class of the given irg.
540  * Timing information, if enabled, is collected in the module-local ra_timer.
541  *
542  * @param birg  Backend irg object
543  */
544 static void be_ra_chordal_main(be_irg_t *birg)
545 {
546         const be_main_env_t *main_env  = birg->main_env;
547         const arch_isa_t    *isa       = arch_env_get_isa(main_env->arch_env);
548         ir_graph            *irg       = birg->irg;
549         be_options_t        *main_opts = main_env->options;
550         int                 j, m;
551         be_chordal_env_t    chordal_env;
552
553         BE_TIMER_INIT(main_opts);
554         BE_TIMER_PUSH(ra_timer.t_other);
555         BE_TIMER_PUSH(ra_timer.t_prolog);
556
557         be_assure_dom_front(birg);
558         be_assure_liveness(birg);
559
560         chordal_env.opts      = &options;
561         chordal_env.irg       = irg;
562         chordal_env.birg      = birg;
563         FIRM_DBG_REGISTER(chordal_env.dbg, "firm.be.chordal");
564
565         obstack_init(&chordal_env.obst);
566
567         BE_TIMER_POP(ra_timer.t_prolog);
568
569         be_stat_ev("insns_before", count_insns(irg));
570
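        /*
                Spilling, coloring, copy minimization and SSA destruction are done
                once per register class (see pre_spill()/post_spill()). With a
                backend-provided spiller, spilling itself runs only once for all classes.
        */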
571         if (! arch_code_generator_has_spiller(birg->cg)) {
572                 /* use one of the generic spillers */
573
574                 /* Perform the following for each register class. */
575                 for (j = 0, m = arch_isa_get_n_reg_class(isa); j < m; ++j) {
576                         post_spill_env_t pse;
577
578                         memcpy(&pse.cenv, &chordal_env, sizeof(chordal_env));
579                         pse.birg = birg;
580                         pre_spill(isa, j, &pse);
581
582                         BE_TIMER_PUSH(ra_timer.t_spill);
583                         be_do_spill(birg, pse.cls);
584                         BE_TIMER_POP(ra_timer.t_spill);
585
586                         dump(BE_CH_DUMP_SPILL, irg, pse.cls, "-spill", dump_ir_block_graph_sched);
587
588                         post_spill(&pse, 0);
589                 }
590         }
591         else {
592                 post_spill_env_t *pse;
593
594                 /* the backend has its own spiller */
595                 m = arch_isa_get_n_reg_class(isa);
596
597                 pse = alloca(m * sizeof(pse[0]));
598
599                 for (j = 0; j < m; ++j) {
600                         memcpy(&pse[j].cenv, &chordal_env, sizeof(chordal_env));
601                         pse[j].birg = birg;
602                         pre_spill(isa, j, &pse[j]);
603                 }
604
605                 BE_TIMER_PUSH(ra_timer.t_spill);
606                 arch_code_generator_spill(birg->cg, birg);
607                 BE_TIMER_POP(ra_timer.t_spill);
608                 dump(BE_CH_DUMP_SPILL, irg, NULL, "-spill", dump_ir_block_graph_sched);
609
610                 for (j = 0; j < m; ++j) {
611                         post_spill(&pse[j], j);
612                 }
613         }
614
615         BE_TIMER_PUSH(ra_timer.t_spillslots);
616
617         be_coalesce_spillslots(&chordal_env);
618         dump(BE_CH_DUMP_SPILLSLOTS, irg, NULL, "-spillslots", dump_ir_block_graph_sched);
619
620         BE_TIMER_POP(ra_timer.t_spillslots);
621
622         BE_TIMER_PUSH(ra_timer.t_verify);
623         /* verify spillslots */
624         if (options.vrfy_option == BE_CH_VRFY_WARN) {
625                 be_verify_spillslots(main_env->arch_env, irg);
626         }
627         else if (options.vrfy_option == BE_CH_VRFY_ASSERT) {
628                 assert(be_verify_spillslots(main_env->arch_env, irg) && "Spillslot verification failed");
629         }
630         BE_TIMER_POP(ra_timer.t_verify);
631
632         BE_TIMER_PUSH(ra_timer.t_epilog);
633         dump(BE_CH_DUMP_LOWER, irg, NULL, "-spilloff", dump_ir_block_graph_sched);
634
635         lower_nodes_after_ra(birg, options.lower_perm_opt & BE_CH_LOWER_PERM_COPY ? 1 : 0);
636         dump(BE_CH_DUMP_LOWER, irg, NULL, "-belower-after-ra", dump_ir_block_graph_sched);
637
638         obstack_free(&chordal_env.obst, NULL);
639         BE_TIMER_POP(ra_timer.t_epilog);
640
641         BE_TIMER_POP(ra_timer.t_other);
642
643         be_stat_ev("insns_after", count_insns(irg));
644
645         return;
646 }
647
648 static be_ra_t be_ra_chordal_allocator = {
649         be_ra_chordal_main,
650 };
651
652 void be_init_chordal(void)
653 {
654         lc_opt_entry_t *be_grp = lc_opt_get_grp(firm_opt_get_root(), "be");
655         lc_opt_entry_t *ra_grp = lc_opt_get_grp(be_grp, "ra");
656         lc_opt_entry_t *chordal_grp = lc_opt_get_grp(ra_grp, "chordal");
657
658         lc_opt_add_table(chordal_grp, be_chordal_options);
659
660         be_register_allocator("chordal", &be_ra_chordal_allocator);
661 }
662
663 BE_REGISTER_MODULE_CONSTRUCTOR(be_init_chordal);