* @brief Driver for the chordal register allocator.
* @author Sebastian Hack
* @date 29.11.2005
- * @version $Id$
*/
#include "config.h"
#include "pset.h"
#include "list.h"
#include "bitset.h"
-#include "iterator.h"
#include "lc_opts.h"
#include "lc_opts_enum.h"
#include "irdump.h"
#include "irdom.h"
#include "ircons.h"
-#include "irbitset.h"
#include "irnode.h"
#include "ircons.h"
+#include "irtools.h"
#include "debug.h"
#include "execfreq.h"
#include "iredges_t.h"
#include "bearch.h"
#include "beifg.h"
#include "benode.h"
-#include "bestatevent.h"
+#include "statev_t.h"
#include "bestat.h"
#include "bemodule.h"
#include "be_t.h"
static be_ra_chordal_opts_t options = {
BE_CH_DUMP_NONE,
BE_CH_LOWER_PERM_SWAP,
- BE_CH_VRFY_WARN,
- "",
- ""
+ BE_CH_VRFY_WARN
};
typedef struct post_spill_env_t {
{ NULL, 0 }
};
-static const lc_opt_enum_int_items_t lower_perm_stat_items[] = {
- { NULL, 0 }
-};
-
-static const lc_opt_enum_int_items_t dump_items[] = {
+static const lc_opt_enum_mask_items_t dump_items[] = {
{ "none", BE_CH_DUMP_NONE },
{ "spill", BE_CH_DUMP_SPILL },
{ "live", BE_CH_DUMP_LIVE },
{ "copymin", BE_CH_DUMP_COPYMIN },
{ "ssadestr", BE_CH_DUMP_SSADESTR },
{ "tree", BE_CH_DUMP_TREE_INTV },
+ { "split", BE_CH_DUMP_SPLIT },
{ "constr", BE_CH_DUMP_CONSTR },
{ "lower", BE_CH_DUMP_LOWER },
{ "spillslots", BE_CH_DUMP_SPILLSLOTS },
&options.lower_perm_opt, lower_perm_items
};
-static lc_opt_enum_int_var_t dump_var = {
+static lc_opt_enum_mask_var_t dump_var = {
&options.dump_flags, dump_items
};
};
static const lc_opt_table_entry_t be_chordal_options[] = {
- LC_OPT_ENT_ENUM_PTR ("perm", "perm lowering options", &lower_perm_var),
+ LC_OPT_ENT_ENUM_INT ("perm", "perm lowering options", &lower_perm_var),
LC_OPT_ENT_ENUM_MASK("dump", "select dump phases", &dump_var),
- LC_OPT_ENT_ENUM_PTR ("verify", "verify options", &be_ch_vrfy_var),
+ LC_OPT_ENT_ENUM_INT ("verify", "verify options", &be_ch_vrfy_var),
LC_OPT_LAST
};
}
/**
- * Checks for every reload if its user can perform the load on itself.
+ * Post-Walker: Checks whether the given reload has only one user that can
+ * perform the reload as part of its address mode.
+ * Folds the reload into that user if possible.
*/
static void memory_operand_walker(ir_node *irn, void *env)
{
- const ir_edge_t *edge, *ne;
- ir_node *block;
- ir_node *spill;
+ ir_node *block;
+ ir_node *spill;
(void)env;
spill = be_get_Reload_mem(irn);
block = get_nodes_block(irn);
- foreach_out_edge_safe(irn, edge, ne) {
+ foreach_out_edge_safe(irn, edge) {
ir_node *src = get_edge_src_irn(edge);
int pos = get_edge_src_pos(edge);
}
}
- /* kill the Reload */
+ /* kill the Reload if it was folded */
if (get_irn_n_edges(irn) == 0) {
ir_graph *irg = get_irn_irg(irn);
ir_mode *frame_mode = get_irn_mode(get_irn_n(irn, n_be_Reload_frame));
{
be_chordal_env_t *chordal_env = &pse->cenv;
ir_graph *irg = pse->irg;
- ir_exec_freq *exec_freq = be_get_irg_exec_freq(irg);
pse->cls = cls;
chordal_env->cls = cls;
chordal_env->border_heads = pmap_create();
chordal_env->allocatable_regs = bitset_malloc(chordal_env->cls->n_regs);
- be_assure_liveness(irg);
- be_liveness_assure_chk(be_get_irg_liveness(irg));
+ be_assure_live_chk(irg);
- stat_ev_do(pse->pre_spill_cost = be_estimate_irg_costs(irg, exec_freq));
+ if (stat_ev_enabled) {
+ pse->pre_spill_cost = be_estimate_irg_costs(irg);
+ }
/* put all ignore registers into the ignore register set. */
be_put_allocatable_regs(irg, pse->cls, chordal_env->allocatable_regs);
{
be_chordal_env_t *chordal_env = &pse->cenv;
ir_graph *irg = pse->irg;
- ir_exec_freq *exec_freq = be_get_irg_exec_freq(irg);
int allocatable_regs = be_get_n_allocatable_regs(irg, chordal_env->cls);
/* some special classes contain only ignore regs, no work to be done */
if (allocatable_regs > 0) {
- stat_ev_dbl("bechordal_spillcosts", be_estimate_irg_costs(irg, exec_freq) - pse->pre_spill_cost);
+ stat_ev_dbl("bechordal_spillcosts", be_estimate_irg_costs(irg) - pse->pre_spill_cost);
/*
If we have a backend provided spiller, post spill is
chordal_env->ifg = be_create_ifg(chordal_env);
be_timer_pop(T_RA_IFG);
- stat_ev_if {
+ if (stat_ev_enabled) {
be_ifg_stat_t stat;
be_node_stats_t node_stats;
be_timer_push(T_RA_PROLOG);
- be_assure_liveness(irg);
-
chordal_env.obst = &obst;
chordal_env.opts = &options;
chordal_env.irg = irg;
be_timer_pop(T_RA_PROLOG);
- stat_ev_if {
+ if (stat_ev_enabled) {
be_collect_node_stats(&last_node_stats, irg);
}
stat_ev_ctx_push_str("bechordal_cls", cls->name);
- stat_ev_if {
+ if (stat_ev_enabled) {
be_do_stat_reg_pressure(irg, cls);
}
- memcpy(&pse.cenv, &chordal_env, sizeof(chordal_env));
+ pse.cenv = chordal_env;
pse.irg = irg;
pre_spill(&pse, cls);
post_spill(&pse, 0);
- stat_ev_if {
+ if (stat_ev_enabled) {
be_node_stats_t node_stats;
be_collect_node_stats(&node_stats, irg);
be_timer_pop(T_VERIFY);
be_timer_push(T_RA_EPILOG);
- lower_nodes_after_ra(irg,
- options.lower_perm_opt&BE_CH_LOWER_PERM_COPY ? 1 : 0);
+ lower_nodes_after_ra(irg, options.lower_perm_opt == BE_CH_LOWER_PERM_COPY);
dump(BE_CH_DUMP_LOWER, irg, NULL, "belower-after-ra");
obstack_free(&obst, NULL);
- be_liveness_invalidate(be_get_irg_liveness(irg));
+ be_invalidate_live_sets(irg);
be_timer_pop(T_RA_EPILOG);
be_timer_pop(T_RA_OTHER);
be_register_allocator("chordal", &be_ra_chordal_allocator);
lc_opt_add_table(chordal_grp, be_chordal_options);
- be_add_module_list_opt(chordal_grp, "coloring", "select coloring methode", &colorings, (void**) &selected_coloring);
+ be_add_module_list_opt(chordal_grp, "coloring", "select coloring method", &colorings, (void**) &selected_coloring);
}