* @brief The main sparc backend driver file.
* @version $Id$
*/
-
#include "config.h"
#include "lc_opts.h"
#include "lc_opts_enum.h"
-#include "pseudo_irg.h"
#include "irgwalk.h"
#include "irprog.h"
#include "irprintf.h"
#include "irgmod.h"
#include "irgopt.h"
#include "iroptimize.h"
+#include "irtools.h"
+#include "irdump.h"
#include "lowering.h"
-#include "error.h"
#include "bitset.h"
#include "debug.h"
#include "array_t.h"
-#include "irtools.h"
+#include "error.h"
#include "../bearch.h"
#include "../benode.h"
#include "../beflags.h"
#include "bearch_sparc_t.h"
-#include "bearch_sparc.h"
#include "sparc_new_nodes.h"
#include "gen_sparc_regalloc_if.h"
#include "sparc_transform.h"
#include "sparc_emitter.h"
-#include "sparc_map_regs.h"
DEBUG_ONLY(static firm_dbg_module_t *dbg = NULL;)
return NULL;
}
-static void sparc_set_frame_entity(ir_node *node, ir_entity *ent)
-{
- (void) node;
- (void) ent;
- panic("sparc_set_frame_entity() called. This should not happen.");
- /* TODO: set the ir_entity assigned to the frame */
-}
-
/**
* This function is called by the generic backend to correct offsets for
* nodes accessing the stack.
}
}
-static int sparc_get_sp_bias(const ir_node *irn)
+static int sparc_get_sp_bias(const ir_node *node)
{
- (void) irn;
- return SPARC_MIN_STACKSIZE;
+ if (is_sparc_Save(node)) {
+ const sparc_save_attr_t *attr = get_sparc_save_attr_const(node);
+	 * Note we do not report the change of the SPARC_MIN_STACKSIZE
+ * size, since we have additional magic in the emitter which
+ * calculates that! */
+ assert(attr->initial_stacksize >= SPARC_MIN_STACKSIZE);
+ return attr->initial_stacksize - SPARC_MIN_STACKSIZE;
+ }
+ return 0;
}
/* fill register allocator interface */
get_sparc_in_req,
sparc_classify,
sparc_get_frame_entity,
- sparc_set_frame_entity,
sparc_set_frame_offset,
sparc_get_sp_bias,
NULL, /* get_inverse */
sparc_transform_graph(cg);
if (cg->dump)
- be_dump(cg->irg, "-transformed", dump_ir_block_graph_sched);
+ dump_ir_graph(cg->irg, "transformed");
}
{
sparc_code_gen_t *cg = self;
/* fixup flags register */
- be_sched_fix_flags(cg->birg, &sparc_reg_classes[CLASS_sparc_flags], &sparc_flags_remat);
+ be_sched_fix_flags(cg->irg, &sparc_reg_classes[CLASS_sparc_flags], &sparc_flags_remat);
}
/**
ir_node *sched_point = sched_prev(node);
- load = new_bd_sparc_Load(dbgi, block, ptr, mem, mode, entity, false, 0, true);
+ load = new_bd_sparc_Ld(dbgi, block, ptr, mem, mode, entity, false, 0, true);
sched_add_after(sched_point, load);
sched_remove(node);
- proj = new_rd_Proj(dbgi, load, mode, pn_sparc_Load_res);
+ proj = new_rd_Proj(dbgi, load, mode, pn_sparc_Ld_res);
reg = arch_get_irn_register(node);
arch_set_irn_register(proj, reg);
ir_node *store;
sched_point = sched_prev(node);
- store = new_bd_sparc_Store(dbgi, block, ptr, val, mem, mode, entity, false, 0, true);
+ store = new_bd_sparc_St(dbgi, block, ptr, val, mem, mode, entity, false, 0, true);
sched_remove(node);
sched_add_after(sched_point, store);
static void sparc_after_ra(void *self)
{
sparc_code_gen_t *cg = self;
- be_coalesce_spillslots(cg->birg);
+ be_coalesce_spillslots(cg->irg);
irg_block_walk_graph(cg->irg, NULL, sparc_after_ra_walker, NULL);
}
free(cg);
}
-static void *sparc_cg_init(be_irg_t *birg);
+static void *sparc_cg_init(ir_graph *irg);
static const arch_code_generator_if_t sparc_code_gen_if = {
sparc_cg_init,
- NULL, /* get_pic_base hook */
- NULL, /* before abi introduce hook */
+ NULL, /* get_pic_base hook */
+ NULL, /* before abi introduce hook */
sparc_prepare_graph,
- NULL, /* spill hook */
+ NULL, /* spill hook */
sparc_before_ra, /* before register allocation hook */
sparc_after_ra, /* after register allocation hook */
NULL,
/**
* Initializes the code generator.
*/
-static void *sparc_cg_init(be_irg_t *birg)
+static void *sparc_cg_init(ir_graph *irg)
{
- static ir_type *int_tp = NULL;
- sparc_isa_t *isa = (sparc_isa_t *)birg->main_env->arch_env;
- sparc_code_gen_t *cg;
+ sparc_isa_t *isa = (sparc_isa_t *) be_get_irg_arch_env(irg);
+ sparc_code_gen_t *cg = XMALLOCZ(sparc_code_gen_t);
- if (! int_tp) {
- /* create an integer type with machine size */
- int_tp = new_type_primitive(mode_Is);
- }
-
- cg = XMALLOC(sparc_code_gen_t);
- cg->impl = &sparc_code_gen_if;
- cg->irg = birg->irg;
- //cg->reg_set = new_set(arm_cmp_irn_reg_assoc, 1024);
- cg->isa = isa;
- cg->birg = birg;
- //cg->int_tp = int_tp;
- //cg->have_fp_insn = 0;
- //cg->unknown_gp = NULL;
- //cg->unknown_fpa = NULL;
- cg->dump = (birg->main_env->options->dump_flags & DUMP_BE) ? 1 : 0;
+ cg->impl = &sparc_code_gen_if;
+ cg->irg = irg;
+ cg->isa = isa;
+ cg->dump = (be_get_irg_options(irg)->dump_flags & DUMP_BE) != 0;
/* enter the current code generator */
isa->cg = cg;
- return (arch_code_generator_t *)cg;
+ return (arch_code_generator_t*) cg;
}
-
-
const arch_isa_if_t sparc_isa_if;
static sparc_isa_t sparc_isa_template = {
{
- &sparc_isa_if, /* isa interface implementation */
+ &sparc_isa_if, /* isa interface implementation */
&sparc_gp_regs[REG_SP], /* stack pointer register */
&sparc_gp_regs[REG_FP], /* base pointer register */
&sparc_reg_classes[CLASS_sparc_gp], /* link pointer register class */
-1, /* stack direction */
- 1, /* power of two stack alignment for calls, 2^2 == 4 */
+		3,                               /* power of two stack alignment for calls, 2^3 == 8 */
NULL, /* main environment */
7, /* costs for a spill instruction */
5, /* costs for a reload instruction */
+ false, /* no custom abi handling */
},
NULL /* current code generator */
};
+
+static void sparc_handle_intrinsics(void)
+{
+ ir_type *tp, *int_tp, *uint_tp;
+ i_record records[8];
+ int n_records = 0;
+
+ runtime_rt rt_iMod, rt_uMod;
+
+#define ID(x) new_id_from_chars(x, sizeof(x)-1)
+
+ int_tp = new_type_primitive(mode_Is);
+ uint_tp = new_type_primitive(mode_Iu);
+
+
+ /* SPARC has no signed mod instruction ... */
+ {
+ i_instr_record *map_Mod = &records[n_records++].i_instr;
+
+ tp = new_type_method(2, 1);
+ set_method_param_type(tp, 0, int_tp);
+ set_method_param_type(tp, 1, int_tp);
+ set_method_res_type(tp, 0, int_tp);
+
+ rt_iMod.ent = new_entity(get_glob_type(), ID(".rem"), tp);
+ set_entity_ld_ident(rt_iMod.ent, ID(".rem"));
+ rt_iMod.mode = mode_T;
+ rt_iMod.res_mode = mode_Is;
+ rt_iMod.mem_proj_nr = pn_Mod_M;
+ rt_iMod.regular_proj_nr = pn_Mod_X_regular;
+ rt_iMod.exc_proj_nr = pn_Mod_X_except;
+ rt_iMod.exc_mem_proj_nr = pn_Mod_M;
+ rt_iMod.res_proj_nr = pn_Mod_res;
+
+ set_entity_visibility(rt_iMod.ent, ir_visibility_external);
+
+ map_Mod->kind = INTRINSIC_INSTR;
+ map_Mod->op = op_Mod;
+ map_Mod->i_mapper = (i_mapper_func)i_mapper_RuntimeCall;
+ map_Mod->ctx = &rt_iMod;
+ }
+ /* ... nor an unsigned mod. */
+ {
+ i_instr_record *map_Mod = &records[n_records++].i_instr;
+
+ tp = new_type_method(2, 1);
+ set_method_param_type(tp, 0, uint_tp);
+ set_method_param_type(tp, 1, uint_tp);
+ set_method_res_type(tp, 0, uint_tp);
+
+ rt_uMod.ent = new_entity(get_glob_type(), ID(".urem"), tp);
+ set_entity_ld_ident(rt_uMod.ent, ID(".urem"));
+ rt_uMod.mode = mode_T;
+ rt_uMod.res_mode = mode_Iu;
+ rt_uMod.mem_proj_nr = pn_Mod_M;
+ rt_uMod.regular_proj_nr = pn_Mod_X_regular;
+ rt_uMod.exc_proj_nr = pn_Mod_X_except;
+ rt_uMod.exc_mem_proj_nr = pn_Mod_M;
+ rt_uMod.res_proj_nr = pn_Mod_res;
+
+ set_entity_visibility(rt_uMod.ent, ir_visibility_external);
+
+ map_Mod->kind = INTRINSIC_INSTR;
+ map_Mod->op = op_Mod;
+ map_Mod->i_mapper = (i_mapper_func)i_mapper_RuntimeCall;
+ map_Mod->ctx = &rt_uMod;
+ }
+
+ if (n_records > 0)
+ lower_intrinsics(records, n_records, /*part_block_used=*/0);
+}
+
+
/**
* Initializes the backend ISA
*/
sparc_register_init();
sparc_create_opcodes(&sparc_irn_ops);
+ sparc_handle_intrinsics();
- return &isa->arch_env;
+ return &isa->base;
}
sparc_isa_t *isa = self;
/* emit now all global declarations */
- be_gas_emit_decls(isa->arch_env.main_env);
+ be_gas_emit_decls(isa->base.main_env);
be_emit_exit();
free(self);
typedef struct {
be_abi_call_flags_bits_t flags;
- const arch_env_t *arch_env;
- ir_graph *irg;
+ ir_graph *irg;
} sparc_abi_env_t;
-static void *sparc_abi_init(const be_abi_call_t *call, const arch_env_t *arch_env, ir_graph *irg)
+static void *sparc_abi_init(const be_abi_call_t *call, ir_graph *irg)
{
sparc_abi_env_t *env = XMALLOC(sparc_abi_env_t);
be_abi_call_flags_t fl = be_abi_call_get_flags(call);
env->flags = fl.bits;
env->irg = irg;
- env->arch_env = arch_env;
return env;
}
if (between_type == NULL) {
between_type = new_type_class(new_id_from_str("sparc_between_type"));
- set_type_size_bytes(between_type, 0);
+ set_type_size_bytes(between_type, SPARC_MIN_STACKSIZE);
}
return between_type;
* Build the prolog, return the BASE POINTER register
*/
static const arch_register_t *sparc_abi_prologue(void *self, ir_node **mem,
- pmap *reg_map, int *stack_bias)
+ pmap *reg_map, int *stack_bias)
{
sparc_abi_env_t *env = self;
ir_node *block = get_irg_start_block(env->irg);
(void) mem;
(void) stack_bias;
- *stack_bias -= SPARC_MIN_STACKSIZE;
sp_proj = new_r_Proj(save, sp->reg_class->mode, pn_sparc_Save_stack);
*mem = new_r_Proj(save, mode_M, pn_sparc_Save_mem);
/* Build the epilog */
static void sparc_abi_epilogue(void *self, ir_node *bl, ir_node **mem,
- pmap *reg_map)
+ pmap *reg_map)
{
(void) self;
(void) bl;
sparc_abi_epilogue,
};
+static const arch_register_t *gp_param_out_regs[] = {
+ &sparc_gp_regs[REG_O0],
+ &sparc_gp_regs[REG_O1],
+ &sparc_gp_regs[REG_O2],
+ &sparc_gp_regs[REG_O3],
+ &sparc_gp_regs[REG_O4],
+ &sparc_gp_regs[REG_O5],
+};
+
+static const arch_register_t *gp_param_in_regs[] = {
+ &sparc_gp_regs[REG_I0],
+ &sparc_gp_regs[REG_I1],
+ &sparc_gp_regs[REG_I2],
+ &sparc_gp_regs[REG_I3],
+ &sparc_gp_regs[REG_I4],
+ &sparc_gp_regs[REG_I5],
+};
+
+/**
+ * get register for outgoing parameters 1-6
+ */
+static const arch_register_t *sparc_get_RegParamOut_reg(int n)
+{
+ assert(n < 6 && n >=0 && "trying to get (out) register for param >= 6");
+ return gp_param_out_regs[n];
+}
+
+/**
+ * get register for incoming parameters 1-6
+ */
+static const arch_register_t *sparc_get_RegParamIn_reg(int n)
+{
+ assert(n < 6 && n >=0 && "trying to get (in) register for param >= 6");
+ return gp_param_in_regs[n];
+}
+
/**
* Get the ABI restrictions for procedure calls.
* @param self The this pointer.
/* set abi flags for calls */
call_flags.bits.left_to_right = 0;
call_flags.bits.store_args_sequential = 1;
- /* */
call_flags.bits.try_omit_fp = 0;
call_flags.bits.fp_free = 0;
call_flags.bits.call_has_imm = 1;
}
static ir_graph **sparc_get_backend_irg_list(const void *self,
- ir_graph ***irgs)
+ ir_graph ***irgs)
{
(void) self;
(void) irgs;
sparc_get_list_sched_selector,
sparc_get_ilp_sched_selector,
sparc_get_reg_class_alignment,
- sparc_get_backend_params,
+ sparc_get_backend_params,
sparc_get_allowed_execution_units,
sparc_get_machine,
sparc_get_backend_irg_list,