X-Git-Url: http://nsz.repo.hu/git/?a=blobdiff_plain;f=ir%2Fbe%2Fsparc%2Fsparc_finish.c;h=ac8fc9e73ba1eb3080ee771edb0069f62d5153b5;hb=df2faee01a5832057bb3ca0ba5f67e979c916e19;hp=44f8ecab302f36d221dfee81d7243bb1cf102c24;hpb=7a5bb07f226ea9de2796b5a47d718572ee96504a;p=libfirm

diff --git a/ir/be/sparc/sparc_finish.c b/ir/be/sparc/sparc_finish.c
index 44f8ecab3..ac8fc9e73 100644
--- a/ir/be/sparc/sparc_finish.c
+++ b/ir/be/sparc/sparc_finish.c
@@ -47,7 +47,7 @@
 #include "ircons.h"
 #include "irgwalk.h"
 #include "heights.h"
-
+#include "beirg.h"
 #include "bepeephole.h"
 #include "benode.h"
 #include "besched.h"
@@ -80,29 +80,27 @@ static void kill_unused_stacknodes(ir_node *node)
 
 static void introduce_epilog(ir_node *ret)
 {
-	const arch_register_t *sp_reg     = &sparc_registers[REG_SP];
-	ir_graph              *irg        = get_irn_irg(ret);
-	be_stack_layout_t     *layout     = be_get_irg_stack_layout(irg);
-	ir_node               *block      = get_nodes_block(ret);
-	ir_type               *frame_type = get_irg_frame_type(irg);
-	unsigned               frame_size = get_type_size_bytes(frame_type);
-	int                    sp_idx     = be_find_return_reg_input(ret, sp_reg);
-	ir_node               *sp         = get_irn_n(ret, sp_idx);
+	arch_register_t const *const sp_reg = &sparc_registers[REG_SP];
+	assert(arch_get_irn_register_req_in(ret, n_sparc_Return_sp) == sp_reg->single_req);
+	ir_node           *const sp     = get_irn_n(ret, n_sparc_Return_sp);
+	ir_node           *const block  = get_nodes_block(ret);
+	ir_graph          *const irg    = get_irn_irg(ret);
+	be_stack_layout_t *const layout = be_get_irg_stack_layout(irg);
 
 	if (!layout->sp_relative) {
-		const arch_register_t *fp_reg  = &sparc_registers[REG_FRAME_POINTER];
-		const arch_register_t *sp_reg  = &sparc_registers[REG_SP];
-		ir_node               *fp      = be_get_initial_reg_value(irg, fp_reg);
-		ir_node               *sp      = be_get_initial_reg_value(irg, sp_reg);
-		ir_node               *restore = new_bd_sparc_RestoreZero(NULL, block, sp, fp);
+		arch_register_t const *const fp_reg  = &sparc_registers[REG_FRAME_POINTER];
+		ir_node               *const fp      = be_get_initial_reg_value(irg, fp_reg);
+		ir_node               *const new_sp  = be_get_initial_reg_value(irg, sp_reg);
+		ir_node               *const restore = new_bd_sparc_RestoreZero(NULL, block, new_sp, fp);
 		sched_add_before(ret, restore);
 		arch_set_irn_register(restore, sp_reg);
-		set_irn_n(ret, sp_idx, restore);
-
+		set_irn_n(ret, n_sparc_Return_sp, restore);
 		kill_unused_stacknodes(sp);
 	} else {
-		ir_node *incsp = be_new_IncSP(sp_reg, block, sp, -frame_size, 0);
-		set_irn_n(ret, sp_idx, incsp);
+		ir_type  *const frame_type = get_irg_frame_type(irg);
+		unsigned  const frame_size = get_type_size_bytes(frame_type);
+		ir_node  *const incsp      = be_new_IncSP(sp_reg, block, sp, -frame_size, 0);
+		set_irn_n(ret, n_sparc_Return_sp, incsp);
 		sched_add_before(ret, incsp);
 	}
 }
@@ -114,7 +112,6 @@ void sparc_introduce_prolog_epilog(ir_graph *irg)
 	be_stack_layout_t *layout     = be_get_irg_stack_layout(irg);
 	ir_node           *block      = get_nodes_block(start);
 	ir_node           *initial_sp = be_get_initial_reg_value(irg, sp_reg);
-	ir_node           *sp         = initial_sp;
 	ir_node           *schedpoint = start;
 	ir_type           *frame_type = get_irg_frame_type(irg);
 	unsigned           frame_size = get_type_size_bytes(frame_type);
@@ -136,14 +133,12 @@ void sparc_introduce_prolog_epilog(ir_graph *irg)
 		schedpoint = sched_next(schedpoint);
 
 	if (!layout->sp_relative) {
-		ir_node *save = new_bd_sparc_Save_imm(NULL, block, sp, NULL,
-		                                      -SPARC_MIN_STACKSIZE-frame_size);
+		ir_node *const save = new_bd_sparc_Save_imm(NULL, block, initial_sp, NULL, -(SPARC_MIN_STACKSIZE + frame_size));
 		arch_set_irn_register(save, sp_reg);
 		sched_add_after(schedpoint, save);
 		schedpoint = save;
-		edges_reroute(initial_sp, save);
-		set_irn_n(save, n_sparc_Save_stack, initial_sp);
+		edges_reroute_except(initial_sp, save, save);
 
 		/* we still need the Save even if noone is explicitely using the
 		 * value. (TODO: this isn't 100% correct yet, something at the end of
@@ -156,9 +151,8 @@ void sparc_introduce_prolog_epilog(ir_graph *irg)
 			sched_add_after(schedpoint, keep);
 		}
 	} else {
-		ir_node *incsp = be_new_IncSP(sp_reg, block, sp, frame_size, 0);
-		edges_reroute(initial_sp, incsp);
-		be_set_IncSP_pred(incsp, sp);
+		ir_node *const incsp = be_new_IncSP(sp_reg, block, initial_sp, frame_size, 0);
+		edges_reroute_except(initial_sp, incsp, incsp);
 		sched_add_after(schedpoint, incsp);
 	}
 }
@@ -251,7 +245,7 @@ static void finish_sparc_FrameAddr(ir_node *node)
 
 		sched_add_before(node, new_frameaddr);
 		arch_set_irn_register(new_frameaddr, reg);
-		exchange(node, new_frameaddr);
+		be_peephole_exchange(node, new_frameaddr);
 	}
 }
 
@@ -273,21 +267,49 @@ static void finish_sparc_Ld(ir_node *node)
 		ir_node                 *constant       = create_constant_from_immediate(node, offset);
 		ir_node                 *new_load       = new_bd_sparc_Ld_reg(dbgi, block, ptr, constant, mem, load_store_mode);
 		sparc_load_store_attr_t *new_load_attr  = get_sparc_load_store_attr(new_load);
-		unsigned                 n_outs         = arch_get_irn_n_outs(node);
-		unsigned                 i;
 
 		new_load_attr->is_frame_entity = load_store_attr->is_frame_entity;
 		new_load_attr->is_reg_reg      = load_store_attr->is_reg_reg;
 
 		sched_add_before(node, new_load);
-		for (i = 0; i < n_outs; i++) {
+		be_foreach_out(node, i) {
 			arch_set_irn_register_out(new_load, i, arch_get_irn_register_out(node, i));
 		}
-		exchange(node, new_load);
+		be_peephole_exchange(node, new_load);
 	}
 }
 
+static void split_sparc_ldf(ir_node *node)
+{
+	sparc_load_store_attr_t *attr = get_sparc_load_store_attr(node);
+	unsigned                 bits = get_mode_size_bits(attr->load_store_mode);
+	/* split 128bit loads into 2 64bit loads */
+	if (bits == 128) {
+		dbg_info *dbgi  = get_irn_dbg_info(node);
+		ir_node  *block = get_nodes_block(node);
+		ir_node  *ptr   = get_irn_n(node, n_sparc_Ldf_ptr);
+		ir_node  *mem   = get_irn_n(node, n_sparc_Ldf_mem);
+		ir_node  *new_load
+			= new_bd_sparc_Ldf_d(dbgi, block, ptr, mem, mode_D,
+			                     attr->base.immediate_value_entity,
+			                     attr->base.immediate_value + 8,
+			                     attr->is_frame_entity);
+		ir_node  *new_mem = new_r_Proj(new_load, mode_M, pn_sparc_Ldf_M);
+
+		const arch_register_t *reg
+			= arch_get_irn_register_out(node, pn_sparc_Ldf_res);
+		unsigned reg_index = reg->global_index;
+
+		arch_set_irn_register_out(new_load, pn_sparc_Ldf_res,
+		                          &sparc_registers[reg_index+2]);
+
+		attr->load_store_mode = mode_D;
+		set_irn_n(node, n_sparc_Ldf_mem, new_mem);
+		sched_add_before(node, new_load);
+	}
+}
+
 static void finish_sparc_Ldf(ir_node *node)
 {
 	sparc_attr_t *attr = get_sparc_attr(node);
@@ -307,17 +329,15 @@ static void finish_sparc_Ldf(ir_node *node)
 		ir_node                 *new_ptr        = new_bd_sparc_Add_reg(dbgi, block, ptr, constant);
 		ir_node                 *new_load       = new_bd_sparc_Ldf_s(dbgi, block, new_ptr, mem, load_store_mode, NULL, 0, true);
 		sparc_load_store_attr_t *new_load_attr  = get_sparc_load_store_attr(new_load);
-		unsigned                 n_outs         = arch_get_irn_n_outs(node);
-		unsigned                 i;
 
 		new_load_attr->is_frame_entity = load_store_attr->is_frame_entity;
 		new_load_attr->is_reg_reg      = load_store_attr->is_reg_reg;
 
 		sched_add_before(node, new_load);
-		for (i = 0; i < n_outs; i++) {
+		be_foreach_out(node, i) {
 			arch_set_irn_register_out(new_load, i, arch_get_irn_register_out(node, i));
 		}
-		exchange(node, new_load);
+		be_peephole_exchange(node, new_load);
 	}
 }
 
@@ -341,17 +361,15 @@ static void finish_sparc_St(ir_node *node)
 		ir_node                 *constant       = create_constant_from_immediate(node, offset);
 		ir_node                 *new_load       = new_bd_sparc_St_reg(dbgi, block, value, ptr, constant, mem, load_store_mode);
 		sparc_load_store_attr_t *new_load_attr  = get_sparc_load_store_attr(new_load);
-		unsigned                 n_outs         = arch_get_irn_n_outs(node);
-		unsigned                 i;
 
 		new_load_attr->is_frame_entity = load_store_attr->is_frame_entity;
 		new_load_attr->is_reg_reg      = load_store_attr->is_reg_reg;
 
 		sched_add_before(node, new_load);
-		for (i = 0; i < n_outs; i++) {
+		be_foreach_out(node, i) {
 			arch_set_irn_register_out(new_load, i, arch_get_irn_register_out(node, i));
 		}
-		exchange(node, new_load);
+		be_peephole_exchange(node, new_load);
 	}
 }
 
@@ -376,17 +394,15 @@ static void finish_sparc_Stf(ir_node *node)
 		ir_node                 *new_ptr        = new_bd_sparc_Add_reg(dbgi, block, ptr, constant);
 		ir_node                 *new_load       = new_bd_sparc_Stf_s(dbgi, block, value, new_ptr, mem, load_store_mode, NULL, 0, true);
 		sparc_load_store_attr_t *new_load_attr  = get_sparc_load_store_attr(new_load);
-		unsigned                 n_outs         = arch_get_irn_n_outs(node);
-		unsigned                 i;
 
 		new_load_attr->is_frame_entity = load_store_attr->is_frame_entity;
 		new_load_attr->is_reg_reg      = load_store_attr->is_reg_reg;
 
 		sched_add_before(node, new_load);
-		for (i = 0; i < n_outs; i++) {
+		be_foreach_out(node, i) {
 			arch_set_irn_register_out(new_load, i, arch_get_irn_register_out(node, i));
 		}
-		exchange(node, new_load);
+		be_peephole_exchange(node, new_load);
 	}
 }
 
@@ -493,11 +509,12 @@ static void peephole_sparc_RestoreZero(ir_node *node)
 	 * (and therefore after code selection).
 	 */
 	int n_tries = 10; /* limit our search */
-	ir_node *schedpoint = node;
-	while (sched_has_prev(schedpoint)) {
+	for (ir_node *schedpoint = node;;) {
 		const arch_register_t *reg;
 		schedpoint = sched_prev(schedpoint);
+		if (sched_is_begin(schedpoint))
+			break;
 
 		if (--n_tries == 0)
 			break;
@@ -512,13 +529,13 @@ static void peephole_sparc_RestoreZero(ir_node *node)
 		if (!is_restorezeroopt_reg(reg))
 			continue;
 
-		if (be_is_Copy(schedpoint) && be_can_move_before(heights, schedpoint, node)) {
-			ir_node *op = get_irn_n(schedpoint, n_be_Copy_op);
+		if (be_is_Copy(schedpoint) && be_can_move_down(heights, schedpoint, node)) {
+			ir_node *const op = be_get_Copy_op(schedpoint);
 			replace_with_restore_imm(node, schedpoint, op, NULL, 0);
 		} else if (is_sparc_Or(schedpoint) &&
 		           arch_get_irn_flags(schedpoint) & ((arch_irn_flags_t)sparc_arch_irn_flag_immediate_form) &&
 		           arch_get_irn_register_in(schedpoint, 0) == &sparc_registers[REG_G0] &&
-		           be_can_move_before(heights, schedpoint, node)) {
+		           be_can_move_down(heights, schedpoint, node)) {
 			/* it's a constant */
 			const sparc_attr_t *attr = get_sparc_attr_const(schedpoint);
 			ir_entity *entity = attr->immediate_value_entity;
@@ -526,7 +543,7 @@ static void peephole_sparc_RestoreZero(ir_node *node)
 			ir_node *g0 = get_irn_n(schedpoint, 0);
 			replace_with_restore_imm(node, schedpoint, g0, entity, immediate);
 		} else if (is_sparc_Add(schedpoint) &&
-		           be_can_move_before(heights, schedpoint, node)) {
+		           be_can_move_down(heights, schedpoint, node)) {
 			if (arch_get_irn_flags(schedpoint) & ((arch_irn_flags_t)sparc_arch_irn_flag_immediate_form)) {
 				ir_node *op = get_irn_n(schedpoint, 0);
 				const sparc_attr_t *attr = get_sparc_attr_const(schedpoint);
@@ -541,7 +558,7 @@ static void peephole_sparc_RestoreZero(ir_node *node)
 		} else if (is_sparc_Sub(schedpoint) &&
 		           arch_get_irn_flags(schedpoint) & ((arch_irn_flags_t)sparc_arch_irn_flag_immediate_form) &&
 		           arch_get_irn_register_in(schedpoint, 0) == &sparc_registers[REG_G0] &&
-		           be_can_move_before(heights, schedpoint, node)) {
+		           be_can_move_down(heights, schedpoint, node)) {
 			/* it's a constant */
 			const sparc_attr_t *attr = get_sparc_attr_const(schedpoint);
 			ir_entity *entity = attr->immediate_value_entity;
@@ -560,25 +577,13 @@ static void peephole_sparc_RestoreZero(ir_node *node)
 
 static void finish_sparc_Return(ir_node *node)
 {
-	ir_node *schedpoint = node;
-	ir_node *restore;
-	/* see that there is no code between Return and restore, if there is move
-	 * it in front of the restore */
-	while (true) {
-		if (!sched_has_prev(schedpoint))
-			return;
-		schedpoint = sched_prev(schedpoint);
-		if (is_sparc_Restore(schedpoint) || is_sparc_RestoreZero(schedpoint))
+	/* Ensure that the restore is directly before the return. */
+	sched_foreach_reverse_from(sched_prev(node), restore) {
+		if (is_sparc_Restore(restore) || is_sparc_RestoreZero(restore)) {
+			sched_remove(restore);
+			sched_add_before(node, restore);
 			break;
-	}
-	restore = schedpoint;
-	schedpoint = sched_prev(node);
-	/* move all code between return and restore up */
-	while (schedpoint != restore) {
-		ir_node *next_schedpoint = sched_prev(schedpoint);
-		sched_remove(schedpoint);
-		sched_add_before(restore, schedpoint);
-		schedpoint = next_schedpoint;
+		}
 	}
 }
 
@@ -658,6 +663,7 @@ void sparc_finish_graph(ir_graph *irg)
 	register_peephole_optimisation(op_sparc_FrameAddr,   peephole_sparc_FrameAddr);
 	register_peephole_optimisation(op_sparc_RestoreZero, peephole_sparc_RestoreZero);
+	register_peephole_optimisation(op_sparc_Ldf,         split_sparc_ldf);
 	be_peephole_opt(irg);
 
 	/* perform legalizations (mostly fix nodes with too big immediates) */