2 * Copyright (C) 1995-2011 University of Karlsruhe. All rights reserved.
4 * This file is part of libFirm.
6 * This file may be distributed and/or modified under the terms of the
7 * GNU General Public License version 2 as published by the Free Software
8 * Foundation and appearing in the file LICENSE.GPL included in the
9 * packaging of this file.
11 * Licensees holding valid libFirm Professional Edition licenses may use
12 * this file in accordance with the libFirm Commercial License.
13 * Agreement provided with the Software.
15 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 * @brief Backend node support for generic backend nodes.
23 * @author Sebastian Hack
26 * Backend node support for generic backend nodes.
27 * This file provides Perm, Copy, Spill and Reload nodes.
39 #include "bitfiddle.h"
40 #include "raw_bitset.h"
51 #include "irbackedge_t.h"
52 #include "irverify_t.h"
/* NOTE(review): the attribute-struct declarations below arrived garbled from
 * extraction -- member lists, closing braces and several typedef names are
 * missing from this view. Lines are kept verbatim; only comments added. */
62 typedef struct be_node_attr_t {
66 /** The be_Return nodes attribute type. */
69 int num_ret_vals; /**< number of return values */
70 unsigned pop; /**< number of bytes that should be popped */
71 int emit_pop; /**< if set, emit pop bytes, even if pop = 0 */
74 /** The be_IncSP attribute type. */
77 int offset; /**< The offset by which the stack shall be
79 int align; /**< whether stack should be aligned after the
83 /** The be_Frame attribute type. */
90 /** The be_Call attribute type. */
93 ir_entity *ent; /**< called entity if this is a static call. */
95 ir_type *call_tp; /**< call type, copied from the original Call */
/* be_MemPerm attribute: frame entities for inputs and outputs */
100 ir_entity **in_entities;
101 ir_entity **out_entities;
/* globally visible backend opcodes (partial list visible here) */
107 ir_op *op_be_MemPerm;
110 ir_op *op_be_CopyKeep;
117 ir_op *op_be_FrameAddr;
120 * Compare the attributes of two be_FrameAddr nodes.
122 * @return zero if both nodes have identically attributes
124 static int FrameAddr_cmp_attr(const ir_node *a, const ir_node *b)
126 const be_frame_attr_t *a_attr = (const be_frame_attr_t*)get_irn_generic_attr_const(a);
127 const be_frame_attr_t *b_attr = (const be_frame_attr_t*)get_irn_generic_attr_const(b);
129 if (a_attr->ent != b_attr->ent || a_attr->offset != b_attr->offset)
132 return be_nodes_equal(a, b);
136 * Compare the attributes of two be_Return nodes.
138 * @return zero if both nodes have identically attributes
140 static int Return_cmp_attr(const ir_node *a, const ir_node *b)
142 const be_return_attr_t *a_attr = (const be_return_attr_t*)get_irn_generic_attr_const(a);
143 const be_return_attr_t *b_attr = (const be_return_attr_t*)get_irn_generic_attr_const(b);
145 if (a_attr->num_ret_vals != b_attr->num_ret_vals)
147 if (a_attr->pop != b_attr->pop)
149 if (a_attr->emit_pop != b_attr->emit_pop)
152 return be_nodes_equal(a, b);
156 * Compare the attributes of two be_IncSP nodes.
158 * @return zero if both nodes have identically attributes
160 static int IncSP_cmp_attr(const ir_node *a, const ir_node *b)
162 const be_incsp_attr_t *a_attr = (const be_incsp_attr_t*)get_irn_generic_attr_const(a);
163 const be_incsp_attr_t *b_attr = (const be_incsp_attr_t*)get_irn_generic_attr_const(b);
165 if (a_attr->offset != b_attr->offset)
168 return be_nodes_equal(a, b);
172 * Compare the attributes of two be_Call nodes.
174 * @return zero if both nodes have identically attributes
176 static int Call_cmp_attr(const ir_node *a, const ir_node *b)
178 const be_call_attr_t *a_attr = (const be_call_attr_t*)get_irn_generic_attr_const(a);
179 const be_call_attr_t *b_attr = (const be_call_attr_t*)get_irn_generic_attr_const(b);
181 if (a_attr->ent != b_attr->ent ||
182 a_attr->call_tp != b_attr->call_tp)
185 return be_nodes_equal(a, b);
188 static arch_register_req_t *allocate_reg_req(const ir_node *node)
190 ir_graph *irg = get_irn_irg(node);
191 struct obstack *obst = be_get_be_obst(irg);
193 arch_register_req_t *req = OALLOCZ(obst, arch_register_req_t);
197 void be_set_constr_in(ir_node *node, int pos, const arch_register_req_t *req)
199 backend_info_t *info = be_get_info(node);
200 assert(pos < get_irn_arity(node));
201 info->in_reqs[pos] = req;
204 void be_set_constr_out(ir_node *node, int pos, const arch_register_req_t *req)
206 backend_info_t *info = be_get_info(node);
207 info->out_infos[pos].req = req;
211 * Initializes the generic attribute of all be nodes and return it.
213 static void init_node_attr(ir_node *node, int n_inputs, int n_outputs)
215 ir_graph *irg = get_irn_irg(node);
216 struct obstack *obst = be_get_be_obst(irg);
217 backend_info_t *info = be_get_info(node);
218 const arch_register_req_t **in_reqs;
222 assert(n_inputs == get_irn_arity(node));
223 in_reqs = OALLOCN(obst, const arch_register_req_t*, n_inputs);
224 for (i = 0; i < n_inputs; ++i) {
225 in_reqs[i] = arch_no_register_req;
228 in_reqs = NEW_ARR_F(const arch_register_req_t*, 0);
230 info->in_reqs = in_reqs;
232 if (n_outputs >= 0) {
234 info->out_infos = NEW_ARR_D(reg_out_info_t, obst, n_outputs);
235 memset(info->out_infos, 0, n_outputs * sizeof(info->out_infos[0]));
236 for (i = 0; i < n_outputs; ++i) {
237 info->out_infos[i].req = arch_no_register_req;
240 info->out_infos = NEW_ARR_F(reg_out_info_t, 0);
244 static void add_register_req_in(ir_node *node, const arch_register_req_t *req)
246 backend_info_t *info = be_get_info(node);
247 ARR_APP1(const arch_register_req_t*, info->in_reqs, req);
250 ir_node *be_new_Spill(const arch_register_class_t *cls,
251 const arch_register_class_t *cls_frame, ir_node *bl,
252 ir_node *frame, ir_node *to_spill)
257 ir_graph *irg = get_Block_irg(bl);
261 res = new_ir_node(NULL, irg, bl, op_be_Spill, mode_M, 2, in);
262 init_node_attr(res, 2, 1);
263 a = (be_frame_attr_t*) get_irn_generic_attr(res);
266 a->base.exc.pin_state = op_pin_state_pinned;
268 be_node_set_reg_class_in(res, n_be_Spill_frame, cls_frame);
269 be_node_set_reg_class_in(res, n_be_Spill_val, cls);
270 arch_set_irn_register_req_out(res, 0, arch_no_register_req);
275 ir_node *be_new_Reload(const arch_register_class_t *cls,
276 const arch_register_class_t *cls_frame, ir_node *block,
277 ir_node *frame, ir_node *mem, ir_mode *mode)
281 ir_graph *irg = get_Block_irg(block);
286 res = new_ir_node(NULL, irg, block, op_be_Reload, mode, 2, in);
288 init_node_attr(res, 2, 1);
289 be_node_set_reg_class_out(res, 0, cls);
291 be_node_set_reg_class_in(res, n_be_Reload_frame, cls_frame);
292 arch_set_irn_flags(res, arch_irn_flags_rematerializable);
294 a = (be_frame_attr_t*) get_irn_generic_attr(res);
297 a->base.exc.pin_state = op_pin_state_pinned;
302 ir_node *be_get_Reload_mem(const ir_node *irn)
304 assert(be_is_Reload(irn));
305 return get_irn_n(irn, n_be_Reload_mem);
308 ir_node *be_get_Reload_frame(const ir_node *irn)
310 assert(be_is_Reload(irn));
311 return get_irn_n(irn, n_be_Reload_frame);
314 ir_node *be_get_Spill_val(const ir_node *irn)
316 assert(be_is_Spill(irn));
317 return get_irn_n(irn, n_be_Spill_val);
320 ir_node *be_get_Spill_frame(const ir_node *irn)
322 assert(be_is_Spill(irn));
323 return get_irn_n(irn, n_be_Spill_frame);
326 ir_node *be_new_Perm(const arch_register_class_t *cls, ir_node *block,
327 int n, ir_node *in[])
330 ir_graph *irg = get_Block_irg(block);
331 be_node_attr_t *attr;
333 ir_node *irn = new_ir_node(NULL, irg, block, op_be_Perm, mode_T, n, in);
334 init_node_attr(irn, n, n);
335 attr = (be_node_attr_t*) get_irn_generic_attr(irn);
336 attr->exc.pin_state = op_pin_state_pinned;
337 for (i = 0; i < n; ++i) {
338 const ir_node *input = in[i];
339 const arch_register_req_t *req = arch_get_irn_register_req(input);
340 if (req->width == 1) {
341 be_set_constr_in(irn, i, cls->class_req);
342 be_set_constr_out(irn, i, cls->class_req);
344 arch_register_req_t *new_req = allocate_reg_req(irn);
346 new_req->type = (req->type & arch_register_req_type_aligned);
347 new_req->width = req->width;
348 be_set_constr_in(irn, i, new_req);
349 be_set_constr_out(irn, i, new_req);
356 void be_Perm_reduce(ir_node *perm, int new_size, int *map)
358 int arity = get_irn_arity(perm);
359 const arch_register_req_t **old_in_reqs
360 = ALLOCAN(const arch_register_req_t*, arity);
361 reg_out_info_t *old_infos = ALLOCAN(reg_out_info_t, arity);
362 backend_info_t *info = be_get_info(perm);
366 assert(be_is_Perm(perm));
367 assert(new_size <= arity);
369 new_in = ALLOCAN(ir_node*, new_size);
371 /* save the old register data */
372 memcpy(old_in_reqs, info->in_reqs, arity * sizeof(old_in_reqs[0]));
373 memcpy(old_infos, info->out_infos, arity * sizeof(old_infos[0]));
375 /* compose the new in array and set the new register data directly */
376 for (i = 0; i < new_size; ++i) {
378 new_in[i] = get_irn_n(perm, idx);
379 info->in_reqs[i] = old_in_reqs[idx];
380 info->out_infos[i] = old_infos[idx];
383 set_irn_in(perm, new_size, new_in);
386 ir_node *be_new_MemPerm(ir_node *block, int n, ir_node *in[])
388 ir_graph *irg = get_Block_irg(block);
389 const arch_env_t *arch_env = be_get_irg_arch_env(irg);
390 ir_node *frame = get_irg_frame(irg);
391 const arch_register_t *sp = arch_env->sp;
393 be_memperm_attr_t *attr;
396 real_in = ALLOCAN(ir_node*, n + 1);
398 memcpy(&real_in[1], in, n * sizeof(real_in[0]));
400 irn = new_ir_node(NULL, irg, block, op_be_MemPerm, mode_T, n+1, real_in);
402 init_node_attr(irn, n + 1, n);
403 be_node_set_reg_class_in(irn, 0, sp->reg_class);
405 attr = (be_memperm_attr_t*)get_irn_generic_attr(irn);
406 attr->in_entities = OALLOCNZ(irg->obst, ir_entity*, n);
407 attr->out_entities = OALLOCNZ(irg->obst, ir_entity*, n);
412 ir_node *be_new_Copy(ir_node *bl, ir_node *op)
416 arch_register_req_t *req;
417 be_node_attr_t *attr;
418 ir_graph *irg = get_Block_irg(bl);
419 const arch_register_req_t *in_req = arch_get_irn_register_req(op);
420 const arch_register_class_t *cls = in_req->cls;
423 res = new_ir_node(NULL, irg, bl, op_be_Copy, get_irn_mode(op), 1, in);
424 init_node_attr(res, 1, 1);
425 attr = (be_node_attr_t*) get_irn_generic_attr(res);
426 attr->exc.pin_state = op_pin_state_floats;
427 be_node_set_reg_class_in(res, 0, cls);
428 be_node_set_reg_class_out(res, 0, cls);
430 req = allocate_reg_req(res);
432 req->type = arch_register_req_type_should_be_same
433 | (in_req->type & arch_register_req_type_aligned);
434 req->other_same = 1U << 0;
435 req->width = in_req->width;
436 be_set_constr_out(res, 0, req);
441 ir_node *be_get_Copy_op(const ir_node *cpy)
443 return get_irn_n(cpy, n_be_Copy_op);
446 void be_set_Copy_op(ir_node *cpy, ir_node *op)
448 set_irn_n(cpy, n_be_Copy_op, op);
/**
 * Create a be_Keep node keeping the given values alive.
 * NOTE(review): several lines of this function are missing from this
 * extract (declarations of `res`/`i`, loop close, final return and a
 * possible keep_alive call) -- visible code kept verbatim below.
 */
451 ir_node *be_new_Keep(ir_node *block, int n, ir_node *in[])
455 ir_graph *irg = get_Block_irg(block);
456 be_node_attr_t *attr;
/* dynamic arity (-1): inputs are appended one by one below */
458 res = new_ir_node(NULL, irg, block, op_be_Keep, mode_ANY, -1, NULL);
459 init_node_attr(res, -1, 1);
460 attr = (be_node_attr_t*) get_irn_generic_attr(res);
461 attr->exc.pin_state = op_pin_state_pinned;
463 for (i = 0; i < n; ++i) {
464 ir_node *pred = in[i];
465 add_irn_n(res, pred);
466 const arch_register_req_t *req = arch_get_irn_register_req(pred);
/* class-less values get the "no requirement" placeholder */
467 req = req->cls != NULL ? req->cls->class_req : arch_no_register_req;
468 add_register_req_in(res, req);
475 void be_Keep_add_node(ir_node *keep, const arch_register_class_t *cls, ir_node *node)
477 assert(be_is_Keep(keep));
478 add_irn_n(keep, node);
479 add_register_req_in(keep, cls->class_req);
482 ir_node *be_new_Call(dbg_info *dbg, ir_graph *irg, ir_node *bl, ir_node *mem,
483 const arch_register_req_t *sp_req, ir_node *sp,
484 const arch_register_req_t *ptr_req, ir_node *ptr,
485 int n_outs, int n, ir_node *in[], ir_type *call_tp)
488 int real_n = n_be_Call_first_arg + n;
492 NEW_ARR_A(ir_node *, real_in, real_n);
493 real_in[n_be_Call_mem] = mem;
494 real_in[n_be_Call_sp] = sp;
495 real_in[n_be_Call_ptr] = ptr;
496 memcpy(&real_in[n_be_Call_first_arg], in, n * sizeof(in[0]));
498 irn = new_ir_node(dbg, irg, bl, op_be_Call, mode_T, real_n, real_in);
499 init_node_attr(irn, real_n, n_outs);
500 a = (be_call_attr_t*)get_irn_generic_attr(irn);
502 a->call_tp = call_tp;
504 a->base.exc.pin_state = op_pin_state_pinned;
505 be_set_constr_in(irn, n_be_Call_sp, sp_req);
506 be_set_constr_in(irn, n_be_Call_ptr, ptr_req);
510 ir_entity *be_Call_get_entity(const ir_node *call)
512 const be_call_attr_t *a = (const be_call_attr_t*)get_irn_generic_attr_const(call);
513 assert(be_is_Call(call));
517 void be_Call_set_entity(ir_node *call, ir_entity *ent)
519 be_call_attr_t *a = (be_call_attr_t*)get_irn_generic_attr(call);
520 assert(be_is_Call(call));
524 ir_type *be_Call_get_type(ir_node *call)
526 const be_call_attr_t *a = (const be_call_attr_t*)get_irn_generic_attr_const(call);
527 assert(be_is_Call(call));
531 void be_Call_set_type(ir_node *call, ir_type *call_tp)
533 be_call_attr_t *a = (be_call_attr_t*)get_irn_generic_attr(call);
534 assert(be_is_Call(call));
535 a->call_tp = call_tp;
538 void be_Call_set_pop(ir_node *call, unsigned pop)
540 be_call_attr_t *a = (be_call_attr_t*)get_irn_generic_attr(call);
544 unsigned be_Call_get_pop(const ir_node *call)
546 const be_call_attr_t *a = (const be_call_attr_t*)get_irn_generic_attr_const(call);
550 ir_node *be_new_Return(dbg_info *dbg, ir_graph *irg, ir_node *block, int n_res,
551 unsigned pop, int n, ir_node *in[])
556 res = new_ir_node(dbg, irg, block, op_be_Return, mode_X, n, in);
557 init_node_attr(res, n, 1);
558 be_set_constr_out(res, 0, arch_no_register_req);
560 a = (be_return_attr_t*)get_irn_generic_attr(res);
561 a->num_ret_vals = n_res;
564 a->base.exc.pin_state = op_pin_state_pinned;
569 int be_Return_get_n_rets(const ir_node *ret)
571 const be_return_attr_t *a = (const be_return_attr_t*)get_irn_generic_attr_const(ret);
572 return a->num_ret_vals;
575 unsigned be_Return_get_pop(const ir_node *ret)
577 const be_return_attr_t *a = (const be_return_attr_t*)get_irn_generic_attr_const(ret);
581 int be_Return_get_emit_pop(const ir_node *ret)
583 const be_return_attr_t *a = (const be_return_attr_t*)get_irn_generic_attr_const(ret);
587 void be_Return_set_emit_pop(ir_node *ret, int emit_pop)
589 be_return_attr_t *a = (be_return_attr_t*)get_irn_generic_attr(ret);
590 a->emit_pop = emit_pop;
593 ir_node *be_new_IncSP(const arch_register_t *sp, ir_node *bl,
594 ir_node *old_sp, int offset, int align)
599 ir_graph *irg = get_Block_irg(bl);
602 irn = new_ir_node(NULL, irg, bl, op_be_IncSP, sp->reg_class->mode,
604 init_node_attr(irn, 1, 1);
605 a = (be_incsp_attr_t*)get_irn_generic_attr(irn);
608 a->base.exc.pin_state = op_pin_state_pinned;
610 /* Set output constraint to stack register. */
611 be_node_set_reg_class_in(irn, 0, sp->reg_class);
612 be_set_constr_single_reg_out(irn, 0, sp, arch_register_req_type_produces_sp);
617 ir_node *be_new_AddSP(const arch_register_t *sp, ir_node *bl, ir_node *old_sp,
621 ir_node *in[n_be_AddSP_last];
623 be_node_attr_t *attr;
625 in[n_be_AddSP_old_sp] = old_sp;
626 in[n_be_AddSP_size] = sz;
628 irg = get_Block_irg(bl);
629 irn = new_ir_node(NULL, irg, bl, op_be_AddSP, mode_T, n_be_AddSP_last, in);
630 init_node_attr(irn, n_be_AddSP_last, pn_be_AddSP_last);
631 attr = (be_node_attr_t*) get_irn_generic_attr(irn);
632 attr->exc.pin_state = op_pin_state_pinned;
634 /* Set output constraint to stack register. */
635 be_set_constr_single_reg_in(irn, n_be_AddSP_old_sp, sp,
636 arch_register_req_type_none);
637 be_node_set_reg_class_in(irn, n_be_AddSP_size, sp->reg_class);
638 be_set_constr_single_reg_out(irn, pn_be_AddSP_sp, sp,
639 arch_register_req_type_produces_sp);
644 ir_node *be_new_SubSP(const arch_register_t *sp, ir_node *bl, ir_node *old_sp, ir_node *sz)
647 ir_node *in[n_be_SubSP_last];
649 be_node_attr_t *attr;
651 in[n_be_SubSP_old_sp] = old_sp;
652 in[n_be_SubSP_size] = sz;
654 irg = get_Block_irg(bl);
655 irn = new_ir_node(NULL, irg, bl, op_be_SubSP, mode_T, n_be_SubSP_last, in);
656 init_node_attr(irn, n_be_SubSP_last, pn_be_SubSP_last);
657 attr = (be_node_attr_t*) get_irn_generic_attr(irn);
658 attr->exc.pin_state = op_pin_state_pinned;
660 /* Set output constraint to stack register. */
661 be_set_constr_single_reg_in(irn, n_be_SubSP_old_sp, sp,
662 arch_register_req_type_none);
663 be_node_set_reg_class_in(irn, n_be_SubSP_size, sp->reg_class);
664 be_set_constr_single_reg_out(irn, pn_be_SubSP_sp, sp, arch_register_req_type_produces_sp);
669 ir_node *be_new_Start(dbg_info *dbgi, ir_node *bl, int n_outs)
672 ir_graph *irg = get_Block_irg(bl);
673 be_node_attr_t *attr;
675 res = new_ir_node(dbgi, irg, bl, op_be_Start, mode_T, 0, NULL);
676 init_node_attr(res, 0, n_outs);
677 attr = (be_node_attr_t*) get_irn_generic_attr(res);
678 attr->exc.pin_state = op_pin_state_pinned;
683 ir_node *be_new_FrameAddr(const arch_register_class_t *cls_frame, ir_node *bl, ir_node *frame, ir_entity *ent)
688 ir_graph *irg = get_Block_irg(bl);
691 irn = new_ir_node(NULL, irg, bl, op_be_FrameAddr, get_irn_mode(frame), 1, in);
692 init_node_attr(irn, 1, 1);
693 a = (be_frame_attr_t*)get_irn_generic_attr(irn);
696 a->base.exc.pin_state = op_pin_state_floats;
697 be_node_set_reg_class_in(irn, 0, cls_frame);
698 be_node_set_reg_class_out(irn, 0, cls_frame);
700 return optimize_node(irn);
703 ir_node *be_get_FrameAddr_frame(const ir_node *node)
705 assert(be_is_FrameAddr(node));
706 return get_irn_n(node, n_be_FrameAddr_ptr);
709 ir_entity *be_get_FrameAddr_entity(const ir_node *node)
711 const be_frame_attr_t *attr = (const be_frame_attr_t*)get_irn_generic_attr_const(node);
715 ir_node *be_new_CopyKeep(ir_node *bl, ir_node *src, int n, ir_node *in_keep[])
718 ir_node **in = ALLOCAN(ir_node*, n + 1);
719 ir_graph *irg = get_Block_irg(bl);
720 const arch_register_req_t *req = arch_get_irn_register_req(src);
721 const arch_register_class_t *cls = req->cls;
722 ir_mode *mode = get_irn_mode(src);
723 be_node_attr_t *attr;
726 memcpy(&in[1], in_keep, n * sizeof(in[0]));
727 irn = new_ir_node(NULL, irg, bl, op_be_CopyKeep, mode, n + 1, in);
728 init_node_attr(irn, n + 1, 1);
729 attr = (be_node_attr_t*) get_irn_generic_attr(irn);
730 attr->exc.pin_state = op_pin_state_floats;
731 be_node_set_reg_class_in(irn, 0, cls);
732 be_node_set_reg_class_out(irn, 0, cls);
733 for (int i = 0; i < n; ++i) {
734 ir_node *pred = in_keep[i];
735 const arch_register_req_t *req = arch_get_irn_register_req(pred);
736 req = req->cls != NULL ? req->cls->class_req : arch_no_register_req;
737 be_set_constr_in(irn, i+1, req);
743 ir_node *be_new_CopyKeep_single(ir_node *bl, ir_node *src, ir_node *keep)
745 return be_new_CopyKeep(bl, src, 1, &keep);
748 ir_node *be_get_CopyKeep_op(const ir_node *cpy)
750 return get_irn_n(cpy, n_be_CopyKeep_op);
753 void be_set_CopyKeep_op(ir_node *cpy, ir_node *op)
755 set_irn_n(cpy, n_be_CopyKeep_op, op);
758 static bool be_has_frame_entity(const ir_node *irn)
760 switch (get_irn_opcode(irn)) {
770 ir_entity *be_get_frame_entity(const ir_node *irn)
772 if (be_has_frame_entity(irn)) {
773 const be_frame_attr_t *a = (const be_frame_attr_t*)get_irn_generic_attr_const(irn);
779 int be_get_frame_offset(const ir_node *irn)
781 assert(is_be_node(irn));
782 if (be_has_frame_entity(irn)) {
783 const be_frame_attr_t *a = (const be_frame_attr_t*)get_irn_generic_attr_const(irn);
789 void be_set_MemPerm_in_entity(const ir_node *irn, int n, ir_entity *ent)
791 const be_memperm_attr_t *attr = (const be_memperm_attr_t*)get_irn_generic_attr_const(irn);
793 assert(be_is_MemPerm(irn));
794 assert(n < be_get_MemPerm_entity_arity(irn));
796 attr->in_entities[n] = ent;
799 ir_entity* be_get_MemPerm_in_entity(const ir_node* irn, int n)
801 const be_memperm_attr_t *attr = (const be_memperm_attr_t*)get_irn_generic_attr_const(irn);
803 assert(be_is_MemPerm(irn));
804 assert(n < be_get_MemPerm_entity_arity(irn));
806 return attr->in_entities[n];
809 void be_set_MemPerm_out_entity(const ir_node *irn, int n, ir_entity *ent)
811 const be_memperm_attr_t *attr = (const be_memperm_attr_t*)get_irn_generic_attr_const(irn);
813 assert(be_is_MemPerm(irn));
814 assert(n < be_get_MemPerm_entity_arity(irn));
816 attr->out_entities[n] = ent;
819 ir_entity* be_get_MemPerm_out_entity(const ir_node* irn, int n)
821 const be_memperm_attr_t *attr = (const be_memperm_attr_t*)get_irn_generic_attr_const(irn);
823 assert(be_is_MemPerm(irn));
824 assert(n < be_get_MemPerm_entity_arity(irn));
826 return attr->out_entities[n];
829 int be_get_MemPerm_entity_arity(const ir_node *irn)
831 return get_irn_arity(irn) - 1;
834 const arch_register_req_t *be_create_reg_req(struct obstack *obst,
835 const arch_register_t *reg, arch_register_req_type_t additional_types)
837 arch_register_req_t *req = OALLOC(obst, arch_register_req_t);
838 const arch_register_class_t *cls = arch_register_get_class(reg);
839 unsigned *limited_bitset;
841 limited_bitset = rbitset_obstack_alloc(obst, arch_register_class_n_regs(cls));
842 rbitset_set(limited_bitset, arch_register_get_index(reg));
844 req->type = arch_register_req_type_limited | additional_types;
846 req->limited = limited_bitset;
851 void be_set_constr_single_reg_in(ir_node *node, int pos,
852 const arch_register_t *reg, arch_register_req_type_t additional_types)
854 const arch_register_req_t *req;
856 if (additional_types == 0) {
857 req = reg->single_req;
859 ir_graph *irg = get_irn_irg(node);
860 struct obstack *obst = be_get_be_obst(irg);
861 req = be_create_reg_req(obst, reg, additional_types);
863 be_set_constr_in(node, pos, req);
866 void be_set_constr_single_reg_out(ir_node *node, int pos,
867 const arch_register_t *reg, arch_register_req_type_t additional_types)
869 ir_graph *irg = get_irn_irg(node);
870 be_irg_t *birg = be_birg_from_irg(irg);
871 const arch_register_req_t *req;
873 /* if we have an ignore register, add ignore flag and just assign it */
874 if (!rbitset_is_set(birg->allocatable_regs, reg->global_index)) {
875 additional_types |= arch_register_req_type_ignore;
878 if (additional_types == 0) {
879 req = reg->single_req;
881 struct obstack *obst = be_get_be_obst(irg);
882 req = be_create_reg_req(obst, reg, additional_types);
885 arch_set_irn_register_out(node, pos, reg);
886 be_set_constr_out(node, pos, req);
889 void be_node_set_reg_class_in(ir_node *irn, int pos,
890 const arch_register_class_t *cls)
892 be_set_constr_in(irn, pos, cls->class_req);
895 void be_node_set_reg_class_out(ir_node *irn, int pos,
896 const arch_register_class_t *cls)
898 be_set_constr_out(irn, pos, cls->class_req);
901 ir_node *be_get_IncSP_pred(ir_node *irn)
903 assert(be_is_IncSP(irn));
904 return get_irn_n(irn, 0);
907 void be_set_IncSP_pred(ir_node *incsp, ir_node *pred)
909 assert(be_is_IncSP(incsp));
910 set_irn_n(incsp, 0, pred);
913 void be_set_IncSP_offset(ir_node *irn, int offset)
915 be_incsp_attr_t *a = (be_incsp_attr_t*)get_irn_generic_attr(irn);
916 assert(be_is_IncSP(irn));
920 int be_get_IncSP_offset(const ir_node *irn)
922 const be_incsp_attr_t *a = (const be_incsp_attr_t*)get_irn_generic_attr_const(irn);
923 assert(be_is_IncSP(irn));
927 int be_get_IncSP_align(const ir_node *irn)
929 const be_incsp_attr_t *a = (const be_incsp_attr_t*)get_irn_generic_attr_const(irn);
930 assert(be_is_IncSP(irn));
934 static ir_entity *be_node_get_frame_entity(const ir_node *irn)
936 return be_get_frame_entity(irn);
939 void be_node_set_frame_entity(ir_node *irn, ir_entity *ent)
943 assert(be_has_frame_entity(irn));
945 a = (be_frame_attr_t*)get_irn_generic_attr(irn);
949 static void be_node_set_frame_offset(ir_node *irn, int offset)
953 if (!be_has_frame_entity(irn))
956 a = (be_frame_attr_t*)get_irn_generic_attr(irn);
960 static int be_node_get_sp_bias(const ir_node *irn)
962 if (be_is_IncSP(irn))
963 return be_get_IncSP_offset(irn);
965 return -(int)be_Call_get_pop(irn);
973 static const arch_irn_ops_t be_node_irn_ops = {
974 be_node_get_frame_entity,
975 be_node_set_frame_offset,
977 NULL, /* get_inverse */
978 NULL, /* get_op_estimated_cost */
979 NULL, /* possible_memory_operand */
980 NULL, /* perform_memory_operand */
983 static int get_start_reg_index(ir_graph *irg, const arch_register_t *reg)
985 ir_node *start = get_irg_start(irg);
986 unsigned n_outs = arch_get_irn_n_outs(start);
989 /* do a naive linear search... */
990 for (i = 0; i < (int)n_outs; ++i) {
991 const arch_register_req_t *out_req
992 = arch_get_irn_register_req_out(start, i);
993 if (! (out_req->type & arch_register_req_type_limited))
995 if (out_req->cls != arch_register_get_class(reg))
997 if (!rbitset_is_set(out_req->limited, reg->index))
1001 panic("Tried querying undefined register '%s' at Start", reg->name);
1004 ir_node *be_get_initial_reg_value(ir_graph *irg, const arch_register_t *reg)
1006 int i = get_start_reg_index(irg, reg);
1007 ir_node *start = get_irg_start(irg);
1008 ir_mode *mode = arch_register_class_mode(arch_register_get_class(reg));
1010 foreach_out_edge(start, edge) {
1011 ir_node *proj = get_edge_src_irn(edge);
1012 if (!is_Proj(proj)) // maybe End/Anchor
1014 if (get_Proj_proj(proj) == i) {
1018 return new_r_Proj(start, mode, i);
1021 int be_find_return_reg_input(ir_node *ret, const arch_register_t *reg)
1023 int arity = get_irn_arity(ret);
1025 /* do a naive linear search... */
1026 for (i = 0; i < arity; ++i) {
1027 const arch_register_req_t *req = arch_get_irn_register_req_in(ret, i);
1028 if (! (req->type & arch_register_req_type_limited))
1030 if (req->cls != arch_register_get_class(reg))
1032 if (!rbitset_is_set(req->limited, reg->index))
1036 panic("Tried querying undefined register '%s' at Return", reg->name);
1039 static ir_entity* dummy_get_frame_entity(const ir_node *node)
1045 static void dummy_set_frame_offset(ir_node *node, int bias)
1049 panic("should not be called");
1052 static int dummy_get_sp_bias(const ir_node *node)
1058 /* for "middleend" nodes */
1059 static const arch_irn_ops_t dummy_be_irn_ops = {
1060 dummy_get_frame_entity,
1061 dummy_set_frame_offset,
1063 NULL, /* get_inverse */
1064 NULL, /* get_op_estimated_cost */
1065 NULL, /* possible_memory_operand */
1066 NULL, /* perform_memory_operand */
1071 ir_node *be_new_Phi(ir_node *block, int n_ins, ir_node **ins, ir_mode *mode,
1072 const arch_register_req_t *req)
1074 ir_graph *irg = get_irn_irg(block);
1075 struct obstack *obst = be_get_be_obst(irg);
1076 backend_info_t *info;
1079 ir_node *phi = new_ir_node(NULL, irg, block, op_Phi, mode, n_ins, ins);
1080 phi->attr.phi.u.backedge = new_backedge_arr(irg->obst, n_ins);
1081 info = be_get_info(phi);
1082 info->out_infos = NEW_ARR_D(reg_out_info_t, obst, 1);
1083 memset(info->out_infos, 0, 1 * sizeof(info->out_infos[0]));
1084 info->in_reqs = OALLOCN(obst, const arch_register_req_t*, n_ins);
1086 info->out_infos[0].req = req;
1087 for (i = 0; i < n_ins; ++i) {
1088 info->in_reqs[i] = req;
1090 irn_verify_irg(phi, irg);
1091 phi = optimize_node(phi);
1095 void be_set_phi_reg_req(ir_node *node, const arch_register_req_t *req)
1097 int arity = get_irn_arity(node);
1100 backend_info_t *info = be_get_info(node);
1101 info->out_infos[0].req = req;
1102 for (i = 0; i < arity; ++i) {
1103 info->in_reqs[i] = req;
1106 assert(mode_is_datab(get_irn_mode(node)));
1109 void be_dump_phi_reg_reqs(FILE *F, const ir_node *node, dump_reason_t reason)
1112 case dump_node_opcode_txt:
1113 fputs(get_op_name(get_irn_op(node)), F);
1115 case dump_node_mode_txt:
1116 fprintf(F, "%s", get_mode_name(get_irn_mode(node)));
1118 case dump_node_nodeattr_txt:
1120 case dump_node_info_txt:
1122 backend_info_t *info = be_get_info(node);
1123 if (info != NULL && info->out_infos[0].req != NULL) {
1124 arch_dump_reqs_and_registers(F, node);
1134 static const arch_irn_ops_t phi_irn_ops = {
1135 dummy_get_frame_entity,
1136 dummy_set_frame_offset,
1138 NULL, /* get_inverse */
1139 NULL, /* get_op_estimated_cost */
1140 NULL, /* possible_memory_operand */
1141 NULL, /* perform_memory_operand */
/* NOTE(review): many structural lines of this function (switch on `reason`,
 * break statements, case labels of the inner opcode switch, closing braces)
 * are missing from this extract; visible lines kept verbatim. */
1147 * ir_op-Operation: dump a be node to file
1149 static void dump_node(FILE *f, const ir_node *irn, dump_reason_t reason)
1151 assert(is_be_node(irn));
/* short opcode text, e.g. "be_Copy" */
1154 case dump_node_opcode_txt:
1155 fputs(get_op_name(get_irn_op(irn)), f);
1157 case dump_node_mode_txt:
1158 if (be_is_Copy(irn) || be_is_CopyKeep(irn)) {
1159 fprintf(f, "%s", get_mode_name(get_irn_mode(irn)));
/* compact per-node annotation: call entity / IncSP offset */
1162 case dump_node_nodeattr_txt:
1163 if (be_is_Call(irn)) {
1164 const be_call_attr_t *a = (const be_call_attr_t*)get_irn_generic_attr_const(irn);
1166 fprintf(f, " [%s] ", get_entity_name(a->ent));
1168 if (be_is_IncSP(irn)) {
1169 const be_incsp_attr_t *attr = (const be_incsp_attr_t*)get_irn_generic_attr_const(irn);
1170 fprintf(f, " [%d] ", attr->offset);
/* full info text: requirements/registers plus frame entity details */
1173 case dump_node_info_txt:
1174 arch_dump_reqs_and_registers(f, irn);
1176 if (be_has_frame_entity(irn)) {
1177 const be_frame_attr_t *a = (const be_frame_attr_t*)get_irn_generic_attr_const(irn);
1179 unsigned size = get_type_size_bytes(get_entity_type(a->ent));
1180 ir_fprintf(f, "frame entity: %+F, offset 0x%x (%d), size 0x%x (%d) bytes\n",
1181 a->ent, a->offset, a->offset, size, size);
/* opcode-specific extra info (IncSP / Call / MemPerm) */
1186 switch (get_irn_opcode(irn)) {
1188 const be_incsp_attr_t *a = (const be_incsp_attr_t*)get_irn_generic_attr_const(irn);
1189 fprintf(f, "align: %d\n", a->align);
1190 fprintf(f, "offset: %d\n", a->offset);
1194 const be_call_attr_t *a = (const be_call_attr_t*)get_irn_generic_attr_const(irn);
1197 fprintf(f, "\ncalling: %s\n", get_entity_name(a->ent));
1202 for (i = 0; i < be_get_MemPerm_entity_arity(irn); ++i) {
1203 ir_entity *in, *out;
1204 in = be_get_MemPerm_in_entity(irn, i);
1205 out = be_get_MemPerm_out_entity(irn, i);
1207 fprintf(f, "\nin[%d]: %s\n", i, get_entity_name(in));
1210 fprintf(f, "\nout[%d]: %s\n", i, get_entity_name(out));
/* NOTE(review): else-branches and closing braces of this function are
 * missing from this extract; visible lines kept verbatim. */
1224 * Copies the backend specific attributes from old node to new node.
1226 static void copy_attr(ir_graph *irg, const ir_node *old_node, ir_node *new_node)
1228 const void *old_attr = get_irn_generic_attr_const(old_node);
1229 void *new_attr = get_irn_generic_attr(new_node);
1230 struct obstack *obst = be_get_be_obst(irg);
1231 backend_info_t *old_info = be_get_info(old_node);
1232 backend_info_t *new_info = be_get_info(new_node);
1234 assert(is_be_node(old_node));
1235 assert(is_be_node(new_node));
/* raw attribute copy; backend info is duplicated explicitly below */
1237 memcpy(new_attr, old_attr, get_op_attr_size(get_irn_op(old_node)));
1239 new_info->flags = old_info->flags;
1240 if (old_info->out_infos != NULL) {
1241 size_t n_outs = ARR_LEN(old_info->out_infos);
1242 /* need dynamic out infos? */
1245 if (be_is_Perm(new_node)) {
1244 new_info->out_infos = NEW_ARR_F(reg_out_info_t, n_outs);
1246 new_info->out_infos = NEW_ARR_D(reg_out_info_t, obst, n_outs);
1248 memcpy(new_info->out_infos, old_info->out_infos,
1249 n_outs * sizeof(new_info->out_infos[0]));
1251 new_info->out_infos = NULL;
1255 if (old_info->in_reqs != NULL) {
1256 unsigned n_ins = get_irn_arity(old_node);
1257 /* need dynamic in infos? */
1258 if (get_irn_op(old_node)->opar == oparity_dynamic) {
1259 new_info->in_reqs = NEW_ARR_F(const arch_register_req_t*, n_ins);
1261 new_info->in_reqs = OALLOCN(obst,const arch_register_req_t*, n_ins);
1263 memcpy(new_info->in_reqs, old_info->in_reqs,
1264 n_ins * sizeof(new_info->in_reqs[0]));
1266 new_info->in_reqs = NULL;
1270 int is_be_node(const ir_node *irn)
1272 return get_op_ops(get_irn_op(irn))->be_ops == &be_node_irn_ops;
1275 static ir_op *new_be_op(unsigned code, const char *name, op_pin_state p,
1276 irop_flags flags, op_arity opar, size_t attr_size)
1278 ir_op *res = new_ir_op(code, name, p, flags, opar, 0, attr_size);
1279 res->ops.dump_node = dump_node;
1280 res->ops.copy_attr = copy_attr;
1281 res->ops.be_ops = &be_node_irn_ops;
/* NOTE(review): declaration of `opc`, braces and a few lines are missing
 * from this extract; visible lines kept verbatim. */
1285 void be_init_op(void)
/* must only be called once */
1289 assert(op_be_Spill == NULL);
1291 /* Acquire all needed opcodes. */
1292 op_be_Spill = new_be_op(beo_Spill, "be_Spill", op_pin_state_exc_pinned, irop_flag_none, oparity_unary, sizeof(be_frame_attr_t));
1293 op_be_Reload = new_be_op(beo_Reload, "be_Reload", op_pin_state_exc_pinned, irop_flag_none, oparity_zero, sizeof(be_frame_attr_t));
1294 op_be_Perm = new_be_op(beo_Perm, "be_Perm", op_pin_state_exc_pinned, irop_flag_none, oparity_variable, sizeof(be_node_attr_t));
1295 op_be_MemPerm = new_be_op(beo_MemPerm, "be_MemPerm", op_pin_state_exc_pinned, irop_flag_none, oparity_variable, sizeof(be_memperm_attr_t));
1296 op_be_Copy = new_be_op(beo_Copy, "be_Copy", op_pin_state_exc_pinned, irop_flag_none, oparity_unary, sizeof(be_node_attr_t));
1297 op_be_Keep = new_be_op(beo_Keep, "be_Keep", op_pin_state_exc_pinned, irop_flag_keep, oparity_dynamic, sizeof(be_node_attr_t));
1298 op_be_CopyKeep = new_be_op(beo_CopyKeep, "be_CopyKeep", op_pin_state_exc_pinned, irop_flag_keep, oparity_variable, sizeof(be_node_attr_t));
1299 op_be_Call = new_be_op(beo_Call, "be_Call", op_pin_state_exc_pinned, irop_flag_fragile|irop_flag_uses_memory, oparity_variable, sizeof(be_call_attr_t));
/* Call is fragile: register its memory input and X projs */
1300 ir_op_set_memory_index(op_be_Call, n_be_Call_mem);
1301 ir_op_set_fragile_indices(op_be_Call, pn_be_Call_X_regular, pn_be_Call_X_except);
1302 op_be_Return = new_be_op(beo_Return, "be_Return", op_pin_state_exc_pinned, irop_flag_cfopcode, oparity_variable, sizeof(be_return_attr_t));
1303 op_be_AddSP = new_be_op(beo_AddSP, "be_AddSP", op_pin_state_exc_pinned, irop_flag_none, oparity_unary, sizeof(be_node_attr_t));
1304 op_be_SubSP = new_be_op(beo_SubSP, "be_SubSP", op_pin_state_exc_pinned, irop_flag_none, oparity_unary, sizeof(be_node_attr_t));
1305 op_be_IncSP = new_be_op(beo_IncSP, "be_IncSP", op_pin_state_exc_pinned, irop_flag_none, oparity_unary, sizeof(be_incsp_attr_t));
1306 op_be_Start = new_be_op(beo_Start, "be_Start", op_pin_state_exc_pinned, irop_flag_none, oparity_zero, sizeof(be_node_attr_t));
1307 op_be_FrameAddr = new_be_op(beo_FrameAddr, "be_FrameAddr", op_pin_state_exc_pinned, irop_flag_none, oparity_unary, sizeof(be_frame_attr_t));
/* attribute comparison callbacks for CSE */
1309 op_be_Spill->ops.node_cmp_attr = FrameAddr_cmp_attr;
1310 op_be_Reload->ops.node_cmp_attr = FrameAddr_cmp_attr;
1311 op_be_Perm->ops.node_cmp_attr = be_nodes_equal;
1312 op_be_MemPerm->ops.node_cmp_attr = be_nodes_equal;
1313 op_be_Copy->ops.node_cmp_attr = be_nodes_equal;
1314 op_be_Keep->ops.node_cmp_attr = be_nodes_equal;
1315 op_be_CopyKeep->ops.node_cmp_attr = be_nodes_equal;
1316 op_be_Call->ops.node_cmp_attr = Call_cmp_attr;
1317 op_be_Return->ops.node_cmp_attr = Return_cmp_attr;
1318 op_be_AddSP->ops.node_cmp_attr = be_nodes_equal;
1319 op_be_SubSP->ops.node_cmp_attr = be_nodes_equal;
1320 op_be_IncSP->ops.node_cmp_attr = IncSP_cmp_attr;
1321 op_be_Start->ops.node_cmp_attr = be_nodes_equal;
1322 op_be_FrameAddr->ops.node_cmp_attr = FrameAddr_cmp_attr;
1324 /* attach out dummy_ops to middle end nodes */
1325 for (opc = iro_First; opc <= iro_Last; ++opc) {
1326 ir_op *op = ir_get_opcode(opc);
1327 assert(op->ops.be_ops == NULL);
1328 op->ops.be_ops = &dummy_be_irn_ops;
/* Phi gets its own ops so register requirements can be dumped */
1331 op_Phi->ops.be_ops = &phi_irn_ops;
/* Free all backend opcodes and reset the globals (inverse of be_init_op).
 * NOTE(review): the function continues past the end of this extract. */
1334 void be_finish_op(void)
1336 free_ir_op(op_be_Spill); op_be_Spill = NULL;
1337 free_ir_op(op_be_Reload); op_be_Reload = NULL;
1338 free_ir_op(op_be_Perm); op_be_Perm = NULL;
1339 free_ir_op(op_be_MemPerm); op_be_MemPerm = NULL;
1340 free_ir_op(op_be_Copy); op_be_Copy = NULL;
1341 free_ir_op(op_be_Keep); op_be_Keep = NULL;
1342 free_ir_op(op_be_CopyKeep); op_be_CopyKeep = NULL;
1343 free_ir_op(op_be_Call); op_be_Call = NULL;
1344 free_ir_op(op_be_Return); op_be_Return = NULL;
1345 free_ir_op(op_be_IncSP); op_be_IncSP = NULL;
1346 free_ir_op(op_be_AddSP); op_be_AddSP = NULL;
1347 free_ir_op(op_be_SubSP); op_be_SubSP = NULL;
1348 free_ir_op(op_be_Start); op_be_Start = NULL;
1349 free_ir_op(op_be_FrameAddr); op_be_FrameAddr = NULL;