2 * Copyright (C) 1995-2011 University of Karlsruhe. All rights reserved.
4 * This file is part of libFirm.
6 * This file may be distributed and/or modified under the terms of the
7 * GNU General Public License version 2 as published by the Free Software
8 * Foundation and appearing in the file LICENSE.GPL included in the
9 * packaging of this file.
11 * Licensees holding valid libFirm Professional Edition licenses may use
12 * this file in accordance with the libFirm Commercial License.
13 * Agreement provided with the Software.
15 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 * @brief Backend node support for generic backend nodes.
23 * @author Sebastian Hack
26 * Backend node support for generic backend nodes.
27 * This file provides Perm, Copy, Spill and Reload nodes.
39 #include "bitfiddle.h"
40 #include "raw_bitset.h"
51 #include "irbackedge_t.h"
52 #include "irverify_t.h"
62 typedef struct be_node_attr_t {
66 /** The be_Return nodes attribute type. */
69 int num_ret_vals; /**< number of return values */
70 unsigned pop; /**< number of bytes that should be popped */
71 int emit_pop; /**< if set, emit pop bytes, even if pop = 0 */
74 /** The be_IncSP attribute type. */
77 int offset; /**< The offset by which the stack shall be
79 int align; /**< whether stack should be aligned after the
83 /** The be_Frame attribute type. */
90 /** The be_Call attribute type. */
93 ir_entity *ent; /**< called entity if this is a static call. */
95 ir_type *call_tp; /**< call type, copied from the original Call */
100 ir_entity **in_entities;
101 ir_entity **out_entities;
107 ir_op *op_be_MemPerm;
110 ir_op *op_be_CopyKeep;
117 ir_op *op_be_FrameAddr;
120 * Compare the attributes of two be_FrameAddr nodes.
122 * @return zero if both nodes have identically attributes
124 static int FrameAddr_cmp_attr(const ir_node *a, const ir_node *b)
126 const be_frame_attr_t *a_attr = (const be_frame_attr_t*)get_irn_generic_attr_const(a);
127 const be_frame_attr_t *b_attr = (const be_frame_attr_t*)get_irn_generic_attr_const(b);
129 if (a_attr->ent != b_attr->ent || a_attr->offset != b_attr->offset)
132 return be_nodes_equal(a, b);
136 * Compare the attributes of two be_Return nodes.
138 * @return zero if both nodes have identically attributes
140 static int Return_cmp_attr(const ir_node *a, const ir_node *b)
142 const be_return_attr_t *a_attr = (const be_return_attr_t*)get_irn_generic_attr_const(a);
143 const be_return_attr_t *b_attr = (const be_return_attr_t*)get_irn_generic_attr_const(b);
145 if (a_attr->num_ret_vals != b_attr->num_ret_vals)
147 if (a_attr->pop != b_attr->pop)
149 if (a_attr->emit_pop != b_attr->emit_pop)
152 return be_nodes_equal(a, b);
156 * Compare the attributes of two be_IncSP nodes.
158 * @return zero if both nodes have identically attributes
160 static int IncSP_cmp_attr(const ir_node *a, const ir_node *b)
162 const be_incsp_attr_t *a_attr = (const be_incsp_attr_t*)get_irn_generic_attr_const(a);
163 const be_incsp_attr_t *b_attr = (const be_incsp_attr_t*)get_irn_generic_attr_const(b);
165 if (a_attr->offset != b_attr->offset)
168 return be_nodes_equal(a, b);
172 * Compare the attributes of two be_Call nodes.
174 * @return zero if both nodes have identically attributes
176 static int Call_cmp_attr(const ir_node *a, const ir_node *b)
178 const be_call_attr_t *a_attr = (const be_call_attr_t*)get_irn_generic_attr_const(a);
179 const be_call_attr_t *b_attr = (const be_call_attr_t*)get_irn_generic_attr_const(b);
181 if (a_attr->ent != b_attr->ent ||
182 a_attr->call_tp != b_attr->call_tp)
185 return be_nodes_equal(a, b);
188 static arch_register_req_t *allocate_reg_req(const ir_node *node)
190 ir_graph *irg = get_irn_irg(node);
191 struct obstack *obst = be_get_be_obst(irg);
193 arch_register_req_t *req = OALLOCZ(obst, arch_register_req_t);
197 void be_set_constr_in(ir_node *node, int pos, const arch_register_req_t *req)
199 backend_info_t *info = be_get_info(node);
200 assert(pos < get_irn_arity(node));
201 info->in_reqs[pos] = req;
204 void be_set_constr_out(ir_node *node, int pos, const arch_register_req_t *req)
206 backend_info_t *info = be_get_info(node);
207 info->out_infos[pos].req = req;
211 * Initializes the generic attribute of all be nodes and return it.
213 static void init_node_attr(ir_node *node, int n_inputs, int n_outputs)
215 ir_graph *irg = get_irn_irg(node);
216 struct obstack *obst = be_get_be_obst(irg);
217 backend_info_t *info = be_get_info(node);
218 const arch_register_req_t **in_reqs;
222 assert(n_inputs == get_irn_arity(node));
223 in_reqs = OALLOCN(obst, const arch_register_req_t*, n_inputs);
224 for (i = 0; i < n_inputs; ++i) {
225 in_reqs[i] = arch_no_register_req;
228 in_reqs = NEW_ARR_F(const arch_register_req_t*, 0);
230 info->in_reqs = in_reqs;
232 if (n_outputs >= 0) {
234 info->out_infos = NEW_ARR_D(reg_out_info_t, obst, n_outputs);
235 memset(info->out_infos, 0, n_outputs * sizeof(info->out_infos[0]));
236 for (i = 0; i < n_outputs; ++i) {
237 info->out_infos[i].req = arch_no_register_req;
240 info->out_infos = NEW_ARR_F(reg_out_info_t, 0);
244 static void add_register_req_in(ir_node *node)
246 backend_info_t *info = be_get_info(node);
247 ARR_APP1(const arch_register_req_t*, info->in_reqs, arch_no_register_req);
250 ir_node *be_new_Spill(const arch_register_class_t *cls,
251 const arch_register_class_t *cls_frame, ir_node *bl,
252 ir_node *frame, ir_node *to_spill)
257 ir_graph *irg = get_Block_irg(bl);
261 res = new_ir_node(NULL, irg, bl, op_be_Spill, mode_M, 2, in);
262 init_node_attr(res, 2, 1);
263 a = (be_frame_attr_t*) get_irn_generic_attr(res);
266 a->base.exc.pin_state = op_pin_state_pinned;
268 be_node_set_reg_class_in(res, n_be_Spill_frame, cls_frame);
269 be_node_set_reg_class_in(res, n_be_Spill_val, cls);
270 arch_set_irn_register_req_out(res, 0, arch_no_register_req);
275 ir_node *be_new_Reload(const arch_register_class_t *cls,
276 const arch_register_class_t *cls_frame, ir_node *block,
277 ir_node *frame, ir_node *mem, ir_mode *mode)
281 ir_graph *irg = get_Block_irg(block);
286 res = new_ir_node(NULL, irg, block, op_be_Reload, mode, 2, in);
288 init_node_attr(res, 2, 1);
289 be_node_set_reg_class_out(res, 0, cls);
291 be_node_set_reg_class_in(res, n_be_Reload_frame, cls_frame);
292 arch_set_irn_flags(res, arch_irn_flags_rematerializable);
294 a = (be_frame_attr_t*) get_irn_generic_attr(res);
297 a->base.exc.pin_state = op_pin_state_pinned;
302 ir_node *be_get_Reload_mem(const ir_node *irn)
304 assert(be_is_Reload(irn));
305 return get_irn_n(irn, n_be_Reload_mem);
308 ir_node *be_get_Reload_frame(const ir_node *irn)
310 assert(be_is_Reload(irn));
311 return get_irn_n(irn, n_be_Reload_frame);
314 ir_node *be_get_Spill_val(const ir_node *irn)
316 assert(be_is_Spill(irn));
317 return get_irn_n(irn, n_be_Spill_val);
320 ir_node *be_get_Spill_frame(const ir_node *irn)
322 assert(be_is_Spill(irn));
323 return get_irn_n(irn, n_be_Spill_frame);
326 ir_node *be_new_Perm(const arch_register_class_t *cls, ir_node *block,
327 int n, ir_node *in[])
330 ir_graph *irg = get_Block_irg(block);
331 be_node_attr_t *attr;
333 ir_node *irn = new_ir_node(NULL, irg, block, op_be_Perm, mode_T, n, in);
334 init_node_attr(irn, n, n);
335 attr = (be_node_attr_t*) get_irn_generic_attr(irn);
336 attr->exc.pin_state = op_pin_state_pinned;
337 for (i = 0; i < n; ++i) {
338 const ir_node *input = in[i];
339 const arch_register_req_t *req = arch_get_irn_register_req(input);
340 if (req->width == 1) {
341 be_set_constr_in(irn, i, cls->class_req);
342 be_set_constr_out(irn, i, cls->class_req);
344 arch_register_req_t *new_req = allocate_reg_req(irn);
346 new_req->type = (req->type & arch_register_req_type_aligned);
347 new_req->width = req->width;
348 be_set_constr_in(irn, i, new_req);
349 be_set_constr_out(irn, i, new_req);
356 void be_Perm_reduce(ir_node *perm, int new_size, int *map)
358 int arity = get_irn_arity(perm);
359 const arch_register_req_t **old_in_reqs
360 = ALLOCAN(const arch_register_req_t*, arity);
361 reg_out_info_t *old_infos = ALLOCAN(reg_out_info_t, arity);
362 backend_info_t *info = be_get_info(perm);
366 assert(be_is_Perm(perm));
367 assert(new_size <= arity);
369 new_in = ALLOCAN(ir_node*, new_size);
371 /* save the old register data */
372 memcpy(old_in_reqs, info->in_reqs, arity * sizeof(old_in_reqs[0]));
373 memcpy(old_infos, info->out_infos, arity * sizeof(old_infos[0]));
375 /* compose the new in array and set the new register data directly */
376 for (i = 0; i < new_size; ++i) {
378 new_in[i] = get_irn_n(perm, idx);
379 info->in_reqs[i] = old_in_reqs[idx];
380 info->out_infos[i] = old_infos[idx];
383 set_irn_in(perm, new_size, new_in);
386 ir_node *be_new_MemPerm(ir_node *block, int n, ir_node *in[])
388 ir_graph *irg = get_Block_irg(block);
389 const arch_env_t *arch_env = be_get_irg_arch_env(irg);
390 ir_node *frame = get_irg_frame(irg);
391 const arch_register_t *sp = arch_env->sp;
393 be_memperm_attr_t *attr;
396 real_in = ALLOCAN(ir_node*, n + 1);
398 memcpy(&real_in[1], in, n * sizeof(real_in[0]));
400 irn = new_ir_node(NULL, irg, block, op_be_MemPerm, mode_T, n+1, real_in);
402 init_node_attr(irn, n + 1, n);
403 be_node_set_reg_class_in(irn, 0, sp->reg_class);
405 attr = (be_memperm_attr_t*)get_irn_generic_attr(irn);
406 attr->in_entities = OALLOCNZ(irg->obst, ir_entity*, n);
407 attr->out_entities = OALLOCNZ(irg->obst, ir_entity*, n);
412 ir_node *be_new_Copy(ir_node *bl, ir_node *op)
416 arch_register_req_t *req;
417 be_node_attr_t *attr;
418 ir_graph *irg = get_Block_irg(bl);
419 const arch_register_req_t *in_req = arch_get_irn_register_req(op);
420 const arch_register_class_t *cls = in_req->cls;
423 res = new_ir_node(NULL, irg, bl, op_be_Copy, get_irn_mode(op), 1, in);
424 init_node_attr(res, 1, 1);
425 attr = (be_node_attr_t*) get_irn_generic_attr(res);
426 attr->exc.pin_state = op_pin_state_floats;
427 be_node_set_reg_class_in(res, 0, cls);
428 be_node_set_reg_class_out(res, 0, cls);
430 req = allocate_reg_req(res);
432 req->type = arch_register_req_type_should_be_same
433 | (in_req->type & arch_register_req_type_aligned);
434 req->other_same = 1U << 0;
435 req->width = in_req->width;
436 be_set_constr_out(res, 0, req);
441 ir_node *be_get_Copy_op(const ir_node *cpy)
443 return get_irn_n(cpy, n_be_Copy_op);
446 void be_set_Copy_op(ir_node *cpy, ir_node *op)
448 set_irn_n(cpy, n_be_Copy_op, op);
451 ir_node *be_new_Keep(ir_node *block, int n, ir_node *in[])
455 ir_graph *irg = get_Block_irg(block);
456 be_node_attr_t *attr;
458 res = new_ir_node(NULL, irg, block, op_be_Keep, mode_ANY, -1, NULL);
459 init_node_attr(res, -1, 1);
460 attr = (be_node_attr_t*) get_irn_generic_attr(res);
461 attr->exc.pin_state = op_pin_state_pinned;
463 for (i = 0; i < n; ++i) {
464 add_irn_n(res, in[i]);
465 add_register_req_in(res);
472 void be_Keep_add_node(ir_node *keep, const arch_register_class_t *cls, ir_node *node)
476 assert(be_is_Keep(keep));
477 n = add_irn_n(keep, node);
478 add_register_req_in(keep);
479 be_node_set_reg_class_in(keep, n, cls);
482 ir_node *be_new_Call(dbg_info *dbg, ir_graph *irg, ir_node *bl, ir_node *mem,
483 const arch_register_req_t *sp_req, ir_node *sp,
484 const arch_register_req_t *ptr_req, ir_node *ptr,
485 int n_outs, int n, ir_node *in[], ir_type *call_tp)
488 int real_n = n_be_Call_first_arg + n;
492 NEW_ARR_A(ir_node *, real_in, real_n);
493 real_in[n_be_Call_mem] = mem;
494 real_in[n_be_Call_sp] = sp;
495 real_in[n_be_Call_ptr] = ptr;
496 memcpy(&real_in[n_be_Call_first_arg], in, n * sizeof(in[0]));
498 irn = new_ir_node(dbg, irg, bl, op_be_Call, mode_T, real_n, real_in);
499 init_node_attr(irn, real_n, n_outs);
500 a = (be_call_attr_t*)get_irn_generic_attr(irn);
502 a->call_tp = call_tp;
504 a->base.exc.pin_state = op_pin_state_pinned;
505 be_set_constr_in(irn, n_be_Call_sp, sp_req);
506 be_set_constr_in(irn, n_be_Call_ptr, ptr_req);
510 ir_entity *be_Call_get_entity(const ir_node *call)
512 const be_call_attr_t *a = (const be_call_attr_t*)get_irn_generic_attr_const(call);
513 assert(be_is_Call(call));
517 void be_Call_set_entity(ir_node *call, ir_entity *ent)
519 be_call_attr_t *a = (be_call_attr_t*)get_irn_generic_attr(call);
520 assert(be_is_Call(call));
524 ir_type *be_Call_get_type(ir_node *call)
526 const be_call_attr_t *a = (const be_call_attr_t*)get_irn_generic_attr_const(call);
527 assert(be_is_Call(call));
531 void be_Call_set_type(ir_node *call, ir_type *call_tp)
533 be_call_attr_t *a = (be_call_attr_t*)get_irn_generic_attr(call);
534 assert(be_is_Call(call));
535 a->call_tp = call_tp;
538 void be_Call_set_pop(ir_node *call, unsigned pop)
540 be_call_attr_t *a = (be_call_attr_t*)get_irn_generic_attr(call);
544 unsigned be_Call_get_pop(const ir_node *call)
546 const be_call_attr_t *a = (const be_call_attr_t*)get_irn_generic_attr_const(call);
550 ir_node *be_new_Return(dbg_info *dbg, ir_graph *irg, ir_node *block, int n_res,
551 unsigned pop, int n, ir_node *in[])
556 res = new_ir_node(dbg, irg, block, op_be_Return, mode_X, n, in);
557 init_node_attr(res, n, 1);
558 be_set_constr_out(res, 0, arch_no_register_req);
560 a = (be_return_attr_t*)get_irn_generic_attr(res);
561 a->num_ret_vals = n_res;
564 a->base.exc.pin_state = op_pin_state_pinned;
569 int be_Return_get_n_rets(const ir_node *ret)
571 const be_return_attr_t *a = (const be_return_attr_t*)get_irn_generic_attr_const(ret);
572 return a->num_ret_vals;
575 unsigned be_Return_get_pop(const ir_node *ret)
577 const be_return_attr_t *a = (const be_return_attr_t*)get_irn_generic_attr_const(ret);
581 int be_Return_get_emit_pop(const ir_node *ret)
583 const be_return_attr_t *a = (const be_return_attr_t*)get_irn_generic_attr_const(ret);
587 void be_Return_set_emit_pop(ir_node *ret, int emit_pop)
589 be_return_attr_t *a = (be_return_attr_t*)get_irn_generic_attr(ret);
590 a->emit_pop = emit_pop;
593 ir_node *be_new_IncSP(const arch_register_t *sp, ir_node *bl,
594 ir_node *old_sp, int offset, int align)
599 ir_graph *irg = get_Block_irg(bl);
602 irn = new_ir_node(NULL, irg, bl, op_be_IncSP, sp->reg_class->mode,
604 init_node_attr(irn, 1, 1);
605 a = (be_incsp_attr_t*)get_irn_generic_attr(irn);
608 a->base.exc.pin_state = op_pin_state_pinned;
610 /* Set output constraint to stack register. */
611 be_node_set_reg_class_in(irn, 0, sp->reg_class);
612 be_set_constr_single_reg_out(irn, 0, sp, arch_register_req_type_produces_sp);
617 ir_node *be_new_AddSP(const arch_register_t *sp, ir_node *bl, ir_node *old_sp,
621 ir_node *in[n_be_AddSP_last];
623 be_node_attr_t *attr;
625 in[n_be_AddSP_old_sp] = old_sp;
626 in[n_be_AddSP_size] = sz;
628 irg = get_Block_irg(bl);
629 irn = new_ir_node(NULL, irg, bl, op_be_AddSP, mode_T, n_be_AddSP_last, in);
630 init_node_attr(irn, n_be_AddSP_last, pn_be_AddSP_last);
631 attr = (be_node_attr_t*) get_irn_generic_attr(irn);
632 attr->exc.pin_state = op_pin_state_pinned;
634 /* Set output constraint to stack register. */
635 be_set_constr_single_reg_in(irn, n_be_AddSP_old_sp, sp,
636 arch_register_req_type_none);
637 be_node_set_reg_class_in(irn, n_be_AddSP_size, sp->reg_class);
638 be_set_constr_single_reg_out(irn, pn_be_AddSP_sp, sp,
639 arch_register_req_type_produces_sp);
644 ir_node *be_new_SubSP(const arch_register_t *sp, ir_node *bl, ir_node *old_sp, ir_node *sz)
647 ir_node *in[n_be_SubSP_last];
649 be_node_attr_t *attr;
651 in[n_be_SubSP_old_sp] = old_sp;
652 in[n_be_SubSP_size] = sz;
654 irg = get_Block_irg(bl);
655 irn = new_ir_node(NULL, irg, bl, op_be_SubSP, mode_T, n_be_SubSP_last, in);
656 init_node_attr(irn, n_be_SubSP_last, pn_be_SubSP_last);
657 attr = (be_node_attr_t*) get_irn_generic_attr(irn);
658 attr->exc.pin_state = op_pin_state_pinned;
660 /* Set output constraint to stack register. */
661 be_set_constr_single_reg_in(irn, n_be_SubSP_old_sp, sp,
662 arch_register_req_type_none);
663 be_node_set_reg_class_in(irn, n_be_SubSP_size, sp->reg_class);
664 be_set_constr_single_reg_out(irn, pn_be_SubSP_sp, sp, arch_register_req_type_produces_sp);
669 ir_node *be_new_Start(dbg_info *dbgi, ir_node *bl, int n_outs)
672 ir_graph *irg = get_Block_irg(bl);
673 be_node_attr_t *attr;
675 res = new_ir_node(dbgi, irg, bl, op_be_Start, mode_T, 0, NULL);
676 init_node_attr(res, 0, n_outs);
677 attr = (be_node_attr_t*) get_irn_generic_attr(res);
678 attr->exc.pin_state = op_pin_state_pinned;
683 ir_node *be_new_FrameAddr(const arch_register_class_t *cls_frame, ir_node *bl, ir_node *frame, ir_entity *ent)
688 ir_graph *irg = get_Block_irg(bl);
691 irn = new_ir_node(NULL, irg, bl, op_be_FrameAddr, get_irn_mode(frame), 1, in);
692 init_node_attr(irn, 1, 1);
693 a = (be_frame_attr_t*)get_irn_generic_attr(irn);
696 a->base.exc.pin_state = op_pin_state_floats;
697 be_node_set_reg_class_in(irn, 0, cls_frame);
698 be_node_set_reg_class_out(irn, 0, cls_frame);
700 return optimize_node(irn);
703 ir_node *be_get_FrameAddr_frame(const ir_node *node)
705 assert(be_is_FrameAddr(node));
706 return get_irn_n(node, n_be_FrameAddr_ptr);
709 ir_entity *be_get_FrameAddr_entity(const ir_node *node)
711 const be_frame_attr_t *attr = (const be_frame_attr_t*)get_irn_generic_attr_const(node);
715 ir_node *be_new_CopyKeep(ir_node *bl, ir_node *src, int n, ir_node *in_keep[])
718 ir_node **in = ALLOCAN(ir_node*, n + 1);
719 ir_graph *irg = get_Block_irg(bl);
720 const arch_register_req_t *req = arch_get_irn_register_req(src);
721 const arch_register_class_t *cls = req->cls;
722 ir_mode *mode = get_irn_mode(src);
723 be_node_attr_t *attr;
726 memcpy(&in[1], in_keep, n * sizeof(in[0]));
727 irn = new_ir_node(NULL, irg, bl, op_be_CopyKeep, mode, n + 1, in);
728 init_node_attr(irn, n + 1, 1);
729 attr = (be_node_attr_t*) get_irn_generic_attr(irn);
730 attr->exc.pin_state = op_pin_state_floats;
731 be_node_set_reg_class_in(irn, 0, cls);
732 be_node_set_reg_class_out(irn, 0, cls);
737 ir_node *be_new_CopyKeep_single(ir_node *bl, ir_node *src, ir_node *keep)
739 return be_new_CopyKeep(bl, src, 1, &keep);
742 ir_node *be_get_CopyKeep_op(const ir_node *cpy)
744 return get_irn_n(cpy, n_be_CopyKeep_op);
747 void be_set_CopyKeep_op(ir_node *cpy, ir_node *op)
749 set_irn_n(cpy, n_be_CopyKeep_op, op);
752 static bool be_has_frame_entity(const ir_node *irn)
754 switch (get_irn_opcode(irn)) {
764 ir_entity *be_get_frame_entity(const ir_node *irn)
766 if (be_has_frame_entity(irn)) {
767 const be_frame_attr_t *a = (const be_frame_attr_t*)get_irn_generic_attr_const(irn);
773 int be_get_frame_offset(const ir_node *irn)
775 assert(is_be_node(irn));
776 if (be_has_frame_entity(irn)) {
777 const be_frame_attr_t *a = (const be_frame_attr_t*)get_irn_generic_attr_const(irn);
783 void be_set_MemPerm_in_entity(const ir_node *irn, int n, ir_entity *ent)
785 const be_memperm_attr_t *attr = (const be_memperm_attr_t*)get_irn_generic_attr_const(irn);
787 assert(be_is_MemPerm(irn));
788 assert(n < be_get_MemPerm_entity_arity(irn));
790 attr->in_entities[n] = ent;
793 ir_entity* be_get_MemPerm_in_entity(const ir_node* irn, int n)
795 const be_memperm_attr_t *attr = (const be_memperm_attr_t*)get_irn_generic_attr_const(irn);
797 assert(be_is_MemPerm(irn));
798 assert(n < be_get_MemPerm_entity_arity(irn));
800 return attr->in_entities[n];
803 void be_set_MemPerm_out_entity(const ir_node *irn, int n, ir_entity *ent)
805 const be_memperm_attr_t *attr = (const be_memperm_attr_t*)get_irn_generic_attr_const(irn);
807 assert(be_is_MemPerm(irn));
808 assert(n < be_get_MemPerm_entity_arity(irn));
810 attr->out_entities[n] = ent;
813 ir_entity* be_get_MemPerm_out_entity(const ir_node* irn, int n)
815 const be_memperm_attr_t *attr = (const be_memperm_attr_t*)get_irn_generic_attr_const(irn);
817 assert(be_is_MemPerm(irn));
818 assert(n < be_get_MemPerm_entity_arity(irn));
820 return attr->out_entities[n];
823 int be_get_MemPerm_entity_arity(const ir_node *irn)
825 return get_irn_arity(irn) - 1;
828 const arch_register_req_t *be_create_reg_req(struct obstack *obst,
829 const arch_register_t *reg, arch_register_req_type_t additional_types)
831 arch_register_req_t *req = OALLOC(obst, arch_register_req_t);
832 const arch_register_class_t *cls = arch_register_get_class(reg);
833 unsigned *limited_bitset;
835 limited_bitset = rbitset_obstack_alloc(obst, arch_register_class_n_regs(cls));
836 rbitset_set(limited_bitset, arch_register_get_index(reg));
838 req->type = arch_register_req_type_limited | additional_types;
840 req->limited = limited_bitset;
845 void be_set_constr_single_reg_in(ir_node *node, int pos,
846 const arch_register_t *reg, arch_register_req_type_t additional_types)
848 const arch_register_req_t *req;
850 if (additional_types == 0) {
851 req = reg->single_req;
853 ir_graph *irg = get_irn_irg(node);
854 struct obstack *obst = be_get_be_obst(irg);
855 req = be_create_reg_req(obst, reg, additional_types);
857 be_set_constr_in(node, pos, req);
860 void be_set_constr_single_reg_out(ir_node *node, int pos,
861 const arch_register_t *reg, arch_register_req_type_t additional_types)
863 ir_graph *irg = get_irn_irg(node);
864 be_irg_t *birg = be_birg_from_irg(irg);
865 const arch_register_req_t *req;
867 /* if we have an ignore register, add ignore flag and just assign it */
868 if (!rbitset_is_set(birg->allocatable_regs, reg->global_index)) {
869 additional_types |= arch_register_req_type_ignore;
872 if (additional_types == 0) {
873 req = reg->single_req;
875 struct obstack *obst = be_get_be_obst(irg);
876 req = be_create_reg_req(obst, reg, additional_types);
879 arch_set_irn_register_out(node, pos, reg);
880 be_set_constr_out(node, pos, req);
883 void be_node_set_reg_class_in(ir_node *irn, int pos,
884 const arch_register_class_t *cls)
886 be_set_constr_in(irn, pos, cls->class_req);
889 void be_node_set_reg_class_out(ir_node *irn, int pos,
890 const arch_register_class_t *cls)
892 be_set_constr_out(irn, pos, cls->class_req);
895 ir_node *be_get_IncSP_pred(ir_node *irn)
897 assert(be_is_IncSP(irn));
898 return get_irn_n(irn, 0);
901 void be_set_IncSP_pred(ir_node *incsp, ir_node *pred)
903 assert(be_is_IncSP(incsp));
904 set_irn_n(incsp, 0, pred);
907 void be_set_IncSP_offset(ir_node *irn, int offset)
909 be_incsp_attr_t *a = (be_incsp_attr_t*)get_irn_generic_attr(irn);
910 assert(be_is_IncSP(irn));
914 int be_get_IncSP_offset(const ir_node *irn)
916 const be_incsp_attr_t *a = (const be_incsp_attr_t*)get_irn_generic_attr_const(irn);
917 assert(be_is_IncSP(irn));
921 int be_get_IncSP_align(const ir_node *irn)
923 const be_incsp_attr_t *a = (const be_incsp_attr_t*)get_irn_generic_attr_const(irn);
924 assert(be_is_IncSP(irn));
928 static ir_entity *be_node_get_frame_entity(const ir_node *irn)
930 return be_get_frame_entity(irn);
933 void be_node_set_frame_entity(ir_node *irn, ir_entity *ent)
937 assert(be_has_frame_entity(irn));
939 a = (be_frame_attr_t*)get_irn_generic_attr(irn);
943 static void be_node_set_frame_offset(ir_node *irn, int offset)
947 if (!be_has_frame_entity(irn))
950 a = (be_frame_attr_t*)get_irn_generic_attr(irn);
954 static int be_node_get_sp_bias(const ir_node *irn)
956 if (be_is_IncSP(irn))
957 return be_get_IncSP_offset(irn);
959 return -(int)be_Call_get_pop(irn);
967 static const arch_irn_ops_t be_node_irn_ops = {
968 be_node_get_frame_entity,
969 be_node_set_frame_offset,
971 NULL, /* get_inverse */
972 NULL, /* get_op_estimated_cost */
973 NULL, /* possible_memory_operand */
974 NULL, /* perform_memory_operand */
977 static int get_start_reg_index(ir_graph *irg, const arch_register_t *reg)
979 ir_node *start = get_irg_start(irg);
980 unsigned n_outs = arch_get_irn_n_outs(start);
983 /* do a naive linear search... */
984 for (i = 0; i < (int)n_outs; ++i) {
985 const arch_register_req_t *out_req
986 = arch_get_irn_register_req_out(start, i);
987 if (! (out_req->type & arch_register_req_type_limited))
989 if (out_req->cls != arch_register_get_class(reg))
991 if (!rbitset_is_set(out_req->limited, reg->index))
995 panic("Tried querying undefined register '%s' at Start", reg->name);
998 ir_node *be_get_initial_reg_value(ir_graph *irg, const arch_register_t *reg)
1000 int i = get_start_reg_index(irg, reg);
1001 ir_node *start = get_irg_start(irg);
1002 ir_mode *mode = arch_register_class_mode(arch_register_get_class(reg));
1004 foreach_out_edge(start, edge) {
1005 ir_node *proj = get_edge_src_irn(edge);
1006 if (!is_Proj(proj)) // maybe End/Anchor
1008 if (get_Proj_proj(proj) == i) {
1012 return new_r_Proj(start, mode, i);
1015 int be_find_return_reg_input(ir_node *ret, const arch_register_t *reg)
1017 int arity = get_irn_arity(ret);
1019 /* do a naive linear search... */
1020 for (i = 0; i < arity; ++i) {
1021 const arch_register_req_t *req = arch_get_irn_register_req_in(ret, i);
1022 if (! (req->type & arch_register_req_type_limited))
1024 if (req->cls != arch_register_get_class(reg))
1026 if (!rbitset_is_set(req->limited, reg->index))
1030 panic("Tried querying undefined register '%s' at Return", reg->name);
1033 static ir_entity* dummy_get_frame_entity(const ir_node *node)
1039 static void dummy_set_frame_offset(ir_node *node, int bias)
1043 panic("should not be called");
1046 static int dummy_get_sp_bias(const ir_node *node)
1052 /* for "middleend" nodes */
1053 static const arch_irn_ops_t dummy_be_irn_ops = {
1054 dummy_get_frame_entity,
1055 dummy_set_frame_offset,
1057 NULL, /* get_inverse */
1058 NULL, /* get_op_estimated_cost */
1059 NULL, /* possible_memory_operand */
1060 NULL, /* perform_memory_operand */
1065 ir_node *be_new_Phi(ir_node *block, int n_ins, ir_node **ins, ir_mode *mode,
1066 const arch_register_req_t *req)
1068 ir_graph *irg = get_irn_irg(block);
1069 struct obstack *obst = be_get_be_obst(irg);
1070 backend_info_t *info;
1073 ir_node *phi = new_ir_node(NULL, irg, block, op_Phi, mode, n_ins, ins);
1074 phi->attr.phi.u.backedge = new_backedge_arr(irg->obst, n_ins);
1075 info = be_get_info(phi);
1076 info->out_infos = NEW_ARR_D(reg_out_info_t, obst, 1);
1077 memset(info->out_infos, 0, 1 * sizeof(info->out_infos[0]));
1078 info->in_reqs = OALLOCN(obst, const arch_register_req_t*, n_ins);
1080 info->out_infos[0].req = req;
1081 for (i = 0; i < n_ins; ++i) {
1082 info->in_reqs[i] = req;
1084 irn_verify_irg(phi, irg);
1085 phi = optimize_node(phi);
1089 void be_set_phi_reg_req(ir_node *node, const arch_register_req_t *req)
1091 int arity = get_irn_arity(node);
1094 backend_info_t *info = be_get_info(node);
1095 info->out_infos[0].req = req;
1096 for (i = 0; i < arity; ++i) {
1097 info->in_reqs[i] = req;
1100 assert(mode_is_datab(get_irn_mode(node)));
1103 void be_dump_phi_reg_reqs(FILE *F, const ir_node *node, dump_reason_t reason)
1106 case dump_node_opcode_txt:
1107 fputs(get_op_name(get_irn_op(node)), F);
1109 case dump_node_mode_txt:
1110 fprintf(F, "%s", get_mode_name(get_irn_mode(node)));
1112 case dump_node_nodeattr_txt:
1114 case dump_node_info_txt:
1116 backend_info_t *info = be_get_info(node);
1117 if (info != NULL && info->out_infos[0].req != NULL) {
1118 arch_dump_reqs_and_registers(F, node);
1128 static const arch_irn_ops_t phi_irn_ops = {
1129 dummy_get_frame_entity,
1130 dummy_set_frame_offset,
1132 NULL, /* get_inverse */
1133 NULL, /* get_op_estimated_cost */
1134 NULL, /* possible_memory_operand */
1135 NULL, /* perform_memory_operand */
1141 * ir_op-Operation: dump a be node to file
1143 static void dump_node(FILE *f, const ir_node *irn, dump_reason_t reason)
1145 assert(is_be_node(irn));
1148 case dump_node_opcode_txt:
1149 fputs(get_op_name(get_irn_op(irn)), f);
1151 case dump_node_mode_txt:
1152 if (be_is_Copy(irn) || be_is_CopyKeep(irn)) {
1153 fprintf(f, "%s", get_mode_name(get_irn_mode(irn)));
1156 case dump_node_nodeattr_txt:
1157 if (be_is_Call(irn)) {
1158 const be_call_attr_t *a = (const be_call_attr_t*)get_irn_generic_attr_const(irn);
1160 fprintf(f, " [%s] ", get_entity_name(a->ent));
1162 if (be_is_IncSP(irn)) {
1163 const be_incsp_attr_t *attr = (const be_incsp_attr_t*)get_irn_generic_attr_const(irn);
1164 fprintf(f, " [%d] ", attr->offset);
1167 case dump_node_info_txt:
1168 arch_dump_reqs_and_registers(f, irn);
1170 if (be_has_frame_entity(irn)) {
1171 const be_frame_attr_t *a = (const be_frame_attr_t*)get_irn_generic_attr_const(irn);
1173 unsigned size = get_type_size_bytes(get_entity_type(a->ent));
1174 ir_fprintf(f, "frame entity: %+F, offset 0x%x (%d), size 0x%x (%d) bytes\n",
1175 a->ent, a->offset, a->offset, size, size);
1180 switch (get_irn_opcode(irn)) {
1182 const be_incsp_attr_t *a = (const be_incsp_attr_t*)get_irn_generic_attr_const(irn);
1183 fprintf(f, "align: %d\n", a->align);
1184 fprintf(f, "offset: %d\n", a->offset);
1188 const be_call_attr_t *a = (const be_call_attr_t*)get_irn_generic_attr_const(irn);
1191 fprintf(f, "\ncalling: %s\n", get_entity_name(a->ent));
1196 for (i = 0; i < be_get_MemPerm_entity_arity(irn); ++i) {
1197 ir_entity *in, *out;
1198 in = be_get_MemPerm_in_entity(irn, i);
1199 out = be_get_MemPerm_out_entity(irn, i);
1201 fprintf(f, "\nin[%d]: %s\n", i, get_entity_name(in));
1204 fprintf(f, "\nout[%d]: %s\n", i, get_entity_name(out));
1218 * Copies the backend specific attributes from old node to new node.
1220 static void copy_attr(ir_graph *irg, const ir_node *old_node, ir_node *new_node)
1222 const void *old_attr = get_irn_generic_attr_const(old_node);
1223 void *new_attr = get_irn_generic_attr(new_node);
1224 struct obstack *obst = be_get_be_obst(irg);
1225 backend_info_t *old_info = be_get_info(old_node);
1226 backend_info_t *new_info = be_get_info(new_node);
1228 assert(is_be_node(old_node));
1229 assert(is_be_node(new_node));
1231 memcpy(new_attr, old_attr, get_op_attr_size(get_irn_op(old_node)));
1233 new_info->flags = old_info->flags;
1234 if (old_info->out_infos != NULL) {
1235 size_t n_outs = ARR_LEN(old_info->out_infos);
1236 /* need dyanmic out infos? */
1237 if (be_is_Perm(new_node)) {
1238 new_info->out_infos = NEW_ARR_F(reg_out_info_t, n_outs);
1240 new_info->out_infos = NEW_ARR_D(reg_out_info_t, obst, n_outs);
1242 memcpy(new_info->out_infos, old_info->out_infos,
1243 n_outs * sizeof(new_info->out_infos[0]));
1245 new_info->out_infos = NULL;
1249 if (old_info->in_reqs != NULL) {
1250 unsigned n_ins = get_irn_arity(old_node);
1251 /* need dynamic in infos? */
1252 if (get_irn_op(old_node)->opar == oparity_dynamic) {
1253 new_info->in_reqs = NEW_ARR_F(const arch_register_req_t*, n_ins);
1255 new_info->in_reqs = OALLOCN(obst,const arch_register_req_t*, n_ins);
1257 memcpy(new_info->in_reqs, old_info->in_reqs,
1258 n_ins * sizeof(new_info->in_reqs[0]));
1260 new_info->in_reqs = NULL;
1264 int is_be_node(const ir_node *irn)
1266 return get_op_ops(get_irn_op(irn))->be_ops == &be_node_irn_ops;
1269 static ir_op *new_be_op(unsigned code, const char *name, op_pin_state p,
1270 irop_flags flags, op_arity opar, size_t attr_size)
1272 ir_op *res = new_ir_op(code, name, p, flags, opar, 0, attr_size);
1273 res->ops.dump_node = dump_node;
1274 res->ops.copy_attr = copy_attr;
1275 res->ops.be_ops = &be_node_irn_ops;
1279 void be_init_op(void)
1283 assert(op_be_Spill == NULL);
1285 /* Acquire all needed opcodes. */
1286 op_be_Spill = new_be_op(beo_Spill, "be_Spill", op_pin_state_exc_pinned, irop_flag_none, oparity_unary, sizeof(be_frame_attr_t));
1287 op_be_Reload = new_be_op(beo_Reload, "be_Reload", op_pin_state_exc_pinned, irop_flag_none, oparity_zero, sizeof(be_frame_attr_t));
1288 op_be_Perm = new_be_op(beo_Perm, "be_Perm", op_pin_state_exc_pinned, irop_flag_none, oparity_variable, sizeof(be_node_attr_t));
1289 op_be_MemPerm = new_be_op(beo_MemPerm, "be_MemPerm", op_pin_state_exc_pinned, irop_flag_none, oparity_variable, sizeof(be_memperm_attr_t));
1290 op_be_Copy = new_be_op(beo_Copy, "be_Copy", op_pin_state_exc_pinned, irop_flag_none, oparity_unary, sizeof(be_node_attr_t));
1291 op_be_Keep = new_be_op(beo_Keep, "be_Keep", op_pin_state_exc_pinned, irop_flag_keep, oparity_dynamic, sizeof(be_node_attr_t));
1292 op_be_CopyKeep = new_be_op(beo_CopyKeep, "be_CopyKeep", op_pin_state_exc_pinned, irop_flag_keep, oparity_variable, sizeof(be_node_attr_t));
1293 op_be_Call = new_be_op(beo_Call, "be_Call", op_pin_state_exc_pinned, irop_flag_fragile|irop_flag_uses_memory, oparity_variable, sizeof(be_call_attr_t));
1294 ir_op_set_memory_index(op_be_Call, n_be_Call_mem);
1295 ir_op_set_fragile_indices(op_be_Call, pn_be_Call_X_regular, pn_be_Call_X_except);
1296 op_be_Return = new_be_op(beo_Return, "be_Return", op_pin_state_exc_pinned, irop_flag_cfopcode, oparity_variable, sizeof(be_return_attr_t));
1297 op_be_AddSP = new_be_op(beo_AddSP, "be_AddSP", op_pin_state_exc_pinned, irop_flag_none, oparity_unary, sizeof(be_node_attr_t));
1298 op_be_SubSP = new_be_op(beo_SubSP, "be_SubSP", op_pin_state_exc_pinned, irop_flag_none, oparity_unary, sizeof(be_node_attr_t));
1299 op_be_IncSP = new_be_op(beo_IncSP, "be_IncSP", op_pin_state_exc_pinned, irop_flag_none, oparity_unary, sizeof(be_incsp_attr_t));
1300 op_be_Start = new_be_op(beo_Start, "be_Start", op_pin_state_exc_pinned, irop_flag_none, oparity_zero, sizeof(be_node_attr_t));
1301 op_be_FrameAddr = new_be_op(beo_FrameAddr, "be_FrameAddr", op_pin_state_exc_pinned, irop_flag_none, oparity_unary, sizeof(be_frame_attr_t));
1303 op_be_Spill->ops.node_cmp_attr = FrameAddr_cmp_attr;
1304 op_be_Reload->ops.node_cmp_attr = FrameAddr_cmp_attr;
1305 op_be_Perm->ops.node_cmp_attr = be_nodes_equal;
1306 op_be_MemPerm->ops.node_cmp_attr = be_nodes_equal;
1307 op_be_Copy->ops.node_cmp_attr = be_nodes_equal;
1308 op_be_Keep->ops.node_cmp_attr = be_nodes_equal;
1309 op_be_CopyKeep->ops.node_cmp_attr = be_nodes_equal;
1310 op_be_Call->ops.node_cmp_attr = Call_cmp_attr;
1311 op_be_Return->ops.node_cmp_attr = Return_cmp_attr;
1312 op_be_AddSP->ops.node_cmp_attr = be_nodes_equal;
1313 op_be_SubSP->ops.node_cmp_attr = be_nodes_equal;
1314 op_be_IncSP->ops.node_cmp_attr = IncSP_cmp_attr;
1315 op_be_Start->ops.node_cmp_attr = be_nodes_equal;
1316 op_be_FrameAddr->ops.node_cmp_attr = FrameAddr_cmp_attr;
1318 /* attach out dummy_ops to middle end nodes */
1319 for (opc = iro_First; opc <= iro_Last; ++opc) {
1320 ir_op *op = ir_get_opcode(opc);
1321 assert(op->ops.be_ops == NULL);
1322 op->ops.be_ops = &dummy_be_irn_ops;
1325 op_Phi->ops.be_ops = &phi_irn_ops;
1328 void be_finish_op(void)
1330 free_ir_op(op_be_Spill); op_be_Spill = NULL;
1331 free_ir_op(op_be_Reload); op_be_Reload = NULL;
1332 free_ir_op(op_be_Perm); op_be_Perm = NULL;
1333 free_ir_op(op_be_MemPerm); op_be_MemPerm = NULL;
1334 free_ir_op(op_be_Copy); op_be_Copy = NULL;
1335 free_ir_op(op_be_Keep); op_be_Keep = NULL;
1336 free_ir_op(op_be_CopyKeep); op_be_CopyKeep = NULL;
1337 free_ir_op(op_be_Call); op_be_Call = NULL;
1338 free_ir_op(op_be_Return); op_be_Return = NULL;
1339 free_ir_op(op_be_IncSP); op_be_IncSP = NULL;
1340 free_ir_op(op_be_AddSP); op_be_AddSP = NULL;
1341 free_ir_op(op_be_SubSP); op_be_SubSP = NULL;
1342 free_ir_op(op_be_Start); op_be_Start = NULL;
1343 free_ir_op(op_be_FrameAddr); op_be_FrameAddr = NULL;