2 * Chordal register allocation.
3 * @author Sebastian Hack
6 * Copyright (C) Universitaet Karlsruhe
7 * Released under the GPL
29 #include "bipartite.h"
32 #include "irgraph_t.h"
33 #include "irprintf_t.h"
43 #include "besched_t.h"
49 #include "bechordal_t.h"
50 #include "bechordal_draw.h"
52 #define DBG_LEVEL SET_LEVEL_0
53 #define DBG_LEVEL_CHECK SET_LEVEL_0
57 #define DUMP_INTERVALS
/**
 * Transient state for one run of the chordal coloring phase.
 * Threaded through the dominance-tree walkers (constraints, pressure, assign)
 * below via their void* data argument.
 */
59 typedef struct _be_chordal_alloc_env_t {
60 be_chordal_env_t *chordal_env; /**< The enclosing chordal environment. */
62 pset *pre_colored; /**< Set of precolored nodes. */
63 bitset_t *live; /**< A liveness bitset. */
64 bitset_t *colors; /**< The color mask. */
65 bitset_t *valid_colors; /**< A mask of colors which shall be considered during allocation.
66 Registers with the ignore bit on must not be considered. */
67 bitset_t *in_colors; /**< Colors used by live in values. */
68 int colors_n; /**< The number of colors. */
69 } be_chordal_alloc_env_t;
73 /* Make a fourcc for border checking. */
74 #define BORDER_FOURCC FOURCC('B', 'O', 'R', 'D')

/**
 * Debug helper: assert that every border on a block's border list
 * still carries the BORDER_FOURCC magic (i.e. was not corrupted).
 * @param head The border list head of one block.
 */
76 static void check_border_list(struct list_head *head)
79 list_for_each_entry(border_t, x, head, list) {
80 assert(x->magic == BORDER_FOURCC);
/**
 * Debug helper: run check_border_list() on the border list of every
 * block recorded in env->border_heads.
 * @param env The chordal environment.
 */
84 static void check_heads(be_chordal_env_t *env)
87 for(ent = pmap_first(env->border_heads); ent; ent = pmap_next(env->border_heads)) {
88 /* ir_printf("checking border list of block %+F\n", ent->key); */
89 check_border_list(ent->value);
95 * Add an interval border to a block's border list.
97 * @note You always have to create the use before the def.
98 * @param env The environment.
99 * @param head The list head to enqueue the borders.
100 * @param irn The node (value) the border belongs to.
102 * @param step A time step for the border.
101 * @param pressure The pressure at this point in time.
103 * @param is_def Is the border a use or a def.
 * @param is_real 1 for a border stemming from an actual instruction,
 *                0 for an artificial block-boundary border (live-in/live-out).
104 * @return The created border.
106 static INLINE border_t *border_add(be_chordal_env_t *env, struct list_head *head,
107 ir_node *irn, unsigned step, unsigned pressure,
108 unsigned is_def, unsigned is_real)
/* Use case: allocate the use border itself ... */
115 b = obstack_alloc(&env->obst, sizeof(*b));
117 /* also allocate the def and tie it to the use. */
118 def = obstack_alloc(&env->obst, sizeof(*def));
119 memset(def, 0, sizeof(*def));
124 * Set the link field of the irn to the def.
125 * This strongly relies on the fact, that the use is always
126 * made before the def.
128 set_irn_link(irn, def);
130 b->magic = BORDER_FOURCC;
131 def->magic = BORDER_FOURCC;
135 * If the def is encountered, the use was made and so was
136 * the def node (see the code above). It was placed into the
137 * link field of the irn, so we can get it there.
140 b = get_irn_link(irn);
142 assert(b && b->magic == BORDER_FOURCC && "Illegal border encountered");
/* Fill in the common fields and enqueue at the tail of the block's list. */
145 b->pressure = pressure;
147 b->is_real = is_real;
150 list_add_tail(&b->list, head);
151 DBG((env->dbg, LEVEL_5, "\t\t%s adding %+F, step: %d\n", is_def ? "def" : "use", irn, step));
158 * Check, if an irn is of the register class currently under processing.
159 * @param env The chordal environment.
160 * @param irn The node.
161 * @return 1, if the node is of that register class, 0 if not.
163 static INLINE int has_reg_class(const be_chordal_env_t *env, const ir_node *irn)
/* -1 queries the node's own (output) register requirement. */
165 return arch_irn_has_reg_class(env->main_env->arch_env, irn, -1, env->cls);
/* Evaluates to true if irn's output requirement is a "limited" (constrained)
   register requirement; fills *req as a side effect. Relies on a local
   arch_env being in scope at the expansion site. */
168 #define has_limited_constr(req, irn) \
169 (arch_get_register_req(arch_env, (req), irn, -1) && (req)->type == arch_register_req_type_limited)

/* NOTE(review): the following lines are fragments of two struct definitions
   (operand_t and insn_t); most of their members are outside this view. */
171 typedef struct _operand_t operand_t;
178 arch_register_req_t req; /**< Register requirement of this operand. */
186 unsigned has_constraints : 1; /**< At least one operand has a limited requirement. */
/**
 * Collect an instruction's operands (defs first, then uses) together with
 * their register requirements into a freshly allocated insn_t.
 * For a mode_T node the defs are the Proj results scheduled directly after it
 * (stored with pos = -(proj_nr + 1)); otherwise the node itself is the def.
 * Sets insn->has_constraints if any operand has a "limited" requirement.
 * @param env  The chordal environment.
 * @param irn  The instruction to scan.
 * @param obst Obstack the insn and its operand array are allocated on.
 * @return The filled-in insn_t.
 */
189 static insn_t *scan_insn(be_chordal_env_t *env, ir_node *irn, struct obstack *obst)
191 const arch_env_t *arch_env = env->main_env->arch_env;
196 insn = obstack_alloc(obst, sizeof(insn[0]));
197 memset(insn, 0, sizeof(insn[0]));
199 insn->next_insn = sched_next(irn);
200 if(get_irn_mode(irn) == mode_T) {
/* Multi-result node: each relevant Proj scheduled after it is a def operand. */
203 for(p = sched_next(irn); is_Proj(p); p = sched_next(p)) {
204 if(arch_irn_consider_in_reg_alloc(arch_env, env->cls, p)) {
/* Encode "result" positions as negative numbers (cf. arch position convention). */
207 o.pos = -(get_Proj_proj(p) + 1);
209 arch_get_register_req(arch_env, &o.req, p, -1);
210 obstack_grow(obst, &o, sizeof(o));
212 insn->has_constraints |= arch_register_req_is(&o.req, limited);
/* Single-result node: the node itself is the only def operand. */
219 else if(arch_irn_consider_in_reg_alloc(arch_env, env->cls, irn)) {
224 arch_get_register_req(arch_env, &o.req, irn, -1);
225 obstack_grow(obst, &o, sizeof(o));
227 insn->has_constraints |= arch_register_req_is(&o.req, limited);
/* Uses start here; remember the boundary between defs and uses. */
230 insn->use_start = insn->n_ops;
232 for(i = 0, n = get_irn_arity(irn); i < n; ++i) {
233 ir_node *op = get_irn_n(irn, i);
235 if(arch_irn_consider_in_reg_alloc(arch_env, env->cls, op)) {
/* Requirement is queried on the user (irn) at input position i. */
240 arch_get_register_req(arch_env, &o.req, irn, i);
241 obstack_grow(obst, &o, sizeof(o));
243 insn->has_constraints |= arch_register_req_is(&o.req, limited);
247 insn->ops = obstack_finish(obst);
/**
 * Search the use operands of an insn for one that can be paired with a def
 * (for should-be-same / constraint pairing): not yet partnered, not
 * interfering, and only constrained if can_be_constrained allows it.
 * @param insn               The instruction whose uses are scanned.
 * @param op                 The def operand seeking a partner.
 * @param can_be_constrained Whether a "limited" use is an acceptable partner.
 * @return The matching use operand, or NULL (presumably via res; the tail of
 *         the function is outside this view).
 */
251 static operand_t *find_unpaired_use(insn_t *insn, const operand_t *op, int can_be_constrained)
254 operand_t *res = NULL;
256 for(i = insn->use_start; i < insn->n_ops; ++i) {
/* NOTE(review): this inner 'op' shadows the 'op' parameter, so the parameter
   is never consulted inside the loop as shown here — verify against the full
   source whether this shadowing is intended. */
257 operand_t *op = &insn->ops[i];
258 int has_constraint = arch_register_req_is(&op->req, limited);
260 if(!values_interfere(op->carrier, op->irn) && !op->partner && (!has_constraint || can_be_constrained)) {
261 if(arch_register_req_is(&op->req, should_be_same) && op->req.other_same == op->carrier)
/**
 * Pair each def operand of an insn with a compatible use operand
 * (mutually linking their partner fields). An unconstrained def may take a
 * constrained use as partner, but a constrained def only takes an
 * unconstrained one (note the negation passed to find_unpaired_use).
 * @param insn The instruction whose operands are paired.
 */
271 static void pair_up_operands(insn_t *insn)
273 firm_dbg_module_t *dbg = firm_dbg_register("firm.be.chordal.constr");
/* Defs occupy ops[0 .. use_start). */
276 for(i = 0; i < insn->use_start; ++i) {
277 operand_t *op = &insn->ops[i];
278 int has_constraint = arch_register_req_is(&op->req, limited);
279 operand_t *partner = find_unpaired_use(insn, op, !has_constraint);
282 op->partner = partner;
283 partner->partner = op;
/**
 * Resolve register constraints at a single instruction.
 * Inserts a Perm over the live values in front of the constrained node,
 * solves a bipartite matching between constrained values and allowed
 * registers, pre-colors the matched values, and finally gives every
 * remaining Proj of the Perm some free register.
 * @param alloc_env The chordal allocation environment.
 * @param irn       The (possibly constrained) instruction.
 * @return The next instruction to process in the schedule.
 */
288 static ir_node *handle_constraints(be_chordal_alloc_env_t *alloc_env, ir_node *irn)
290 be_chordal_env_t *env = alloc_env->chordal_env;
291 void *base = obstack_base(&env->obst);
292 insn_t *insn = scan_insn(env, irn, &env->obst);
293 ir_node *res = insn->next_insn;
295 if(insn->has_constraints) {
296 firm_dbg_module_t *dbg = firm_dbg_register("firm.be.chordal.constr");
297 const arch_env_t *aenv = env->main_env->arch_env;
298 int n_regs = env->cls->n_regs;
299 bitset_t *bs = bitset_alloca(n_regs);
300 ir_node **alloc_nodes = alloca(n_regs * sizeof(alloc_nodes[0]));
301 bipartite_t *bp = bipartite_new(n_regs, n_regs);
302 int *assignment = alloca(n_regs * sizeof(assignment[0]));
303 pmap *partners = pmap_create();
307 const ir_edge_t *edge;
/* Insert a Perm over all values of this class live before irn. */
308 ir_node *perm = insert_Perm_after(aenv, env->cls, env->dom_front, sched_prev(irn));
310 /* Registers are propagated by insert_Perm_after(). Clean them here! */
312 foreach_out_edge(perm, edge) {
313 ir_node *proj = get_edge_src_irn(edge);
314 arch_set_irn_register(aenv, proj, NULL);
/* The Perm changed liveness; recompute it and rescan the instruction,
   whose operands are now Projs of the Perm. */
319 be_liveness(env->irg);
320 insn = scan_insn(env, irn, &env->obst);
322 DBG((dbg, LEVEL_1, "handling constraints for %+F\n", irn));
325 * If there was no Perm made, nothing was alive in this register class.
326 * This means, that the node has no operands, thus no input constraints.
327 * so it had output constraints. The other results then can be assigned freely.
330 pair_up_operands(insn);
/* Phase 1: feed every constrained operand and its allowed registers
   into the bipartite matching problem. */
332 for(i = 0, n_alloc = 0; i < insn->n_ops; ++i) {
333 operand_t *op = &insn->ops[i];
334 if(arch_register_req_is(&op->req, limited)) {
335 pmap_insert(partners, op->carrier, op->partner ? op->partner->carrier : NULL);
336 alloc_nodes[n_alloc] = op->carrier;
338 DBG((dbg, LEVEL_2, "\tassociating %+F and %+F\n", op->carrier, pmap_get(partners, op->carrier)));
340 bitset_clear_all(bs);
341 op->req.limited(op->req.limited_env, bs);
/* Never offer ignore registers. */
342 bitset_and(bs, alloc_env->valid_colors);
344 DBG((dbg, LEVEL_2, "\tallowed registers for %+F: %B\n", op->carrier, bs));
346 bitset_foreach(bs, col)
347 bipartite_add(bp, n_alloc, col);
/* Phase 2: Perm Projs which interfere with irn (i.e. live across it)
   also compete for registers and join the matching. */
354 foreach_out_edge(perm, edge) {
355 ir_node *proj = get_edge_src_irn(edge);
357 assert(is_Proj(proj));
359 if(values_interfere(proj, irn)) {
360 assert(n_alloc < n_regs);
361 alloc_nodes[n_alloc] = proj;
362 pmap_insert(partners, proj, NULL);
364 bitset_clear_all(bs);
365 arch_get_allocatable_regs(aenv, proj, -1, bs);
366 bitset_and(bs, alloc_env->valid_colors);
367 bitset_foreach(bs, col)
368 bipartite_add(bp, n_alloc, col);
/* Phase 3: solve the matching and pre-color each node (and its
   should-be-same partner, if any) with the matched register. */
375 bipartite_matching(bp, assignment);
378 for(i = 0; i < n_alloc; ++i) {
381 const arch_register_t *reg;
383 assert(assignment[i] >= 0 && "there must have been a register assigned");
384 reg = arch_register_for_index(env->cls, assignment[i]);
386 nodes[0] = alloc_nodes[i];
387 nodes[1] = pmap_get(partners, alloc_nodes[i]);
389 for(j = 0; j < 2; ++j) {
393 arch_set_irn_register(aenv, nodes[j], reg);
394 pset_hinsert_ptr(alloc_env->pre_colored, nodes[j]);
395 DBG((dbg, LEVEL_2, "\tsetting %+F to register %s\n", nodes[j], reg->name));
/* Phase 4: mark all registers already taken by Perm Projs ... */
401 bitset_clear_all(bs);
402 foreach_out_edge(perm, edge) {
403 ir_node *proj = get_edge_src_irn(edge);
404 const arch_register_t *reg = arch_get_irn_register(aenv, proj);
407 bitset_set(bs, reg->index);
410 // bitset_or(bs, alloc_env->ignore_colors);
/* ... then hand every still-uncolored Proj the next free register. */
411 foreach_out_edge(perm, edge) {
412 ir_node *proj = get_edge_src_irn(edge);
413 const arch_register_t *reg = arch_get_irn_register(aenv, proj);
415 DBG((dbg, LEVEL_2, "\tchecking reg of %+F: %s\n", proj, reg ? reg->name : "<none>"));
418 col = bitset_next_clear(bs, 0);
419 reg = arch_register_for_index(env->cls, col);
420 bitset_set(bs, reg->index);
421 arch_set_irn_register(aenv, proj, reg);
422 pset_insert_ptr(alloc_env->pre_colored, proj);
423 DBG((dbg, LEVEL_2, "\tsetting %+F to register %s\n", proj, reg->name));
428 pmap_destroy(partners);
/* Release the insn_t(s) allocated on the env obstack during scanning. */
431 obstack_free(&env->obst, base);
436 * Handle constraint nodes in each basic block.
437 * be_insert_constr_perms() inserts Perm nodes which perm
438 * over all values live at the constrained node right in front
439 * of the constrained node. These Perms signal a constrained node.
440 * For further comments, refer to handle_constraints_at_perm().
442 static void constraints(ir_node *bl, void *data)
444 firm_dbg_module_t *dbg = firm_dbg_register("firm.be.chordal.constr");
445 be_chordal_alloc_env_t *env = data;
446 arch_env_t *arch_env = env->chordal_env->main_env->arch_env;
/* handle_constraints() returns the next instruction, so the loop body
   (not the for-header) advances irn. */
449 for(irn = sched_first(bl); !sched_is_end(irn);) {
450 irn = handle_constraints(env, irn);
455 * Annotate the register pressure to the nodes and compute
456 * the liveness intervals.
457 * @param block The block to do it for.
458 * @param env_ptr The environment.
460 static void pressure(ir_node *block, void *env_ptr)
462 /* Convenience macro for a def */
463 #define border_def(irn, step, real) \
464 border_add(env, head, irn, step, pressure--, 1, real)
466 /* Convenience macro for a use */
467 #define border_use(irn, step, real) \
468 border_add(env, head, irn, step, ++pressure, 0, real)
470 be_chordal_alloc_env_t *alloc_env = env_ptr;
471 be_chordal_env_t *env = alloc_env->chordal_env;
472 bitset_t *live = alloc_env->live;
473 firm_dbg_module_t *dbg = env->dbg;
478 unsigned pressure = 0;
479 struct list_head *head;
480 pset *live_in = put_live_in(block, pset_new_ptr_default());
481 pset *live_end = put_live_end(block, pset_new_ptr_default());
483 DBG((dbg, LEVEL_1, "Computing pressure in block %+F\n", block));
484 bitset_clear_all(live);
486 /* Set up the border list in the block info */
487 head = obstack_alloc(&env->obst, sizeof(*head));
488 INIT_LIST_HEAD(head);
489 assert(pmap_get(env->border_heads, block) == NULL);
490 pmap_insert(env->border_heads, block, head);
493 * Make final uses of all values live out of the block.
494 * They are necessary to build up real intervals.
496 for(irn = pset_first(live_end); irn; irn = pset_next(live_end)) {
497 if(has_reg_class(env, irn)) {
498 DBG((dbg, LEVEL_3, "\tMaking live: %+F/%d\n", irn, get_irn_graph_nr(irn)));
499 bitset_set(live, get_irn_graph_nr(irn));
/* Artificial use (is_real == 0): the value merely leaves the block. */
500 border_use(irn, step, 0);
506 * Determine the last uses of a value inside the block, since they are
507 * relevant for the interval borders.
509 sched_foreach_reverse(block, irn) {
510 DBG((dbg, LEVEL_1, "\tinsn: %+F, pressure: %d\n", irn, pressure));
511 DBG((dbg, LEVEL_2, "\tlive: %b\n", live));
514 * If the node defines some value, which can be put into a
515 * register of the current class, make a border for it.
517 if(has_reg_class(env, irn)) {
518 int nr = get_irn_graph_nr(irn);
520 bitset_clear(live, nr);
521 border_def(irn, step, 1);
525 * If the node is no phi node we can examine the uses.
528 for(i = 0, n = get_irn_arity(irn); i < n; ++i) {
529 ir_node *op = get_irn_n(irn, i);
531 if(has_reg_class(env, op)) {
532 int nr = get_irn_graph_nr(op);
534 DBG((dbg, LEVEL_4, "\t\tpos: %d, use: %+F\n", i, op));
/* First (i.e. last in schedule order) use seen while walking backwards. */
536 if(!bitset_is_set(live, nr)) {
537 border_use(op, step, 1);
538 bitset_set(live, nr);
547 * Add initial defs for all values live in.
549 for(irn = pset_first(live_in); irn; irn = pset_next(live_in)) {
550 if(has_reg_class(env, irn)) {
552 /* Mark the value live in. */
553 bitset_set(live, get_irn_graph_nr(irn));
/* Artificial def (is_real == 0): the value enters from a dominator. */
556 border_def(irn, step, 0);
/**
 * Assign registers (colors) to all values of a block by walking its border
 * list. Walking the defs from first to last follows a perfect elimination
 * order of the chordal interference graph, so a greedy "first free color"
 * choice is optimal. Dominators have been colored before this block.
 * @param block   The block to color.
 * @param env_ptr The be_chordal_alloc_env_t.
 */
565 static void assign(ir_node *block, void *env_ptr)
567 be_chordal_alloc_env_t *alloc_env = env_ptr;
568 be_chordal_env_t *env = alloc_env->chordal_env;
569 firm_dbg_module_t *dbg = env->dbg;
570 bitset_t *live = alloc_env->live;
571 bitset_t *colors = alloc_env->colors;
572 bitset_t *in_colors = alloc_env->in_colors;
573 const arch_env_t *arch_env = env->main_env->arch_env;
577 struct list_head *head = get_block_border_head(env, block);
578 pset *live_in = put_live_in(block, pset_new_ptr_default());
580 bitset_clear_all(live);
581 bitset_clear_all(in_colors);
/* Start with all invalid (ignore) colors marked as taken, so they are
   never handed out below. */
583 bitset_copy(colors, alloc_env->valid_colors);
584 bitset_flip_all(colors);
586 DBG((dbg, LEVEL_4, "Assigning colors for block %+F\n", block));
587 DBG((dbg, LEVEL_4, "\tusedef chain for block\n"));
588 list_for_each_entry(border_t, b, head, list) {
589 DBG((dbg, LEVEL_4, "\t%s %+F/%d\n", b->is_def ? "def" : "use",
590 b->irn, get_irn_graph_nr(b->irn)));
594 * Add initial defs for all values live in.
595 * Since their colors have already been assigned (The dominators were
596 * allocated before), we have to mark their colors as used also.
598 for(irn = pset_first(live_in); irn; irn = pset_next(live_in)) {
599 if(has_reg_class(env, irn)) {
600 const arch_register_t *reg = arch_get_irn_register(arch_env, irn);
603 assert(reg && "Node must have been assigned a register");
604 col = arch_register_get_index(reg);
606 /* Mark the color of the live in value as used. */
607 bitset_set(colors, col);
608 bitset_set(in_colors, col);
610 /* Mark the value live in. */
611 bitset_set(live, get_irn_graph_nr(irn));
616 * Mind that the sequence of defs from back to front defines a perfect
617 * elimination order. So, coloring the definitions from first to last
620 list_for_each_entry_reverse(border_t, b, head, list) {
621 ir_node *irn = b->irn;
622 int nr = get_irn_graph_nr(irn);
625 * Assign a color, if it is a local def. Global defs already have a
628 if(b->is_def && !is_live_in(block, irn)) {
629 const arch_register_t *reg;
/* Pre-colored nodes (from constraint handling) keep their register. */
632 if(pset_find_ptr(alloc_env->pre_colored, irn)) {
633 reg = arch_get_irn_register(arch_env, irn);
635 assert(!bitset_is_set(colors, col) && "pre-colored register must be free");
/* Otherwise take the lowest free color. */
639 col = bitset_next_clear(colors, 0);
640 reg = arch_register_for_index(env->cls, col);
641 assert(arch_get_irn_register(arch_env, irn) == NULL && "This node must not have been assigned a register yet");
644 bitset_set(colors, col);
645 arch_set_irn_register(arch_env, irn, reg);
647 DBG((dbg, LEVEL_1, "\tassigning register %s(%d) to %+F\n",
648 arch_register_get_name(reg), col, irn));
650 assert(!bitset_is_set(live, nr) && "Value's definition must not have been encountered");
651 bitset_set(live, nr);
654 /* Clear the color upon a use. */
655 else if(!b->is_def) {
656 const arch_register_t *reg = arch_get_irn_register(arch_env, irn);
659 assert(reg && "Register must have been assigned");
661 col = arch_register_get_index(reg);
662 assert(bitset_is_set(live, nr) && "Cannot have a non live use");
/* A use is the end of the value's interval: free its color. */
664 bitset_clear(colors, col);
665 bitset_clear(live, nr);
/**
 * Driver of the chordal register allocation for one register class:
 * sets up the allocation environment, resolves register constraints,
 * computes liveness intervals (pressure) and assigns colors, all by
 * walking the dominance tree. Optionally dumps the schedule and the
 * interval tree, depending on chordal_env->opts->dump_flags.
 * @param chordal_env The chordal environment of the current class.
 */
672 void be_ra_chordal_color(be_chordal_env_t *chordal_env)
674 be_chordal_alloc_env_t env;
677 int colors_n = arch_register_class_n_regs(chordal_env->cls);
678 ir_graph *irg = chordal_env->irg;
/* Coloring relies on dominance information being up to date. */
681 if(get_irg_dom_state(irg) != dom_consistent)
684 env.chordal_env = chordal_env;
685 env.colors_n = colors_n;
686 env.colors = bitset_malloc(colors_n);
687 env.valid_colors = bitset_malloc(colors_n);
688 env.in_colors = bitset_malloc(colors_n);
689 env.pre_colored = pset_new_ptr_default();
/* valid_colors = all registers of the class minus the ignore registers. */
691 arch_put_non_ignore_regs(chordal_env->main_env->arch_env, chordal_env->cls, env.valid_colors);
693 /* Handle register targeting constraints */
694 dom_tree_walk_irg(irg, constraints, NULL, &env);
696 if(chordal_env->opts->dump_flags & BE_CH_DUMP_CONSTR) {
697 snprintf(buf, sizeof(buf), "-%s-constr", chordal_env->cls->name);
698 dump_ir_block_graph_sched(chordal_env->irg, buf);
/* Allocated only now: constraint handling may have added nodes. */
702 env.live = bitset_malloc(get_graph_node_count(chordal_env->irg));
704 /* First, determine the pressure */
705 dom_tree_walk_irg(irg, pressure, NULL, &env);
707 /* Assign the colors */
708 dom_tree_walk_irg(irg, assign, NULL, &env);
710 be_numbering_done(irg);
712 if(chordal_env->opts->dump_flags & BE_CH_DUMP_TREE_INTV) {
715 ir_snprintf(buf, sizeof(buf), "ifg_%s_%F.eps", chordal_env->cls->name, irg);
716 plotter = new_plotter_ps(buf);
717 draw_interval_tree(&draw_chordal_def_opts, chordal_env, plotter);
718 plotter_free(plotter);
/* NOTE(review): frees for env.colors, env.in_colors and env.live are
   presumably in the lines not visible here — verify against full source. */
724 free(env.valid_colors);
726 del_pset(env.pre_colored);