4 * Copyright: (c) Universitaet Karlsruhe
* Licence: This file is protected by the GPL - GNU GENERAL PUBLIC LICENSE.
7 * Heuristic for minimizing copies using a queue which holds 'qnodes' not yet
8 * examined. A qnode has a 'target color', nodes out of the opt unit and
* a 'conflict graph'. 'Conflict graph' = 'Interference graph' + 'conflict edges'
* A 'max indep set' is determined from these. We try to color this mis using a
* color-exchanging mechanism. Occurring conflicts are modeled with 'conflict edges'
12 * and the qnode is reinserted in the queue. The first qnode colored without
13 * conflicts is the best one.
#include <stdint.h>

#include "becopyopt.h"
#include "becopystat.h"
/* Debug mask for this module's firm_dbg channel (0 = silent). */
#define DEBUG_LVL 0 //SET_LEVEL_1
static firm_dbg_module_t *dbg = NULL;
/* Initial slot counts for the hash sets / psets allocated below. */
#define SLOTS_PINNED_GLOBAL 256
#define SLOTS_CONFLICTS 8
#define SLOTS_CHANGED_NODES 32
/* Minimum of two values. Both the arguments and the whole expansion are
 * parenthesized to avoid operator-precedence bugs (e.g. MIN(x|y, z)).
 * NOTE: a and b may be evaluated twice -- do not pass side-effecting
 * expressions. */
#define MIN(a,b) (((a)<(b))?(a):(b))
/* Get the qnode_t embedding the given list head 'lh' of its queue chain. */
#define list_entry_queue(lh) list_entry(lh, qnode_t, queue)
/* Hash a conflict_t by xor-ing the hashes of its two node pointers. */
#define HASH_CONFLICT(c) (HASH_PTR(c.n1) ^ HASH_PTR(c.n2))
* Modeling additional conflicts between nodes. NOT live range interference
typedef struct _conflict_t {
const ir_node *n1, *n2; /**< the two conflicting nodes (canonically ordered by address on insert) */
* If an irn is changed, the changes first get stored in a node_stat_t,
* to allow undo of changes (=drop new data) in case of conflicts.
typedef struct _node_stat_t { /* members used below: irn, new_color, pinned_local */
* Represents a node in the optimization queue.
typedef struct _qnode_t {
struct list_head queue; /**< chaining of unit_t->queue */
const unit_t *ou; /**< the opt unit this qnode belongs to */
int color; /**< target color */
set *conflicts; /**< contains conflict_t's. All internal conflicts */
int mis_size; /**< number of nodes in the mis. */
ir_node **mis; /**< the nodes of unit_t->nodes[] being part of the max independent set */
set *changed_nodes; /**< contains node_stat_t's. */
pset *pinned_global; /**< optimized nodes should not be altered any more */
/* set compare callback for conflict_t entries: returns 0 (equal) iff both
 * node pointers match pairwise (order-sensitive; pairs are canonicalized
 * on insertion). */
static int set_cmp_conflict_t(const void *x, const void *y, size_t size) {
const conflict_t *xx = x;
const conflict_t *yy = y;
return ! (xx->n1 == yy->n1 && xx->n2 == yy->n2);
* If a local pinned conflict occurs, a new edge in the conflict graph is added.
* The next maximum independent set build, will regard it.
static INLINE void qnode_add_conflict(const qnode_t *qn, const ir_node *n1, const ir_node *n2) {
DBG((dbg, LEVEL_4, "\t %n -- %n\n", n1, n2));
/* NOTE(review): casting a pointer to int truncates on LP64 targets. The
 * comparison only canonicalizes the pair ordering, but (uintptr_t) would
 * be the portable choice -- confirm and fix together with the matching
 * cast in the conflict lookup. */
if ((int)n1 < (int)n2) {
set_insert(qn->conflicts, &c, sizeof(c), HASH_CONFLICT(c));
* Checks if two nodes are in a conflict.
static INLINE int qnode_are_conflicting(const qnode_t *qn, const ir_node *n1, const ir_node *n2) {
/* search for live range interference */
if (n1!=n2 && nodes_interfere(qn->ou->co->chordal_env, n1, n2))
/* search for recoloring conflicts */
/* NOTE(review): pointer-to-int cast truncates on LP64 targets; used here
 * only to canonicalize the pair ordering for the lookup. */
if ((int)n1 < (int)n2) {
/* NOTE(review): casting the found pointer to int can truncate to 0 on
 * 64-bit targets, wrongly reporting "no conflict" -- should be a
 * comparison against NULL instead. */
return (int) set_find(qn->conflicts, &c, sizeof(c), HASH_CONFLICT(c));
/* set compare callback for node_stat_t entries: returns 0 (equal) iff both
 * entries refer to the same irn. */
static int set_cmp_node_stat_t(const void *x, const void *y, size_t size) {
return ((node_stat_t *)x)->irn != ((node_stat_t *)y)->irn;
* Finds a node status entry of a node if existent. Otherwise return NULL
static INLINE node_stat_t *qnode_find_node(const qnode_t *qn, ir_node *irn) {
/* lookup only -- no insertion; returns NULL if irn was never changed */
return set_find(qn->changed_nodes, &find, sizeof(find), HASH_PTR(irn));
* Finds a node status entry of a node if existent. Otherwise it will return
* an initialized new entry for this node.
static INLINE node_stat_t *qnode_find_or_insert_node(const qnode_t *qn, ir_node *irn) {
/* defaults for a fresh entry: no virtual color, not locally pinned */
find.new_color = NO_COLOR;
find.pinned_local = 0;
/* set_insert presumably returns the existing entry if one with this irn
 * is already present (find-or-insert semantics) -- confirm in set API */
return set_insert(qn->changed_nodes, &find, sizeof(find), HASH_PTR(irn));
* Returns the virtual color of a node if set before, else returns the real color.
static INLINE int qnode_get_new_color(const qnode_t *qn, ir_node *irn) {
node_stat_t *found = qnode_find_node(qn, irn);
/* a recorded virtual color takes precedence */
return found->new_color;
/* otherwise fall back to the node's current (real) color */
return get_irn_col(qn->ou->co, irn);
* Sets the virtual color of a node.
static INLINE void qnode_set_new_color(const qnode_t *qn, ir_node *irn, int color) {
node_stat_t *found = qnode_find_or_insert_node(qn, irn);
found->new_color = color;
* Checks if a node is local pinned. A node is local pinned, iff it belongs
* to the same optimization unit and has been optimized before the current
static INLINE int qnode_is_pinned_local(const qnode_t *qn, ir_node *irn) {
node_stat_t *found = qnode_find_node(qn, irn);
/* only nodes with a status entry can be pinned */
return found->pinned_local;
* Local-pins a node, so optimizations of further nodes of the same opt unit
* can handle situations in which a color change would undo prior optimizations.
static INLINE void qnode_pin_local(const qnode_t *qn, ir_node *irn) {
node_stat_t *found = qnode_find_or_insert_node(qn, irn);
found->pinned_local = 1;
183 * Possible return values of qnode_color_irn()
/* Sentinel results: NULL = change is safe, 1 = change impossible. */
#define CHANGE_SAVE NULL
#define CHANGE_IMPOSSIBLE (ir_node *)1
/* Any value above CHANGE_IMPOSSIBLE is a pointer to the conflicting node.
 * Compare through uintptr_t so the pointer is not truncated: the previous
 * (int) cast discarded the upper bits on LP64 targets, so a real node
 * pointer could be misclassified as a sentinel. */
#define is_conflicting_node(n) (((uintptr_t)(n)) > 1)
* Performs virtual re-coloring of node @p n to color @p col. Virtual colors of
* other nodes are changed too, as required to preserve correctness. Function is
* aware of local and global pinning. Recursive.
* @param irn The node to set the color for
* @param col The color to set
* @param trigger The irn that caused the wish to change the color of the irn
* @return CHANGE_SAVE iff setting the color is possible, with all transitive effects.
* CHANGE_IMPOSSIBLE iff conflicts with reg-constraints occurred.
* Else the first conflicting ir_node encountered is returned.
* ASSUMPTION: Assumes that a live range of a single value can't be split into
* several smaller intervals where other values can live in between.
* This should be true in SSA.
static ir_node *qnode_color_irn(const qnode_t *qn, ir_node *irn, int col, const ir_node *trigger) {
struct obstack confl_ob;
ir_node **confl, *cn;
const be_chordal_env_t *chordal_env = qn->ou->co->chordal_env;
const arch_env_t *arch_env = chordal_env->arch_env;
const arch_register_class_t *cls = chordal_env->cls;
DBG((dbg, LEVEL_3, "\t %n \tcaused col(%n) \t%2d --> %2d\n", trigger, irn, qnode_get_new_color(qn, irn), col));
obstack_init(&confl_ob);
irn_col = qnode_get_new_color(qn, irn);
/* a pinned node must not be recolored: it is itself the conflicting node */
if (pset_find_ptr(pinned_global, irn) || qnode_is_pinned_local(qn, irn)) {
/* register-constraint check: col must be allocatable for irn's out position */
if (!arch_reg_is_allocatable(arch_env,
arch_pos_make_out(0),
arch_register_for_index(cls, col)))
/* get all nodes which would conflict with this change */
irn_bl = get_nodes_block(irn);
/* first check for a conflicting node which is 'living in' the irns block */
pset *live_ins = put_live_in(irn_bl, pset_new_ptr_default());
for (n = pset_first(live_ins); n; n = pset_next(live_ins))
if (arch_irn_has_reg_class(arch_env, n, arch_pos_make_out(0), cls)
&& n != trigger && qnode_get_new_color(qn, n) == col
&& nodes_interfere(chordal_env, irn, n)) {
DBG((dbg, LEVEL_4, "\t %n\ttroubles\n", n));
obstack_ptr_grow(&confl_ob, n);
pset_break(live_ins);
/* setup the queue of blocks. */
obstack_ptr_grow(&q, irn_bl);
/* process the queue. The code below checks for every block dominated
* by the irns one, and in which the irn is live, if there are
* conflicting nodes */
ir_node *curr_bl, *sub_bl;
curr_bl = ((ir_node **)obstack_base(&q))[out++];
/* Add to the result all nodes in the block, which have
* the target color and interfere with the irn */
for (i = 0, max = get_irn_n_outs(curr_bl); i < max; ++i) {
ir_node *n = get_irn_out(curr_bl, i);
if (arch_irn_has_reg_class(arch_env, n, arch_pos_make_out(0), cls)
&& n != trigger && qnode_get_new_color(qn, n) == col
&& nodes_interfere(chordal_env, irn, n)) {
DBG((dbg, LEVEL_4, "\t %n\ttroubles\n", n));
obstack_ptr_grow(&confl_ob, n);
/* If irn lives out check i-dominated blocks where the irn lives in */
if (is_live_out(curr_bl, irn)) {
dominates_for_each(curr_bl, sub_bl)
if (is_live_in(sub_bl, irn)) {
obstack_ptr_grow(&q, sub_bl);
obstack_free(&q, NULL);
/* NULL-terminate the collected conflict array */
obstack_ptr_grow(&confl_ob, NULL);
confl = (ir_node **) obstack_finish(&confl_ob);
/* process all nodes which would conflict with this change */
for (i = 0, cn = confl[0]; cn; cn = confl[++i]) {
/* try to color the conflicting node cn with the color of the irn itself */
sub_res = qnode_color_irn(qn, cn, irn_col, irn);
if (sub_res != CHANGE_SAVE) {
/* if we arrive here all sub changes can be applied, so it's safe to change this irn */
DBG((dbg, LEVEL_3, "\t %n save\n", irn));
obstack_free(&confl_ob, NULL);
qnode_set_new_color(qn, irn, col);
DBG((dbg, LEVEL_3, "\t %n impossible\n", irn));
obstack_free(&confl_ob, NULL);
return CHANGE_IMPOSSIBLE;
DBG((dbg, LEVEL_3, "\t %n conflicting\n", irn));
obstack_free(&confl_ob, NULL);
* Tries to set the colors for all members of this queue node;
* to the target color qn->color
* @returns 1 iff all members colors could be set
static int qnode_try_color(const qnode_t *qn) {
for (i=0; i<qn->mis_size; ++i) {
ir_node *test_node, *confl_node;
test_node = qn->mis[i];
DBG((dbg, LEVEL_3, "\t Testing %n\n", test_node));
confl_node = qnode_color_irn(qn, test_node, qn->color, test_node);
if (confl_node == CHANGE_SAVE) {
DBG((dbg, LEVEL_3, "\t Save --> pin local\n"));
qnode_pin_local(qn, test_node);
} else if (confl_node == CHANGE_IMPOSSIBLE) {
DBG((dbg, LEVEL_3, "\t Impossible --> remove from qnode\n"));
/* a self-conflict marks test_node as removed from this qnode */
qnode_add_conflict(qn, test_node, test_node);
if (qnode_is_pinned_local(qn, confl_node)) {
/* changing test_node would change back a node of current ou */
DBG((dbg, LEVEL_3, "\t Conflicting local --> add conflict\n"));
qnode_add_conflict(qn, confl_node, test_node);
if (pset_find_ptr(pinned_global, confl_node)) {
/* changing test_node would change back a node of a prior ou */
DBG((dbg, LEVEL_3, "\t Conflicting global --> remove from qnode\n"));
qnode_add_conflict(qn, test_node, test_node);
/* any non-safe result aborts this qnode's coloring attempt */
if (confl_node != CHANGE_SAVE)
* Determines a maximum independent set with respect to the interference and
* conflict edges of all nodes in a qnode.
* NOTE(review): enumerates candidate subsets by decreasing size, which is
* exponential in the worst case -- presumably acceptable because opt units
* are small; confirm node_count stays bounded.
static INLINE void qnode_max_ind_set(qnode_t *qn, const unit_t *ou) {
int all_size, curr_size, i, o;
ir_node **curr, **all = alloca(ou->node_count * sizeof(*all));
/* all contains all nodes not removed in this qn */
for (i=0; i<ou->node_count; ++i)
/* a self-conflict marks a node as removed from this qnode */
if (!qnode_are_conflicting(qn, ou->nodes[i], ou->nodes[i]))
all[all_size++] = ou->nodes[i];
/* which[i] says which element to take out of all[] and put into curr[i] */
which = alloca(all_size*sizeof(*which));
for (curr_size=0; curr_size<all_size; ++curr_size)
which[curr_size] = curr_size;
/* stores the currently examined set */
curr = alloca(all_size*sizeof(*curr));
while (1) { /* this loop will terminate because at least a single node will be a max indep. set */
/* build current set */
for (i=0; i<curr_size; ++i)
curr[i] = all[which[all_size-curr_size+i]];
/* check current set */
for (i=0; i<curr_size; ++i)
for (o=i+1; o<curr_size; ++o)
if (qnode_are_conflicting(qn, curr[i], curr[o]))
/* We had no conflict. This is the max indep. set */
qn->mis_size = curr_size;
for (i=0; i<curr_size; ++i)
qn->mis[i] = curr[i];
/* We had a conflict. Generate next set */
if (which[all_size-curr_size+1] == all_size-curr_size+1) {
/* restart the selection with a smaller set size */
for (i=0; i<curr_size; ++i)
which[all_size-curr_size+i] = i;
} while (!(which[pos] = (which[pos]+1) % all_size));
/* keep selection indices strictly increasing after position pos */
for (i=pos+1; i<all_size; ++i)
which[i] = MIN(which[i-1]+1, all_size-1);
for (i=all_size-curr_size; i<all_size-1; ++i)
if (which[i]>=which[i+1]) {
* Creates a new qnode
static INLINE qnode_t *new_qnode(const unit_t *ou, int color) {
qnode_t *qn = xmalloc(sizeof(*qn));
/* NOTE(review): plain malloc here but xmalloc above -- this allocation is
 * unchecked; consider xmalloc for consistency. */
qn->mis = malloc(ou->node_count * sizeof(*qn->mis));
qn->conflicts = new_set(set_cmp_conflict_t, SLOTS_CONFLICTS);
qn->changed_nodes = new_set(set_cmp_node_stat_t, SLOTS_CHANGED_NODES);
* Frees space used by a queue node
static INLINE void free_qnode(qnode_t *qn) {
del_set(qn->conflicts);
del_set(qn->changed_nodes);
* Inserts a qnode in the sorted queue of the optimization unit. Queue is
* ordered by field 'size' (the size of the mis) in decreasing order.
static INLINE void ou_insert_qnode(unit_t *ou, qnode_t *qn) {
struct list_head *lh;
/* a self-conflict on the root means this qnode cannot contain it */
if (qnode_are_conflicting(qn, ou->nodes[0], ou->nodes[0])) {
/* root node is not in qnode */
qnode_max_ind_set(qn, ou);
/* do the insertion */
DBG((dbg, LEVEL_4, "\t Insert qnode color %d with size %d\n", qn->color, qn->mis_size));
/* advance past all entries with a larger mis to keep decreasing order */
while (lh->next != &ou->queue) {
qnode_t *curr = list_entry_queue(lh->next);
if (curr->mis_size <= qn->mis_size)
list_add(&qn->queue, lh);
* Tries to re-allocate colors of nodes in this opt unit, to achieve a lower
* number of copy instructions placed during SSA-destruction and lowering.
* Works only for opt units with exactly 1 root node, which is the
* case for approximately 80% of all phi classes and all register constrained
* nodes. (All other phi classes are reduced to this case.)
static void ou_optimize(unit_t *ou) {
bitset_t *pos_regs = bitset_alloca(ou->co->chordal_env->cls->n_regs);
DBG((dbg, LEVEL_1, "\tOptimizing unit:\n"));
for (i=0; i<ou->node_count; ++i)
DBG((dbg, LEVEL_1, "\t %n\n", ou->nodes[i]));
/* create one qnode per allocatable color of the root node */
INIT_LIST_HEAD(&ou->queue);
arch_get_allocatable_regs(ou->co->chordal_env->arch_env, ou->nodes[0], arch_pos_make_out(0), ou->co->chordal_env->cls, pos_regs);
bitset_foreach(pos_regs, i)
ou_insert_qnode(ou, new_qnode(ou, i));
while (!list_empty(&ou->queue)) {
/* get head of queue */
curr = list_entry_queue(ou->queue.next);
list_del(&curr->queue);
DBG((dbg, LEVEL_2, "\t Examine qnode color %d with size %d\n", curr->color, curr->mis_size));
/* the first qnode colored without conflicts is the best one */
if (qnode_try_color(curr))
/* no success, so re-insert */
del_set(curr->changed_nodes);
curr->changed_nodes = new_set(set_cmp_node_stat_t, SLOTS_CHANGED_NODES);
ou_insert_qnode(ou, curr);
/* apply the best found qnode */
if (curr->mis_size >= 2) {
DBG((dbg, LEVEL_1, "\t Best color: %d Copies: %d/%d\n", curr->color, ou->interf+ou->node_count-curr->mis_size, ou->interf+ou->node_count-1));
/* globally pin root and eventually others */
pset_insert_ptr(pinned_global, ou->nodes[0]);
for (i=1; i<ou->node_count; ++i) {
ir_node *irn = ou->nodes[i];
int nc = qnode_get_new_color(curr, irn);
/* pin nodes that ended up with the same color as the root */
if (nc != NO_COLOR && nc == qnode_get_new_color(curr, ou->nodes[0]))
pset_insert_ptr(pinned_global, irn);
/* set color of all changed nodes */
for (ns = set_first(curr->changed_nodes); ns; ns = set_next(curr->changed_nodes)) {
/* NO_COLOR is possible, if we had an undo */
if (ns->new_color != NO_COLOR) {
DBG((dbg, LEVEL_2, "\t color(%n) := %d\n", ns->irn, ns->new_color));
set_irn_col(ou->co, ns->irn, ns->new_color);
/* free best qnode (curr) and queue */
list_for_each_entry_safe(qnode_t, curr, tmp, &ou->queue, queue)
/* Entry point: run the copy-minimization heuristic on all opt units of @p co. */
void co_heur_opt(copy_opt_t *co) {
dbg = firm_dbg_register("ir.be.copyoptheur");
firm_dbg_set_mask(dbg, DEBUG_LVL);
/* raise verbosity only for the irg selected for debugging */
if (!strcmp(co->name, DEBUG_IRG))
firm_dbg_set_mask(dbg, DEBUG_LVL_HEUR);
firm_dbg_set_mask(dbg, DEBUG_LVL);
pinned_global = pset_new_ptr(SLOTS_PINNED_GLOBAL);
list_for_each_entry(unit_t, curr, &co->units, units)
/* single-node units have no copies to optimize */
if (curr->node_count > 1)
del_pset(pinned_global);