2 * Author: Daniel Grund, Sebastian Hack, Matthias Braun
4 * Copyright: (c) Universitaet Karlsruhe
5 * Licence: This file is protected by the GPL - GNU GENERAL PUBLIC LICENSE.
16 #include "iredges_t.h"
24 #include "unionfind.h"
28 #include "besched_t.h"
32 #include "bechordal_t.h"
33 #include "bejavacoal.h"
35 /* This enables re-computation of values. Current state: Unfinished and buggy. */
38 typedef struct _reloader_t reloader_t;
45 typedef struct _spill_info_t {
46 ir_node *spilled_node;
47 reloader_t *reloaders;
53 const arch_register_class_t *cls;
54 const be_chordal_env_t *chordal_env;
56 set *spills; /**< all spill_info_t's, which must be placed */
57 pset *mem_phis; /**< set of all special spilled phis. allocated and freed separately */
59 DEBUG_ONLY(firm_dbg_module_t *dbg;)
63 * Compare two spill infos.
/* Set-compare callback for env->spills: two spill infos are considered equal
 * iff they describe the same spilled node (pointer identity); returns 0 on
 * equality, non-zero otherwise, per the `set` API convention.
 * NOTE(review): `size` is required by the set callback signature but unused. */
65 static int cmp_spillinfo(const void *x, const void *y, size_t size) {
66 const spill_info_t *xx = x;
67 const spill_info_t *yy = y;
/* only (in)equality matters here, no ordering is needed */
68 return xx->spilled_node != yy->spilled_node;
72 * Returns spill info for a specific value (the value that is to be spilled)
/* Looks up the spill_info_t for `value` in env->spills, creating a fresh
 * record (with an empty reloader list and no spill node yet) on a miss. */
74 static spill_info_t *get_spillinfo(const spill_env_t *env, ir_node *value) {
75 spill_info_t info, *res;
/* hash on the node pointer — spilled_node is the sole key (see cmp_spillinfo) */
76 int hash = HASH_PTR(value);
78 info.spilled_node = value;
79 res = set_find(env->spills, &info, sizeof(info), hash);
/* miss: insert a fresh record; NOTE(review): the surrounding NULL-check /
 * other field initialization / return lines are elided in this view */
82 info.reloaders = NULL;
84 res = set_insert(env->spills, &info, sizeof(info), hash);
91 /* Sets the debug module of a spill environment. */
92 void be_set_spill_env_dbg_module(spill_env_t *env, firm_dbg_module_t *dbg) {
/* NOTE(review): body elided in this view; presumably just stores dbg into
 * env->dbg (the DEBUG_ONLY field above) — TODO confirm against full source */
97 /* Creates a new spill environment. */
/* Allocates and initializes a spill_env_t bound to the given chordal env.
 * Caller owns the result and must release it with be_delete_spill_env(). */
98 spill_env_t *be_new_spill_env(const be_chordal_env_t *chordal_env) {
99 spill_env_t *env = xmalloc(sizeof(env[0]));
/* spill infos live in a hash set keyed by the spilled node */
100 env->spills = new_set(cmp_spillinfo, 1024);
101 env->cls = chordal_env->cls;
102 env->chordal_env = chordal_env;
/* phis that will be turned into memory phis (see be_spill_phi) */
103 env->mem_phis = pset_new_ptr_default();
/* the obstack backs the reloader_t records allocated in be_add_reload */
104 obstack_init(&env->obst);
108 /* Deletes a spill environment. */
/* Releases everything be_new_spill_env allocated: the spill-info set, the
 * mem-phi pset and, via the obstack, all reloader_t records at once. */
109 void be_delete_spill_env(spill_env_t *env) {
110 del_set(env->spills);
111 del_pset(env->mem_phis);
/* NULL base pointer frees the whole obstack contents */
112 obstack_free(&env->obst, NULL);
117 * Schedules a node after an instruction. (That is the place after all projs and phis
118 * that are scheduled after the instruction)
120 static void sched_add_after_insn(ir_node *sched_after, ir_node *node) {
121 ir_node *next = sched_next(sched_after);
/* advance past the Projs/Phis that belong to sched_after until the first
 * "real" instruction; NOTE(review): the loop-exit (break) line for the
 * non-Proj/non-Phi case is elided in this view */
122 while(!sched_is_end(next)) {
123 if(!is_Proj(next) && !is_Phi(next))
125 next = sched_next(next);
/* ran off the end of the block: append after its last scheduled node */
128 if(sched_is_end(next)) {
129 next = sched_last(get_nodes_block(sched_after));
130 sched_add_after(next, node);
/* otherwise insert just before the first non-Proj/Phi instruction found */
132 sched_add_before(next, node);
139 * @param senv the spill environment
140 * @param irn the node that should be spilled
141 * @param ctx_irn an user of the spilled node
143 * @return a be_Spill node
/* Creates the spill for a single (non-phi) value: either reuses the memory
 * edge of an existing Reload, or builds a be_Spill node scheduled directly
 * after the definition so it dominates all reload sites. */
145 static void spill_irn(spill_env_t *env, spill_info_t *spillinfo) {
146 const be_main_env_t *mainenv = env->chordal_env->birg->main_env;
147 ir_node *to_spill = spillinfo->spilled_node;
149 DBG((env->dbg, LEVEL_1, "%+F\n", to_spill));
151 /* Trying to spill an already spilled value, no need for a new spill
152 * node then, we can simply connect to the same one for this reload
/* reuse the Reload's memory input as this value's spill */
154 if(be_is_Reload(to_spill)) {
155 spillinfo->spill = get_irn_n(to_spill, be_pos_Reload_mem);
/* NOTE(review): the return/else between the two cases is elided in this view */
159 spillinfo->spill = be_spill(mainenv->arch_env, to_spill);
160 sched_add_after_insn(to_spill, spillinfo->spill);
163 static void spill_node(spill_env_t *env, spill_info_t *spillinfo);
166 * If the first usage of a Phi result would be out of memory
167 * there is no sense in allocating a register for it.
168 * Thus we spill it and all its operands to the same spill slot.
169 * Therefore the phi/dataB becomes a phi/Memory
171 * @param senv the spill environment
172 * @param phi the Phi node that should be spilled
173 * @param ctx_irn an user of the spilled node
175 static void spill_phi(spill_env_t *env, spill_info_t *spillinfo) {
176 ir_node *phi = spillinfo->spilled_node;
178 int arity = get_irn_arity(phi);
179 ir_node *block = get_nodes_block(phi);
184 /* build a new PhiM */
/* NOTE(review): alloca'd scratch array for the PhiM inputs — stack-allocated,
 * valid only until this function returns (fine: only used for construction) */
185 ins = alloca(sizeof(ir_node*) * arity);
/* start with Bad inputs; they are patched below once each operand's spill
 * node exists */
186 for(i = 0; i < arity; ++i) {
187 ins[i] = get_irg_bad(env->chordal_env->irg);
189 spillinfo->spill = new_r_Phi(env->chordal_env->irg, block, arity, ins, mode_M);
/* spill every operand and wire its spill (a memory value) into the PhiM */
191 for(i = 0; i < arity; ++i) {
192 ir_node *arg = get_irn_n(phi, i);
193 spill_info_t *arg_info = get_spillinfo(env, arg);
/* recursive: operands may themselves be mem-phis */
195 spill_node(env, arg_info);
197 set_irn_n(spillinfo->spill, i, arg_info->spill);
204 * @param senv the spill environment
205 * @param to_spill the node that should be spilled
/* Dispatches the spill for one value: phis registered in env->mem_phis are
 * turned into memory phis (spill_phi), everything else gets a regular
 * be_Spill (spill_irn). Idempotent via the spill != NULL guard. */
207 static void spill_node(spill_env_t *env, spill_info_t *spillinfo) {
210 // the node should be tagged for spilling already...
/* already spilled — nothing to do (the early return is elided in this view) */
211 if(spillinfo->spill != NULL)
214 to_spill = spillinfo->spilled_node;
215 if (is_Phi(to_spill) && pset_find_ptr(env->mem_phis, spillinfo->spilled_node)) {
216 spill_phi(env, spillinfo);
/* NOTE(review): else branch — the `else` keyword line is elided in this view */
218 spill_irn(env, spillinfo);
/* Advances over consecutive Projs in the schedule and yields the first
 * non-Proj node (the return statement is elided in this view). */
222 static INLINE ir_node *skip_projs(ir_node *node) {
223 while(is_Proj(node)) {
224 node = sched_next(node);
/* a Proj must never be the last scheduled node of a block */
225 assert(!sched_is_end(node));
233 * Searches the schedule backwards until we reach the first use or def of a
235 * Returns the node after this node (so that you can do sched_add_before)
237 static ir_node *find_last_use_def(spill_env_t *env, ir_node *block, ir_node *value) {
238 ir_node *node, *last;
/* walk the block schedule back to front; NOTE(review): the maintenance of
 * `last` (the previously visited node) and the def/use tests guarding the
 * two returns below are elided in this view */
241 sched_foreach_reverse(block, node) {
/* `value` is defined (or used) here: insert right after it, past its Projs */
248 return skip_projs(last);
/* scan the operands of `node` for a use of `value` */
250 for(i = 0, arity = get_irn_arity(node); i < arity; ++i) {
251 ir_node *arg = get_irn_n(node, i);
253 return skip_projs(last);
259 // simply return first node if no def or use found
260 return sched_first(block);
267 * Check if a spilled node could be rematerialized.
269 * @param senv the spill environment
270 * @param spill the Spill node
271 * @param spilled the node that was spilled
272 * @param reloader a irn that requires a reload
/* BUGGY_REMAT variant (see the "Unfinished and buggy" note at the top of the
 * file): tries to prove that all operands of `spilled` are still live at the
 * remat point. Several branches/returns are elided in this view. */
274 static int check_remat_conditions(spill_env_t *senv, ir_node *spilled, ir_node *reloader) {
277 /* check for 'normal' spill and general remat condition */
/* the backend must have flagged this op as rematerializable at all */
278 if (!arch_irn_is(senv->chordal_env->birg->main_env->arch_env, spilled, rematerializable))
281 /* check availability of original arguments */
282 if (is_Block(reloader)) {
284 /* we want to remat at the end of a block.
285 * thus all arguments must be alive at the end of the block
287 for (pos=0, max=get_irn_arity(spilled); pos<max; ++pos) {
288 ir_node *arg = get_irn_n(spilled, pos);
289 if (!is_live_end(reloader, arg))
295 /* we want to remat before the insn reloader
296 * thus an argument is alive if
297 * - it interferes with the reloaders result
299 * - or it is (last-) used by reloader itself
301 for (pos=0, max=get_irn_arity(spilled); pos<max; ++pos) {
302 ir_node *arg = get_irn_n(spilled, pos);
/* interference with the reloader's value ⇒ arg live across the remat point */
305 if (values_interfere(reloader, arg))
/* otherwise: check the reloader's own operands for a use of arg */
308 for (i=0, m=get_irn_arity(reloader); i<m; ++i) {
309 ir_node *rel_arg = get_irn_n(reloader, i);
314 /* arg is not alive before reloader */
326 #else /* BUGGY_REMAT */
329 * A very simple rematerialization checker.
331 * @param senv the spill environment
332 * @param spill the Spill node
333 * @param spilled the node that was spilled
334 * @param reloader a irn that requires a reload
336 static int check_remat_conditions(spill_env_t *senv, ir_node *spilled, ir_node *reloader) {
337 const arch_env_t *aenv = senv->chordal_env->birg->main_env->arch_env;
/* conservative: only operand-free, backend-flagged-rematerializable ops
 * (e.g. constant producers) qualify — no operand liveness to prove */
339 return get_irn_arity(spilled) == 0 &&
340 arch_irn_is(aenv, spilled, rematerializable);
343 #endif /* BUGGY_REMAT */
346 * Re-materialize a node.
348 * @param senv the spill environment
349 * @param spilled the node that was spilled
350 * @param reloader a irn that requires a reload
/* Clones `spilled` so its value is recomputed in place of a reload, and
 * schedules the clone before `reloader` (or at the end of the block when the
 * reload site is a Block). Returns the clone (return elided in this view). */
352 static ir_node *do_remat(spill_env_t *senv, ir_node *spilled, ir_node *reloader) {
/* a Block as "reloader" means: rematerialize at the end of that block */
354 ir_node *bl = (is_Block(reloader)) ? reloader : get_nodes_block(reloader);
356 /* recompute the value */
357 res = new_ir_node(get_irn_dbg_info(spilled), senv->chordal_env->irg, bl,
359 get_irn_mode(spilled),
360 get_irn_arity(spilled),
/* +1 skips the block entry of the raw in[] array (libFirm convention) */
361 get_irn_in(spilled) + 1);
/* copy op-specific attributes so the clone computes the same value */
362 copy_node_attr(spilled, res);
364 DBG((senv->dbg, LEVEL_1, "Insert remat %+F before reloader %+F\n", res, reloader));
366 /* insert in schedule */
367 if (is_Block(reloader)) {
/* skip control-flow ops at the block end so the remat precedes the jump */
368 ir_node *insert = sched_skip(reloader, 0, sched_skip_cf_predicator, (void *) senv->chordal_env->birg->main_env->arch_env);
369 sched_add_after(insert, res);
371 sched_add_before(reloader, res);
/* Marks a Phi as spilled-to-memory: it will become a PhiM instead of being
 * assigned a register. Registers spill infos for the phi and all operands. */
377 void be_spill_phi(spill_env_t *env, ir_node *node) {
380 assert(is_Phi(node));
/* remember the phi so spill_node() later dispatches to spill_phi() */
382 pset_insert_ptr(env->mem_phis, node);
384 // create spillinfos for the phi arguments
385 get_spillinfo(env, node);
386 for(i = 0, arity = get_irn_arity(node); i < arity; ++i) {
387 ir_node *arg = get_irn_n(node, i);
388 get_spillinfo(env, arg);
/* Materializes everything recorded so far: for each spilled value, every
 * reload site gets either a rematerialized clone or a be_Reload, then SSA
 * form is reconstructed over the new definitions. Finally the spill set is
 * reset so the environment can be reused for further spilling decisions. */
392 void be_insert_spills_reloads(spill_env_t *env) {
393 const arch_env_t *arch_env = env->chordal_env->birg->main_env->arch_env;
396 /* process each spilled node */
397 DBG((env->dbg, LEVEL_1, "Insert spills and reloads:\n"));
398 for(si = set_first(env->spills); si; si = set_next(env->spills)) {
400 ir_mode *mode = get_irn_mode(si->spilled_node);
/* collects the new definitions (remats/reloads) for SSA reconstruction */
401 pset *values = pset_new_ptr(16);
403 /* go through all reloads for this spill */
404 for(rld = si->reloaders; rld; rld = rld->next) {
/* prefer recomputing the value over loading it back from memory */
407 if (check_remat_conditions(env, si->spilled_node, rld->reloader)) {
408 new_val = do_remat(env, si->spilled_node, rld->reloader);
410 /* make sure we have a spill */
/* NOTE(review): the spill_node() call implied by the comment above is
 * elided in this view */
414 new_val = be_reload(arch_env, env->cls, rld->reloader, mode, si->spill);
417 DBG((env->dbg, LEVEL_1, " %+F of %+F before %+F\n", new_val, si->spilled_node, rld->reloader));
418 pset_insert_ptr(values, new_val);
421 if(pset_count(values) > 0) {
422 /* introduce copies, rewire the uses */
423 pset_insert_ptr(values, si->spilled_node);
/* SSA reconstruction; mem-phis are excluded — they are memory values now */
424 be_ssa_constr_set_ignore(env->chordal_env->dom_front, env->chordal_env->lv, values, env->mem_phis);
430 // reloads are placed now, but we might reuse the spill environment for further spilling decisions
431 del_set(env->spills);
432 env->spills = new_set(cmp_spillinfo, 1024);
/* Records that `to_spill` must be available in a register again directly
 * before `before`. No IR is created here — the actual spill/reload nodes are
 * built later by be_insert_spills_reloads(). */
435 void be_add_reload(spill_env_t *env, ir_node *to_spill, ir_node *before) {
439 assert(sched_is_scheduled(before));
440 assert(arch_irn_consider_in_reg_alloc(env->chordal_env->birg->main_env->arch_env, env->cls, to_spill));
442 info = get_spillinfo(env, to_spill);
/* phis pull their operands into the spill bookkeeping as well */
444 if(is_Phi(to_spill)) {
446 // create spillinfos for the phi arguments
447 for(i = 0, arity = get_irn_arity(to_spill); i < arity; ++i) {
448 ir_node *arg = get_irn_n(to_spill, i);
449 get_spillinfo(env, arg);
/* prepend a reloader record (obstack-allocated, freed with the env) */
453 rel = obstack_alloc(&env->obst, sizeof(rel[0]));
454 rel->reloader = before;
455 rel->next = info->reloaders;
456 info->reloaders = rel;
/* keep liveness information consistent with the new use site */
457 be_liveness_add_missing(env->chordal_env->lv);
/* Like be_add_reload, but for a value needed on the control-flow edge into
 * `block` coming from predecessor `pos`: the reload is requested in the
 * predecessor block, before its (conditional) jump. */
460 void be_add_reload_on_edge(spill_env_t *env, ir_node *to_spill, ir_node *block, int pos) {
461 ir_node *predblock, *last;
463 /* simply add the reload to the beginning of the block if we only have 1 predecessor
464 * (we don't need to check for phis as there can't be any in a block with only 1 pred)
466 if(get_Block_n_cfgpreds(block) == 1) {
467 assert(!is_Phi(sched_first(block)));
468 be_add_reload(env, to_spill, sched_first(block));
/* NOTE(review): the return closing the single-pred case is elided here */
472 /* We have to reload the value in pred-block */
473 predblock = get_Block_cfgpred_block(block, pos);
474 last = sched_last(predblock);
476 /* we might have projs and keepanys behind the jump... */
477 while(is_Proj(last) || be_is_Keep(last)) {
478 last = sched_prev(last);
479 assert(!sched_is_end(last));
/* after skipping, `last` must be the control-flow op ending the block */
481 assert(is_cfop(last));
483 // add the reload before the (cond-)jump
484 be_add_reload(env, to_spill, last);