ir_loop *unused_livethrough_loop;
};
+#define foreach_worklist(entry, wl) list_for_each_entry(worklist_entry_t, entry, &(wl)->live_values, head)
+
typedef struct worklist_t worklist_t;
struct worklist_t {
struct list_head live_values;
static void deactivate_worklist(const worklist_t *worklist)
{
- struct list_head *entry;
+ worklist_entry_t *entry;
- list_for_each(entry, &worklist->live_values) {
- worklist_entry_t *wl_entry
- = list_entry(entry, worklist_entry_t, head);
- assert(worklist_contains(wl_entry->value));
- mark_irn_not_visited(wl_entry->value);
- set_irn_link(wl_entry->value, NULL);
+ foreach_worklist(entry, worklist) {
+ assert(worklist_contains(entry->value));
+ mark_irn_not_visited(entry->value);
+ set_irn_link(entry->value, NULL);
}
}
static void activate_worklist(const worklist_t *worklist)
{
- struct list_head *entry;
+ worklist_entry_t *entry;
- list_for_each(entry, &worklist->live_values) {
- worklist_entry_t *wl_entry
- = list_entry(entry, worklist_entry_t, head);
- assert(!worklist_contains(wl_entry->value));
- mark_irn_visited(wl_entry->value);
- set_irn_link(wl_entry->value, wl_entry);
+ foreach_worklist(entry, worklist) {
+ assert(!worklist_contains(entry->value));
+ mark_irn_visited(entry->value);
+ set_irn_link(entry->value, entry);
}
}
{
ir_node *reload_point = NULL;
size_t n_live_values = 0;
- struct list_head *entry;
+ worklist_entry_t *entry;
if (succ_block != NULL &&
(get_Block_n_cfgpreds(succ_block) > 1
reload_point = be_get_end_of_block_insertion_point(block);
}
- list_for_each(entry, &worklist->live_values) {
- worklist_entry_t *wl_entry = list_entry(entry, worklist_entry_t, head);
- ir_node *value = wl_entry->value;
+ foreach_worklist(entry, worklist) {
+ ir_node *value = entry->value;
worklist_entry_t *new_entry;
if (new_worklist->n_live_values >= n_regs)
if (reload_point != NULL) {
new_entry->reload_point = reload_point;
} else {
- new_entry->reload_point = wl_entry->reload_point;
+ new_entry->reload_point = entry->reload_point;
}
list_add_tail(&new_entry->head, &new_worklist->live_values);
ir_node *reload_point,
ir_loop *unused_livethrough_loop)
{
- worklist_entry_t *entry = obstack_alloc(&obst, sizeof(entry[0]));
- memset(entry, 0, sizeof(entry[0]));
+ worklist_entry_t *entry;
#ifdef EXPENSIVE_CHECKS
- {
- struct list_head *entry;
- list_for_each(entry, &worklist->live_values) {
- worklist_entry_t *wl_entry
- = list_entry(entry, worklist_entry_t, head);
- assert(wl_entry->value != value);
- }
+ foreach_worklist(entry, worklist) {
+ assert(entry->value != value);
}
#endif
+ entry = obstack_alloc(&obst, sizeof(*entry));
+ memset(entry, 0, sizeof(*entry));
+
entry->value = value;
entry->reload_point = reload_point;
entry->unused_livethrough_loop = unused_livethrough_loop;
some_block = get_irg_start_block(current_ir_graph);
}
- loop_blocks = NEW_ARR_F(block_or_loop_t,0);
+ loop_blocks = NEW_ARR_F(block_or_loop_t, 0);
current_loop = loop;
ir_reserve_resources(current_ir_graph, IR_RESOURCE_BLOCK_VISITED);
worklist_t *end_worklist = pred_block_info->end_worklist;
ir_loop *pred_loop = get_irn_loop(pred_block);
bool is_loop_entry = false;
- struct list_head *entry;
+ worklist_entry_t *entry;
assert(end_worklist != NULL);
/* reload missing values */
activate_worklist(end_worklist);
- list_for_each(entry, &start_worklist->live_values) {
- worklist_entry_t *wl_entry
- = list_entry(entry, worklist_entry_t, head);
- ir_node *value = wl_entry->value;
+ foreach_worklist(entry, start_worklist) {
+ ir_node *value = entry->value;
+
+ if (!is_loop_entry && entry->unused_livethrough_loop != NULL)
+ continue;
if (is_Phi(value) && get_nodes_block(value) == block) {
value = get_irn_n(value, i);
if (!arch_irn_consider_in_reg_alloc(cls, value))
continue;
}
-
if (worklist_contains(value))
continue;
- if (wl_entry->unused_livethrough_loop != NULL && !is_loop_entry)
- continue;
-
be_add_reload_on_edge(senv, value, block, i, cls, 1);
}