+ loc.time = next_use.time;
+
+ if (improve_known_preds) {
+ if (available == AVAILABLE_EVERYWHERE) {
+ DB((dbg, DBG_START, " %+F taken (%u, live in all preds)\n",
+ node, loc.time));
+ return loc;
+ } else if(available == AVAILABLE_NOWHERE) {
+ DB((dbg, DBG_START, " %+F not taken (%u, live in no pred)\n",
+ node, loc.time));
+ loc.time = USES_INFINITY;
+ return loc;
+ }
+ }
+
+ if (!respectloopdepth || next_use.outermost_loop >= get_loop_depth(loop)) {
+ DB((dbg, DBG_START, " %+F taken (%u, loop %d)\n", node, loc.time,
+ next_use.outermost_loop));
+ } else {
+ loc.time = USES_PENDING;
+ DB((dbg, DBG_START, " %+F delayed (outerdepth %d < loopdepth %d)\n",
+ node, next_use.outermost_loop, get_loop_depth(loop)));
+ }
+
+ return loc;
+}
+
+/**
+ * Computes the start-workset for a block with multiple predecessors. We assume
+ * that at least one of the predecessors is a back-edge, which means we're at
+ * the beginning of a loop. We try to reload as many values as possible now so
+ * they don't get reloaded inside the loop.
+ */
+static void decide_start_workset(const ir_node *block)
+{
+ ir_loop *loop = get_irn_loop(block);
+ ir_node *first;
+ ir_node *node;
+ loc_t loc;
+ loc_t *starters;
+ loc_t *delayed;
+ int i, len, ws_count;
+ int free_slots, free_pressure_slots;
+ unsigned pressure;
+ int arity;
+ workset_t **pred_worksets;
+ bool all_preds_known;
+
+ /* check predecessors */
+ arity = get_irn_arity(block);
+ pred_worksets = ALLOCAN(workset_t*, arity);
+ all_preds_known = true;
+ for(i = 0; i < arity; ++i) {
+ ir_node *pred_block = get_Block_cfgpred_block(block, i);
+ block_info_t *pred_info = get_block_info(pred_block);
+
+ if (pred_info == NULL) {
+ pred_worksets[i] = NULL;
+ all_preds_known = false;
+ } else {
+ pred_worksets[i] = pred_info->end_workset;
+ }
+ }
+
+ /* Collect all values living at start of block */
+ starters = NEW_ARR_F(loc_t, 0);
+ delayed = NEW_ARR_F(loc_t, 0);
+
+ DB((dbg, DBG_START, "Living at start of %+F:\n", block));
+ first = sched_first(block);
+
+ /* check all Phis first */
+ sched_foreach(block, node) {
+ unsigned available;
+
+ if (! is_Phi(node))
+ break;
+ if (!arch_irn_consider_in_reg_alloc(cls, node))