*/
static int can_escape(ir_node *n)
{
- int i, j, k;
+ int i;
/* should always be pointer mode or we made some mistake */
assert(mode_is_reference(get_irn_mode(n)));
ir_entity *ent;
if (is_SymConst_addr_ent(ptr)) {
+ size_t j;
ent = get_SymConst_entity(ptr);
/* we know the called entity */
- for (j = get_Call_n_params(succ) - 1; j >= 0; --j) {
- if (get_Call_param(succ, j) == n) {
+ for (j = get_Call_n_params(succ); j > 0;) {
+ if (get_Call_param(succ, --j) == n) {
/* n is the j'th param of the call */
if (get_method_param_access(ent, j) & ptr_access_store)
/* n is stored in ent */
return 1;
}
}
- }
- else if (is_Sel(ptr)) {
+ } else if (is_Sel(ptr)) {
+ size_t k;
+
/* go through all possible callees */
- for (k = get_Call_n_callees(succ) - 1; k >= 0; --k) {
- ent = get_Call_callee(succ, k);
+ for (k = get_Call_n_callees(succ); k > 0;) {
+ size_t j;
+ ent = get_Call_callee(succ, --k);
if (ent == unknown_entity) {
/* we don't know what will be called, a possible escape */
return 1;
}
- for (j = get_Call_n_params(succ) - 1; j >= 0; --j) {
- if (get_Call_param(succ, j) == n) {
+ for (j = get_Call_n_params(succ); j > 0;) {
+ if (get_Call_param(succ, --j) == n) {
/* n is the j'th param of the call */
if (get_method_param_access(ent, j) & ptr_access_store)
/* n is stored in ent */
}
}
}
- }
- else /* we don't know want will called */
} else /* we don't know what will be called */
return 1;
break;
case iro_Tuple: {
ir_node *proj;
+ int j, k;
/* Bad: trace the tuple backwards */
for (j = get_irn_arity(succ) - 1; j >= 0; --j)
turn_into_tuple(alloc, pn_Alloc_max);
set_Tuple_pred(alloc, pn_Alloc_M, mem);
set_Tuple_pred(alloc, pn_Alloc_X_regular, new_r_Jmp(blk));
- set_Tuple_pred(alloc, pn_Alloc_X_except, new_r_Bad(irg));
+ set_Tuple_pred(alloc, pn_Alloc_X_except, new_r_Bad(irg, mode_X));
++env->nr_deads;
}
turn_into_tuple(alloc, pn_Alloc_max);
set_Tuple_pred(alloc, pn_Alloc_M, mem);
set_Tuple_pred(alloc, pn_Alloc_X_regular, new_r_Jmp(blk));
- set_Tuple_pred(alloc, pn_Alloc_X_except, new_r_Bad(irg));
+ set_Tuple_pred(alloc, pn_Alloc_X_except, new_r_Bad(irg, mode_X));
set_Tuple_pred(alloc, pn_Alloc_res, sel);
++env->nr_removed;
}
/* if allocs were removed somehow */
- if (env->nr_removed | env->nr_deads) {
- set_irg_outs_inconsistent(irg);
-
- if (env->nr_deads) {
- /* exception control flow might have been changed */
- set_irg_doms_inconsistent(irg);
- }
+ if (env->nr_removed && env->nr_deads) {
+ /* exception control flow might have been changed */
+ set_irg_doms_inconsistent(irg);
}
}
mem = get_Call_mem(call);
blk = get_nodes_block(call);
turn_into_tuple(call, pn_Call_max);
- set_Tuple_pred(call, pn_Call_M, mem);
- set_Tuple_pred(call, pn_Call_X_regular, new_r_Jmp(blk));
- set_Tuple_pred(call, pn_Call_X_except, new_r_Bad(irg));
- set_Tuple_pred(call, pn_Call_T_result, new_r_Bad(irg));
- set_Tuple_pred(call, pn_Call_P_value_res_base, new_r_Bad(irg));
+ set_Tuple_pred(call, pn_Call_M, mem);
+ set_Tuple_pred(call, pn_Call_X_regular, new_r_Jmp(blk));
+ set_Tuple_pred(call, pn_Call_X_except, new_r_Bad(irg, mode_X));
+ set_Tuple_pred(call, pn_Call_T_result, new_r_Bad(irg, mode_T));
++env->nr_deads;
}