* Lowers unaligned Loads.
*/
static void lower_unaligned_Load(ir_node *load) {
+ (void) load;  /* suppress unused-parameter warning until implemented */
/* NYI: unaligned Load lowering is not yet implemented; this stub is a no-op */
}
* Lowers unaligned Stores.
*/
static void lower_unaligned_Store(ir_node *store) {
+ (void) store;  /* suppress unused-parameter warning until implemented */
/* NYI: unaligned Store lowering is not yet implemented; this stub is a no-op */
}
if (env != NULL && get_Store_align(irn) == align_non_aligned)
lower_unaligned_Store(irn);
break;
+ case iro_Cast:
+ exchange(irn, get_Cast_op(irn));
+ break;
default:
break;
}
/* First step: lower bitfield access: must be run as long as Sels still exists. */
irg_walk_graph(irg, NULL, lower_bf_access, NULL);
- /* Finally: lower SymConst-Size and Sel nodes, unaligned Load/Stores. */
+ /* Finally: lower SymConst-Size and Sel nodes, Casts, unaligned Load/Stores. */
irg_walk_graph(irg, NULL, lower_irnode, NULL);
set_irg_phase_low(irg);