set_Store_ptr(store, ptr);
} /* lower_bitfields_stores */
+/**
+ * Lowers an unaligned Load node (not yet implemented; currently a no-op stub).
+ */
+static void lower_unaligned_Load(ir_node *load) {
+	/* NYI: presumably should split the access into smaller aligned Loads -- TODO implement */
+}
+
+/**
+ * Lowers an unaligned Store node (not yet implemented; currently a no-op stub).
+ */
+static void lower_unaligned_Store(ir_node *store) {
+	/* NYI: presumably should split the access into smaller aligned Stores -- TODO implement */
+}
+
/**
* lowers IR-nodes, called from walker
*/
case iro_SymConst:
lower_symconst(irn);
break;
+ case iro_Load:
+ if (env != NULL && get_Load_align(irn) == align_non_aligned)
+ lower_unaligned_Load(irn);
+ break;
+ case iro_Store:
+ if (env != NULL && get_Store_align(irn) == align_non_aligned)
+ lower_unaligned_Store(irn);
+ break;
default:
break;
}
/* First step: lower bitfield access: must be run as long as Sels still exists. */
irg_walk_graph(irg, NULL, lower_bf_access, NULL);
- /* Finally: lower SymConst-Size and Sel nodes. */
+ /* Finally: lower SymConst-Size and Sel nodes, and unaligned Loads/Stores. */
irg_walk_graph(irg, NULL, lower_irnode, NULL);
set_irg_phase_low(irg);