}
}
- if (reqs[i]->type & arch_register_req_type_should_be_different) {
+ if (reqs[i]->type & arch_register_req_type_must_be_different) {
unsigned other = reqs[i]->other_different;
int i;
if (is_ia32_SwitchJmp(n)) {
fprintf(F, "pn_code = %ld\n", get_ia32_condcode(n));
} else if (is_ia32_CMov(n) || is_ia32_Set(n) || is_ia32_Jcc(n)) {
+ ia32_attr_t *attr = get_ia32_attr(n);
long pnc = get_ia32_condcode(n);
- fprintf(F, "pn_code = %ld (%s)\n", pnc, get_pnc_string(pnc));
+ fprintf(F, "pn_code = 0x%lX (%s)\n", pnc, get_pnc_string(pnc & pn_Cmp_True));
+ fprintf(F, "ins_permuted = %u \n", attr->data.ins_permuted);
+ fprintf(F, "cmp_unsigned = %u \n", attr->data.cmp_unsigned);
}
else if (is_ia32_CopyB(n) || is_ia32_CopyB_i(n)) {
fprintf(F, "size = %u\n", get_ia32_copyb_size(n));
}
- /* dump n_res */
fprintf(F, "n_res = %d\n", get_ia32_n_res(n));
-
- /* dump use_frame */
fprintf(F, "use_frame = %d\n", is_ia32_use_frame(n));
-
- /* commutative */
fprintf(F, "commutative = %d\n", is_ia32_commutative(n));
-
- /* need stackent */
fprintf(F, "need stackent = %d\n", is_ia32_need_stackent(n));
-
- /* dump latency */
+ fprintf(F, "is reload = %d\n", is_ia32_is_reload(n));
fprintf(F, "latency = %d\n", get_ia32_latency(n));
/* dump flags */
if (flags & arch_irn_flags_modify_sp) {
fprintf(F, " modify_sp");
}
+ if (flags & arch_irn_flags_modify_flags) {
+ fprintf(F, " modify_flags");
+ }
}
fprintf(F, " (%d)\n", flags);
* Sets the index register scale for address mode.
*/
void set_ia32_am_scale(ir_node *node, int scale) {
-	ia32_attr_t *attr = get_ia32_attr(node);
+	ia32_attr_t *attr = get_ia32_attr(node);
+	/* NOTE(review): valid range 0..3 suggests scale is the 2-bit SIB
+	 * scale exponent (factor 1<<scale) — confirm against the encoder. */
+	assert(0 <= scale && scale < 4 && "AM scale out of range");
	attr->data.am_scale = scale;
}
return attr->data.need_stackent;
}
+/** Marks @p node as a reload by setting its is_reload attribute bit. */
+void set_ia32_is_reload(ir_node *node) {
+	ia32_attr_t *attr = get_ia32_attr(node);
+	attr->data.is_reload = 1;
+}
+
+/** Returns non-zero iff @p node has its is_reload attribute bit set. */
+int is_ia32_is_reload(const ir_node *node) {
+	const ia32_attr_t *attr = get_ia32_attr_const(node);
+	return attr->data.is_reload;
+}
+
+/** Marks @p node as a spill by setting its is_spill attribute bit. */
+void set_ia32_is_spill(ir_node *node) {
+	ia32_attr_t *attr = get_ia32_attr(node);
+	attr->data.is_spill = 1;
+}
+
+/** Returns non-zero iff @p node has its is_spill attribute bit set. */
+int is_ia32_is_spill(const ir_node *node) {
+	const ia32_attr_t *attr = get_ia32_attr_const(node);
+	return attr->data.is_spill;
+}
+
+/** Marks @p node as rematerialized by setting its is_remat attribute bit. */
+void set_ia32_is_remat(ir_node *node) {
+	ia32_attr_t *attr = get_ia32_attr(node);
+	attr->data.is_remat = 1;
+}
+
+/** Returns non-zero iff @p node has its is_remat attribute bit set. */
+int is_ia32_is_remat(const ir_node *node) {
+	const ia32_attr_t *attr = get_ia32_attr_const(node);
+	return attr->data.is_remat;
+}
+
/**
* Gets the mode of the stored/loaded value (only set for Store/Load)
*/
*/
unsigned get_ia32_exc_label(const ir_node *node) {
	const ia32_attr_t *attr = get_ia32_attr_const(node);
-	return attr->data.except_label;
+	/* attribute bit renamed: except_label -> has_except_label */
+	return attr->data.has_except_label;
}
/**
*/
void set_ia32_exc_label(ir_node *node, unsigned flag) {
	ia32_attr_t *attr = get_ia32_attr(node);
-	attr->data.except_label = flag;
+	/* attribute bit renamed: except_label -> has_except_label */
+	attr->data.has_except_label = flag;
+}
+
+/**
+ * Return the exception label id.
+ * Only valid while the node's has_except_label bit is set (asserted).
+ */
+ir_label_t get_ia32_exc_label_id(const ir_node *node) {
+	const ia32_attr_t *attr = get_ia32_attr_const(node);
+
+	assert(attr->data.has_except_label);
+	return attr->exc_label;
+}
+
+/**
+ * Assign the exception label id.
+ * The node must already be flagged via set_ia32_exc_label() (asserted).
+ */
+void set_ia32_exc_label_id(ir_node *node, ir_label_t id) {
+	ia32_attr_t *attr = get_ia32_attr(node);
+
+	assert(attr->data.has_except_label);
+	attr->exc_label = id;
}
#ifndef NDEBUG
if (a->data.tp != b->data.tp)
return 1;
- if (a->data.except_label != b->data.except_label)
+ if (a->data.has_except_label != b->data.has_except_label)
return 1;
if (a->data.ins_permuted != b->data.ins_permuted
return 0;
}
+/**
+ * Hash function for Immediates.
+ * Folds the symconst pointer, the sign flag (shifted into bit 16) and
+ * the numeric offset into one hash value.
+ */
+static unsigned ia32_hash_Immediate(const ir_node *irn) {
+	const ia32_immediate_attr_t *a = get_ia32_immediate_attr_const(irn);
+
+	return HASH_PTR(a->symconst) + (a->sc_sign << 16) + a->offset;
+}
+
/** Compare node attributes for Immediates. */
static
int ia32_compare_immediate_attr(ir_node *a, ir_node *b)