fix warnings: adapt to changed node constructor signatures — drop the mode argument from new_rd_Const/new_r_Const/new_Const (the mode now comes from the tarval), pass the new trailing flag to new_rd_Load/new_r_Load, and fold tarval construction into new_Const_long where possible
[libfirm] / ir / lower / lower_hl.c
index 28f4d50..391466a 100644 (file)
@@ -119,7 +119,7 @@ static void lower_sel(ir_node *sel) {
 
                                /* Size of the array element */
                                tv = new_tarval_from_long(get_type_size_bytes(basetyp), mode_Int);
-                               last_size = new_rd_Const(dbg, irg, mode_Int, tv);
+                               last_size = new_rd_Const(dbg, irg, tv);
 
                                /*
                                 * We compute the offset part of dimension d_i recursively
@@ -188,7 +188,7 @@ static void lower_sel(ir_node *sel) {
 
                                newn = new_rd_Add(dbg, irg, bl, get_Sel_ptr(sel),
                                        new_rd_Mul(dbg, irg, bl, index,
-                                       new_r_Const(irg, idx_mode, tv),
+                                       new_r_Const(irg, tv),
                                        idx_mode),
                                        mode);
                        }
@@ -201,14 +201,14 @@ static void lower_sel(ir_node *sel) {
 
                        /* We need an additional load when accessing methods from a dispatch table. */
                        tv   = new_tarval_from_long(get_entity_offset(ent), mode_Int);
-                       cnst = new_rd_Const(dbg, irg, mode_Int, tv);
+                       cnst = new_rd_Const(dbg, irg, tv);
                        add  = new_rd_Add(dbg, irg, bl, get_Sel_ptr(sel), cnst, mode);
 #ifdef DO_CACHEOPT  /* cacheopt version */
-                       newn = new_rd_Load(dbg, irg, bl, get_Sel_mem(sel), sel, ent_mode);
+                       newn = new_rd_Load(dbg, irg, bl, get_Sel_mem(sel), sel, ent_mode, 0);
                        cacheopt_map_addrs_register_node(newn);
                        set_Load_ptr(newn, add);
 #else /* normal code */
-                       newn = new_rd_Load(dbg, irg, bl, get_Sel_mem(sel), add, ent_mode);
+                       newn = new_rd_Load(dbg, irg, bl, get_Sel_mem(sel), add, ent_mode, 0);
 #endif
                        newn = new_r_Proj(irg, bl, newn, ent_mode, pn_Load_res);
 
@@ -224,7 +224,7 @@ static void lower_sel(ir_node *sel) {
                                ir_mode *mode_UInt = get_reference_mode_unsigned_eq(mode);
 
                                tv = new_tarval_from_long(offset, mode_UInt);
-                               cnst = new_r_Const(irg, mode_UInt, tv);
+                               cnst = new_r_Const(irg, tv);
                                newn = new_rd_Add(dbg, irg, bl, newn, cnst, mode);
                        }
                } else {
@@ -258,8 +258,7 @@ static void lower_symconst(ir_node *symc) {
                tp   = get_SymConst_type(symc);
                assert(get_type_state(tp) == layout_fixed);
                mode = get_irn_mode(symc);
-               tv   = new_tarval_from_long(get_type_size_bytes(tp), mode);
-               newn = new_Const(get_irn_mode(symc), tv);
+               newn = new_Const_long(mode, get_type_size_bytes(tp));
                assert(newn);
                /* run the hooks */
                hook_lower(symc);
@@ -270,8 +269,7 @@ static void lower_symconst(ir_node *symc) {
                tp   = get_SymConst_type(symc);
                assert(get_type_state(tp) == layout_fixed);
                mode = get_irn_mode(symc);
-               tv   = new_tarval_from_long(get_type_alignment_bytes(tp), mode);
-               newn = new_Const(mode, tv);
+               newn = new_Const_long(mode, get_type_alignment_bytes(tp));
                assert(newn);
                /* run the hooks */
                hook_lower(symc);
@@ -288,8 +286,7 @@ static void lower_symconst(ir_node *symc) {
                ent  = get_SymConst_entity(symc);
                assert(get_type_state(get_entity_type(ent)) == layout_fixed);
                mode = get_irn_mode(symc);
-               tv   = new_tarval_from_long(get_entity_offset(ent), mode);
-               newn = new_Const(mode, tv);
+               newn = new_Const_long(mode, get_entity_offset(ent));
                assert(newn);
                /* run the hooks */
                hook_lower(symc);
@@ -300,7 +297,7 @@ static void lower_symconst(ir_node *symc) {
                ec   = get_SymConst_enum(symc);
                assert(get_type_state(get_enumeration_owner(ec)) == layout_fixed);
                tv   = get_enumeration_value(ec);
-               newn = new_Const(get_irn_mode(symc), tv);
+               newn = new_Const(tv);
                assert(newn);
                /* run the hooks */
                hook_lower(symc);
@@ -397,11 +394,11 @@ static void lower_bitfields_loads(ir_node *proj, ir_node *load) {
 
                if (shift_count_up) {
                        res = new_r_Shl(current_ir_graph, block, res,
-                               new_Const(mode_Iu, new_tarval_from_long(shift_count_up, mode_Iu)), mode);
+                               new_Const_long(mode_Iu, shift_count_up), mode);
                }
                if (shift_count_down) {
                        res = new_r_Shrs(current_ir_graph, block, res,
-                               new_Const(mode_Iu, new_tarval_from_long(shift_count_down, mode_Iu)), mode);
+                               new_Const_long(mode_Iu, shift_count_down), mode);
                }
        } else { /* unsigned */
                int shift_count_down  = bit_offset;
@@ -409,11 +406,11 @@ static void lower_bitfields_loads(ir_node *proj, ir_node *load) {
 
                if (shift_count_down) {
                        res = new_r_Shr(current_ir_graph, block, res,
-                               new_Const(mode_Iu, new_tarval_from_long(shift_count_down, mode_Iu)), mode);
+                               new_Const_long(mode_Iu, shift_count_down), mode);
                }
                if (bits != bf_bits) {
                        res = new_r_And(current_ir_graph, block, res,
-                               new_Const(mode, new_tarval_from_long(mask, mode)), mode);
+                               new_Const_long(mode, mask), mode);
                }
        }
 
@@ -481,20 +478,20 @@ static void lower_bitfields_stores(ir_node *store) {
 
        if (neg_mask) {
                /* there are some bits, normal case */
-               irn  = new_r_Load(current_ir_graph, block, mem, ptr, mode);
+               irn  = new_r_Load(current_ir_graph, block, mem, ptr, mode, 0);
                mem  = new_r_Proj(current_ir_graph, block, irn, mode_M, pn_Load_M);
                irn  = new_r_Proj(current_ir_graph, block, irn, mode, pn_Load_res);
 
                irn = new_r_And(current_ir_graph, block, irn,
-                       new_Const(mode, new_tarval_from_long(neg_mask, mode)), mode);
+                       new_Const_long(mode, neg_mask), mode);
 
                if (bit_offset > 0) {
                        value = new_r_Shl(current_ir_graph, block, value,
-                               new_Const(mode_Iu, new_tarval_from_long(bit_offset, mode_Iu)), mode);
+                               new_Const_long(mode_Iu, bit_offset), mode);
                }
 
                value = new_r_And(current_ir_graph, block, value,
-                       new_Const(mode, new_tarval_from_long(mask, mode)), mode);
+                       new_Const_long(mode, mask), mode);
 
                value = new_r_Or(current_ir_graph, block, value, irn, mode);
        }