10 #include "lower_intrinsics.h"
15 /** Flexible array (built with NEW_ARR_F) of all intrinsic records that must be mapped; filled by ia32_create_intrinsic_fkt(). */
16 static i_record *intrinsics;
18 /** Cache of created intrinsic entities, one slot per ir opcode (e.g. i_ents[iro_Add]). */
19 static entity *i_ents[iro_MaxOpcode];
22 * Maps all intrinsic calls that the backend supports
23 * and maps all instructions the backend does not support
26 void ia32_handle_intrinsics(void) {
/* Nothing to do if no intrinsic record was registered. */
27 if (intrinsics && ARR_LEN(intrinsics) > 0)
28 lower_intrinsics(intrinsics, ARR_LEN(intrinsics));
/* Positions of the four 32-bit halves inside the parameter array of a
 * lowered 64-bit binop call: (left_low, left_high, right_low, right_high).
 * Used by the map_* functions below to unpack the operands. */
31 #define BINOP_Left_Low 0
32 #define BINOP_Left_High 1
33 #define BINOP_Right_Low 2
34 #define BINOP_Right_High 3
37 * Map an Add (a_l, a_h, b_l, b_h)
/* Intrinsic mapper: lowers a call implementing a 64-bit Add, whose two
 * operands are passed as (low, high) 32-bit halves, into graph nodes.
 * The ctx argument is unused here; it is part of the i_mapper_func
 * signature. */
39 static int map_Add(ir_node *call, void *ctx) {
40 ir_graph *irg = current_ir_graph;
41 ir_node *block = get_nodes_block(call);
42 ir_node **params = get_Call_param_arr(call);
43 ir_node *l_res, *h_res, *res, *in[2];
/* Unpack the four 32-bit halves of the two 64-bit operands. */
44 ir_node *a_l = params[BINOP_Left_Low];
45 ir_node *a_h = params[BINOP_Left_High];
46 ir_node *b_l = params[BINOP_Right_Low];
47 ir_node *b_h = params[BINOP_Right_High];
49 /* l_res = a_l + b_l */
50 /* h_res = a_h + b_h + carry */
/* NOTE(review): the construction of l_res/h_res and the filling of
 * in[] (presumably {l_res, h_res}) is in lines not shown in this
 * chunk — confirm against the full source. */
54 res = new_r_Tuple(irg, block, 2, in);
/* Replace the Call by a Tuple so existing Proj users keep working:
 * the regular memory output becomes NoMem (this intrinsic touches no
 * memory), both exception outputs become Bad (it cannot raise), and
 * the result output is the (low, high) tuple built above. */
56 turn_into_tuple(call, pn_Call_max);
57 set_Tuple_pred(call, pn_Call_M_regular, get_irg_no_mem(irg));
58 set_Tuple_pred(call, pn_Call_X_except, get_irg_bad(irg));
59 set_Tuple_pred(call, pn_Call_T_result, res);
60 set_Tuple_pred(call, pn_Call_M_except, get_irg_bad(irg));
61 set_Tuple_pred(call, pn_Call_P_value_res_base, get_irg_bad(irg));
67 * Map a Sub (a_l, a_h, b_l, b_h)
/* Intrinsic mapper: lowers a call implementing a 64-bit Sub, whose two
 * operands are passed as (low, high) 32-bit halves, into graph nodes.
 * Mirrors map_Add above; ctx is unused (i_mapper_func signature). */
69 static int map_Sub(ir_node *call, void *ctx) {
70 ir_graph *irg = current_ir_graph;
71 ir_node *block = get_nodes_block(call);
72 ir_node **params = get_Call_param_arr(call);
73 ir_node *l_res, *h_res, *res, *in[2];
/* Unpack the four 32-bit halves of the two 64-bit operands. */
74 ir_node *a_l = params[BINOP_Left_Low];
75 ir_node *a_h = params[BINOP_Left_High];
76 ir_node *b_l = params[BINOP_Right_Low];
77 ir_node *b_h = params[BINOP_Right_High];
79 /* l_res = a_l - b_l */
80 /* h_res = a_h - b_h - carry */
/* NOTE(review): the construction of l_res/h_res and the filling of
 * in[] (presumably {l_res, h_res}) is in lines not shown in this
 * chunk — confirm against the full source. */
84 res = new_r_Tuple(irg, block, 2, in);
/* Replace the Call by a Tuple so existing Proj users keep working:
 * the regular memory output becomes NoMem (this intrinsic touches no
 * memory), both exception outputs become Bad (it cannot raise), and
 * the result output is the (low, high) tuple built above. */
86 turn_into_tuple(call, pn_Call_max);
87 set_Tuple_pred(call, pn_Call_M_regular, get_irg_no_mem(irg));
88 set_Tuple_pred(call, pn_Call_X_except, get_irg_bad(irg));
89 set_Tuple_pred(call, pn_Call_T_result, res);
90 set_Tuple_pred(call, pn_Call_M_except, get_irg_bad(irg));
91 set_Tuple_pred(call, pn_Call_P_value_res_base, get_irg_bad(irg));
96 /* Ia32 implementation. */
/* Creates (or looks up) the entity representing an intrinsic function
 * for the given op/mode combination and registers an i_record so that
 * ia32_handle_intrinsics() later rewrites calls to it via the chosen
 * mapper.  Ops not handled here are delegated to the default factory. */
97 entity *ia32_create_intrinsic_fkt(ir_type *method, const ir_op *op,
98 const ir_mode *imode, const ir_mode *omode,
103 i_mapper_func mapper;
/* Create the flexible intrinsics array on first use.
 * NOTE(review): the guard around this allocation is in lines elided
 * from this chunk — confirm it only runs when intrinsics == NULL. */
106 intrinsics = NEW_ARR_F(i_record, 0);
/* Pick the per-opcode entity cache slot; the matching mapper
 * (map_Add / map_Sub) is presumably assigned in the elided case
 * bodies — confirm against the full source. */
108 switch (get_op_code(op)) {
110 ent = &i_ents[iro_Add];
114 ent = &i_ents[iro_Sub];
/* Unsupported op: fall back to the default implementation. */
118 return def_create_intrinsic_fkt(method, op, imode, omode, context);
122 #define IDENT(s) new_id_from_chars(s, sizeof(s)-1)
/* Build the entity name by mangling an "L" prefix with the op name
 * and create it in the global type. */
124 ident *id = mangle(IDENT("L"), get_op_name(op));
125 *ent = new_entity(get_glob_type(), id, method);
/* Fill an INTRINSIC_CALL record pointing at the entity and mapper
 * and append it to the global list consumed by
 * ia32_handle_intrinsics(). */
128 elt.i_call.kind = INTRINSIC_CALL;
129 elt.i_call.i_ent = *ent;
130 elt.i_call.i_mapper = mapper;
131 elt.i_call.ctx = NULL;
132 elt.i_call.link = NULL;
134 ARR_APP1(i_record, intrinsics, elt);