9 #include "lower_intrinsics.h"
14 /** The array of all intrinsics that must be mapped. */
15 static i_record *intrinsics;
17 /** An array to cache all entities */
18 static entity *i_ents[iro_MaxOpcode];
21 * Maps all intrinsic calls that the backend supports
22 * and all instructions that the backend does not support
25 void ia32_handle_intrinsics(void) {
/* Run the generic intrinsic lowering pass over every i_record registered
 * so far (records are appended by ia32_create_intrinsic_fkt below).
 * The guard skips the pass when nothing was registered.
 * NOTE(review): the closing brace is elided in this excerpt. */
26 if (intrinsics && ARR_LEN(intrinsics) > 0)
27 lower_intrinsics(intrinsics, ARR_LEN(intrinsics));
30 #define BINOP_Left_Low 0
31 #define BINOP_Left_High 1
32 #define BINOP_Right_Low 2
33 #define BINOP_Right_High 3
36 * Map an Add (a_l, a_h, b_l, b_h)
38 static int map_Add(ir_node *call, void *ctx) {
/* i_mapper_func: lowers a call to the 64bit Add intrinsic in place.
 * The four Call parameters are the low/high words of the two operands
 * (see the BINOP_* indices above); the result is a (low, high) pair.
 * NOTE(review): the construction of l_res/h_res, the filling of in[],
 * and the final return statement are elided in this excerpt. */
39 ir_graph *irg = current_ir_graph;
40 ir_node *block = get_nodes_block(call);
41 ir_node **params = get_Call_param_arr(call);
42 ir_node *l_res, *h_res, *res, *in[2];
43 ir_node *a_l = params[0]; /* BINOP_Left_Low */
44 ir_node *a_h = params[1]; /* BINOP_Left_High */
45 ir_node *b_l = params[2]; /* BINOP_Right_Low */
46 ir_node *b_h = params[3]; /* BINOP_Right_High */
48 /* l_res = a_l + b_l */
49 /* h_res = a_h + b_h + carry */
/* Pack the two result words into one Tuple; in[] is presumably set to
 * {l_res, h_res} on the elided lines — confirm against the full source. */
53 res = new_r_Tuple(irg, block, 2, in);
/* Turn the Call itself into a Tuple so its Projs select the values below:
 * the emulated add touches no memory and cannot raise an exception, hence
 * no-mem for the regular memory output and Bad for all exception outputs. */
55 turn_into_tuple(call, pn_Call_max);
56 set_Tuple_pred(call, pn_Call_M_regular, get_irg_no_mem(irg));
57 set_Tuple_pred(call, pn_Call_X_except, get_irg_bad(irg));
58 set_Tuple_pred(call, pn_Call_T_result, res);
59 set_Tuple_pred(call, pn_Call_M_except, get_irg_bad(irg));
60 set_Tuple_pred(call, pn_Call_P_value_res_base, get_irg_bad(irg));
66 * Map a Sub (a_l, a_h, b_l, b_h)
68 static int map_Sub(ir_node *call, void *ctx) {
/* i_mapper_func: lowers a call to the 64bit Sub intrinsic in place.
 * Mirrors map_Add above: parameters are the low/high words of the two
 * operands, the result is a (low, high) pair delivered through a Tuple.
 * NOTE(review): the construction of l_res/h_res, the filling of in[],
 * and the final return statement are elided in this excerpt. */
69 ir_graph *irg = current_ir_graph;
70 ir_node *block = get_nodes_block(call);
71 ir_node **params = get_Call_param_arr(call);
72 ir_node *l_res, *h_res, *res, *in[2];
73 ir_node *a_l = params[0]; /* BINOP_Left_Low */
74 ir_node *a_h = params[1]; /* BINOP_Left_High */
75 ir_node *b_l = params[2]; /* BINOP_Right_Low */
76 ir_node *b_h = params[3]; /* BINOP_Right_High */
78 /* l_res = a_l - b_l */
79 /* h_res = a_h - b_h - carry */
/* Pack the two result words into one Tuple; in[] is presumably set to
 * {l_res, h_res} on the elided lines — confirm against the full source. */
83 res = new_r_Tuple(irg, block, 2, in);
/* Rewire the Call as a Tuple: no memory effect, no possible exception,
 * so memory is no-mem and every exception output is Bad. */
85 turn_into_tuple(call, pn_Call_max);
86 set_Tuple_pred(call, pn_Call_M_regular, get_irg_no_mem(irg));
87 set_Tuple_pred(call, pn_Call_X_except, get_irg_bad(irg));
88 set_Tuple_pred(call, pn_Call_T_result, res);
89 set_Tuple_pred(call, pn_Call_M_except, get_irg_bad(irg));
90 set_Tuple_pred(call, pn_Call_P_value_res_base, get_irg_bad(irg));
96 * Ia32 implementation.
98 * @param method the method type of the emulation function entity
99 * @param op the emulated ir_op
100 * @param imode the input mode of the emulated opcode
101 * @param omode the output mode of the emulated opcode
102 * @param context the context parameter
104 entity *ia32_create_intrinsic_fkt(ir_type *method, const ir_op *op,
105 const ir_mode *imode, const ir_mode *omode,
/* Creates (or reuses) the emulation-function entity for the given opcode,
 * caches it in i_ents[], and appends an INTRINSIC_CALL i_record so that
 * ia32_handle_intrinsics() can later map calls to it.
 * NOTE(review): many lines are elided in this excerpt — the 'context'
 * parameter line, the declarations of 'ent' and 'elt', the switch case
 * labels, the mapper assignments, and the function's return/closing brace. */
110 i_mapper_func mapper;
/* NOTE(review): the line preceding this one is elided — presumably a
 * 'if (intrinsics == NULL)' guard. If the allocation really is
 * unconditional, every call would discard previously registered records
 * (and leak the old array) — confirm against the full source. */
113 intrinsics = NEW_ARR_F(i_record, 0);
115 switch (get_op_code(op)) {
/* Per-opcode entity cache slot; the 'case iro_...' labels and the
 * 'mapper = map_...' assignments sit on elided lines. */
117 ent = &i_ents[iro_Add];
121 ent = &i_ents[iro_Sub];
/* Opcodes not handled above fall back to the default creator. */
125 return def_create_intrinsic_fkt(method, op, imode, omode, context);
129 #define IDENT(s) new_id_from_chars(s, sizeof(s)-1)
/* Entity is named "L" + the op name and placed in the global type. */
131 ident *id = mangle(IDENT("L"), get_op_name(op));
132 *ent = new_entity(get_glob_type(), id, method);
/* Register the record binding the new entity to its mapper function. */
135 elt.i_call.kind = INTRINSIC_CALL;
136 elt.i_call.i_ent = *ent;
137 elt.i_call.i_mapper = mapper;
138 elt.i_call.ctx = NULL;
139 elt.i_call.link = NULL;
141 ARR_APP1(i_record, intrinsics, elt);