2 * Copyright (C) 1995-2008 University of Karlsruhe. All right reserved.
4 * This file is part of libFirm.
6 * This file may be distributed and/or modified under the terms of the
7 * GNU General Public License version 2 as published by the Free Software
8 * Foundation and appearing in the file LICENSE.GPL included in the
9 * packaging of this file.
11 * Licensees holding valid libFirm Professional Edition licenses may use
12 * this file in accordance with the libFirm Commercial License.
13 * Agreement provided with the Software.
15 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 * @brief lowers operations with mode_b. The result is a graph which
* might still contain some convs from/to mode_b, but no
24 * operations are performed on them anymore, they are just there
25 * so modes match. A backend can safely skip all mode_b convs.
26 * @author Matthias Braun, Christoph Mallon
/* Active lowering configuration — presumably copied from the nconfig
 * argument of ir_lower_mode_b(); the assignment is not visible in this
 * excerpt (TODO confirm). */
static lower_mode_b_config_t config;
/* Lazily created primitive type substituted for mode_b parameters/results
 * in method types; see create_lowered_type(). */
static ir_type *lowered_type = NULL;
/* Work list of original mode_b nodes that have been replaced; they are
 * killed at the end of the pass (see ir_lower_mode_b()). */
static pdeq *lowered_nodes = NULL;
/**
 * Removes a node if its out-edge count has reached 0.
 * temporary hack until we have proper automatic dead code elimination.
 */
static void maybe_kill_node(ir_node *node)
	/* still referenced somewhere -> must stay alive */
	if(get_irn_n_edges(node) != 0)
	irg = get_irn_irg(node);
	/* Bad nodes must never be handed to this function */
	assert(!is_Bad(node));
	/* detach all operands so their use counts drop (they may die too) */
	arity = get_irn_arity(node);
	for (i = 0; i < arity; ++i) {
		set_irn_n(node, i, new_Bad());
	set_nodes_block(node, new_Bad());
	/* inform the edge bookkeeping that this node is gone */
	edges_node_deleted(node, irg);
/**
 * Builds the logical complement of an already lowered boolean value.
 * The input is assumed to be 0 or 1 in config.lowered_mode, so
 * "node XOR 1" flips it.
 */
static ir_node *create_not(dbg_info *dbgi, ir_node *node)
	ir_graph *irg = current_ir_graph;
	ir_node *block = get_nodes_block(node);
	ir_mode *mode = config.lowered_mode;
	tarval *tv_one = get_tarval_one(mode);
	ir_node *one = new_d_Const(dbgi, mode, tv_one);
	/* 0/1 XOR 1 == logical not */
	return new_rd_Eor(dbgi, irg, block, node, one, mode);
/**
 * Wraps a lowered value in a Conv back to mode_b, so that consumers which
 * still expect mode_b see a matching mode (backends can skip these Convs).
 */
static ir_node *create_convb(ir_node *node)
	ir_graph *irg = current_ir_graph;
	ir_node *block = get_nodes_block(node);
	ir_node *conv = new_rd_Conv(NULL, irg, block, node, mode_b);
/**
 * Returns the primitive type used to replace mode_b parameters/results in
 * method types.  Created lazily on first use and cached in lowered_type
 * (singleton).
 */
static ir_type *create_lowered_type(void)
	if(lowered_type == NULL) {
		lowered_type = new_type_primitive(new_id_from_str("__lowered_mode_b"),
/**
 * creates a "set" node that produces a 0 or 1 based on a Cmp result
 */
static ir_node *create_set(ir_node *node)
	ir_graph *irg = current_ir_graph;
	dbg_info *dbgi = get_irn_dbg_info(node);
	ir_mode *mode = config.lowered_set_mode;
	tarval *tv_one = get_tarval_one(mode);
	ir_node *one = new_d_Const(dbgi, mode, tv_one);
	ir_node *block = get_nodes_block(node);
	tarval *tv_zero = get_tarval_null(mode);
	ir_node *zero = new_d_Const(dbgi, mode, tv_zero);
	/* Mux(cond, false -> 0, true -> 1): materialises the boolean as 0/1 */
	ir_node *set = new_rd_Mux(dbgi, irg, block, node, zero, one, mode);
	/* the set mode may differ from the general lowered mode -> convert */
	if (mode != config.lowered_mode) {
		set = new_r_Conv(irg, block, set, config.lowered_mode);
/**
 * Rewrites a method type in place: every parameter type and every result
 * type whose mode is mode_b is replaced by the lowered primitive type.
 */
static void adjust_method_type(ir_type *method_type)
	n_params = get_method_n_params(method_type);
	for(i = 0; i < n_params; ++i) {
		ir_type *param = get_method_param_type(method_type, i);
		if(get_type_mode(param) == mode_b) {
			set_method_param_type(method_type, i, create_lowered_type());
	/* same treatment for the result types */
	n_res = get_method_n_ress(method_type);
	for(i = 0; i < n_res; ++i) {
		ir_type *res_type = get_method_res_type(method_type, i);
		if(get_type_mode(res_type) == mode_b) {
			set_method_res_type(method_type, i, create_lowered_type());
/**
 * Recursively lowers one mode_b node, returning the equivalent value in
 * config.lowered_mode.  The replacement is cached in the node's link
 * field, and the original node is queued on lowered_nodes so it can be
 * killed after the walk.
 */
static ir_node *lower_node(ir_node *node)
	ir_graph *irg = current_ir_graph;
	dbg_info *dbgi = get_irn_dbg_info(node);
	ir_node *block = get_nodes_block(node);
	ir_mode *mode = config.lowered_mode;

	assert(get_irn_mode(node) == mode_b);

	/* already lowered?  The link field caches the replacement. */
	res = get_irn_link(node);

	/* TODO: be robust against phi-loops... */
	switch (get_irn_opcode(node)) {
		/* Phi: build the lowered Phi with Unknown inputs first and register
		 * it in the link field BEFORE recursing, so Phi cycles terminate */
		ir_node *unknown, *new_phi;

		arity = get_irn_arity(node);
		in = alloca(arity * sizeof(in[0]));
		unknown = new_Unknown(config.lowered_mode);
		for(i = 0; i < arity; ++i) {
		new_phi = new_r_Phi(irg, block, arity, in, config.lowered_mode);
		set_irn_link(node, new_phi);
		pdeq_putr(lowered_nodes, node);

		/* second pass: lower the real predecessors and wire them in */
		for(i = 0; i < arity; ++i) {
			ir_node *in = get_irn_n(node, i);
			ir_node *low_in = lower_node(in);
			set_irn_n(new_phi, i, low_in);

		/* And/Or/Eor work bitwise on 0/1 values, so an exact copy retyped
		 * to the lowered mode (with lowered inputs) is equivalent */
		ir_node *copy = exact_copy(node);

		arity = get_irn_arity(node);
		for(i = 0; i < arity; ++i) {
			ir_node *in = get_irn_n(node, i);
			ir_node *low_in = lower_node(in);
			set_irn_n(copy, i, low_in);
		set_irn_mode(copy, config.lowered_mode);

		set_irn_link(node, copy);
		pdeq_putr(lowered_nodes, node);

		/* Not: becomes XOR 1 on the lowered operand */
		ir_node *op = get_Not_op(node);
		ir_node *low_op = lower_node(op);

		res = create_not(dbgi, low_op);
		set_irn_link(node, res);
		pdeq_putr(lowered_nodes, node);

		/* Mux: sel ? t : f  ->  (sel & t) | (~sel & f) on 0/1 values */
		ir_node *cond = get_Mux_sel(node);
		ir_node *low_cond = lower_node(cond);
		ir_node *v_true = get_Mux_true(node);
		ir_node *low_v_true = lower_node(v_true);
		ir_node *v_false = get_Mux_false(node);
		ir_node *low_v_false = lower_node(v_false);

		ir_node *and0 = new_rd_And(dbgi, irg, block, low_cond, low_v_true, mode);
		ir_node *not_cond = create_not(dbgi, low_cond);
		ir_node *and1 = new_rd_And(dbgi, irg, block, not_cond, low_v_false, mode);
		ir_node *or = new_rd_Or(dbgi, irg, block, and0, and1, mode);

		set_irn_link(node, or);
		pdeq_putr(lowered_nodes, node);

		/* Conv to mode_b: materialise "value != 0" as a 0/1 set */
		ir_node *pred = get_Conv_op(node);
		ir_mode *mode = get_irn_mode(pred);
		tarval *tv_zeroc = get_tarval_null(mode);
		ir_node *zero_cmp = new_d_Const(dbgi, mode, tv_zeroc);

		ir_node *cmp = new_rd_Cmp(dbgi, irg, block, pred, zero_cmp);
		ir_node *proj = new_rd_Proj(dbgi, irg, block, cmp, mode_b,
		set = create_set(proj);

		set_irn_link(node, set);
		pdeq_putr(lowered_nodes, node);

		/* Proj: distinguish Cmp results, Call results and Start arguments */
		ir_node *pred = get_Proj_pred(node);

		ir_node *left = get_Cmp_left(pred);
		ir_node *right = get_Cmp_right(pred);
		ir_mode *cmp_mode = get_irn_mode(left);

		/* Integer ordering compares can be computed branchlessly from the
		 * sign bit of a subtraction, provided the subtraction cannot
		 * overflow: either the compare mode is narrower than the lowered
		 * mode, or a signed value is compared against constant 0. */
		if ((mode_is_int(cmp_mode) || mode_is_reference(cmp_mode)) && (
			get_mode_size_bits(cmp_mode) < get_mode_size_bits(mode) ||
			(mode_is_signed(cmp_mode) && is_Const(right) && is_Const_null(right))
			int pnc = get_Proj_proj(node);

			if(pnc == pn_Cmp_Lt) {
				/* a < b -> (a - b) >> 31 */
			} else if(pnc == pn_Cmp_Le) {
				/* a <= b -> ~(b - a) >> 31   (i.e. not (b < a)) */
			} else if(pnc == pn_Cmp_Gt) {
				/* a > b -> (b - a) >> 31 */
			} else if(pnc == pn_Cmp_Ge) {
				/* a >= b -> ~(a - b) >> 31 */

			/* shift amount: index of the sign bit in the lowered mode */
			int bits = get_mode_size_bits(mode);
			tarval *tv = new_tarval_from_long(bits-1, mode_Iu);
			ir_node *shift_cnt = new_d_Const(dbgi, mode_Iu, tv);

			/* bring both operands into the lowered mode before subtracting */
			if(cmp_mode != mode) {
				a = new_rd_Conv(dbgi, irg, block, a, mode);
				b = new_rd_Conv(dbgi, irg, block, b, mode);

			res = new_rd_Sub(dbgi, irg, block, a, b, mode);
			res = new_rd_Not(dbgi, irg, block, res, mode);
			/* shift the sign bit down to bit 0 -> 0/1 result */
			res = new_rd_Shr(dbgi, irg, block, res, shift_cnt, mode);

			set_irn_link(node, res);
			pdeq_putr(lowered_nodes, node);

			/* synthesize the 0/1 value */
			set = create_set(node);
			set_irn_link(node, set);
			pdeq_putr(lowered_nodes, node);
		/* mode_b result of a Call: fix up the called method type instead
		 * of converting the value */
		} else if(is_Proj(pred) && is_Call(get_Proj_pred(pred))) {
			ir_type *type = get_Call_type(get_Proj_pred(pred));
			adjust_method_type(type);
			set_irn_mode(node, mode);
		/* mode_b argument from Start: fix up this graph's own method type */
		} else if(is_Proj(pred) && is_Start(get_Proj_pred(pred))) {
			ir_entity *entity = get_irg_entity(irg);
			ir_type *type = get_entity_type(entity);
			adjust_method_type(type);
			set_irn_mode(node, mode);

		panic("unexpected projb: %+F (pred: %+F)", node, pred);

		/* Const: true/false become 1/0 constants in the lowered mode */
		tarval *tv = get_Const_tarval(node);
		if(tv == get_tarval_b_true()) {
			tarval *tv_one = get_tarval_one(mode);
			res = new_d_Const(dbgi, mode, tv_one);
		} else if(tv == get_tarval_b_false()) {
			tarval *tv_zero = get_tarval_null(mode);
			res = new_d_Const(dbgi, mode, tv_zero);
			panic("invalid boolean const %+F", node);
		set_irn_link(node, res);
		pdeq_putr(lowered_nodes, node);

		/* Unknown stays unknown, just in the lowered mode */
		return new_Unknown(config.lowered_mode);

		panic("didn't expect %+F to have mode_b", node);
/**
 * Graph walker: lowers every mode_b input of a node and rewires it,
 * inserting a Conv back to mode_b where the consumer still needs one.
 */
static void lower_mode_b_walker(ir_node *node, void *env)
	arity = get_irn_arity(node);
	for(i = 0; i < arity; ++i) {
		ir_node *in = get_irn_n(node, i);
		/* only mode_b inputs are of interest here */
		if(get_irn_mode(in) != mode_b)
		/* optionally leave Cmp results that directly feed control flow
		 * (Cond) or a value Mux untouched */
		if(! config.lower_direct_cmp) {
			(is_Mux(node) && get_irn_mode(node) != mode_b)) {
			ir_node *pred = get_Proj_pred(in);
		lowered_in = lower_node(in);

		/* Return/Call users: the mode_b value crosses a call boundary, so
		 * adjust the method type instead of converting the value back */
		if(is_Return(node)) {
			ir_entity *entity = get_irg_entity(current_ir_graph);
			ir_type *type = get_entity_type(entity);
			adjust_method_type(type);
		} else if(is_Call(node)) {
			ir_type *type = get_Call_type(node);
			adjust_method_type(type);
			/* other users still expect mode_b -> insert a Conv */
			lowered_in = create_convb(lowered_in);
		set_irn_n(node, i, lowered_in);
	/* inputs changed -> re-register the node for CSE/identity lookup */
	add_identities(current_ir_graph->value_table, node);
/** Walker: resets the link field of every node (used as lowering cache). */
static void clear_links(ir_node *node, void *env)
	set_irn_link(node, NULL);
/**
 * Entry point: lowers all mode_b operations in irg according to nconfig.
 * Afterwards only Convs from/to mode_b remain; no computation is performed
 * in mode_b anymore (see the file header).
 */
void ir_lower_mode_b(ir_graph *irg, const lower_mode_b_config_t *nconfig)
	lowered_nodes = new_pdeq();
	/* reserve the nodes' link fields for the lowering cache */
	set_using_irn_link(irg);

	irg_walk_graph(irg, clear_links, NULL, NULL);
	irg_walk_graph(irg, lower_mode_b_walker, NULL, NULL);

	/* kill the replaced originals now that nothing refers to them */
	while(!pdeq_empty(lowered_nodes)) {
		ir_node *node = (ir_node*) pdeq_getr(lowered_nodes);
		maybe_kill_node(node);
	del_pdeq(lowered_nodes);

	clear_using_irn_link(irg);