2 * Copyright (C) 1995-2009 University of Karlsruhe. All rights reserved.
4 * This file is part of libFirm.
6 * This file may be distributed and/or modified under the terms of the
7 * GNU General Public License version 2 as published by the Free Software
8 * Foundation and appearing in the file LICENSE.GPL included in the
9 * packaging of this file.
11 * Licensees holding valid libFirm Professional Edition licenses may use
12 * this file in accordance with the libFirm Commercial License.
13 * Agreement provided with the Software.
15 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
16 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 * @brief analyze graph to provide value range information
32 #include "irgraph_t.h"
41 static void *VISITED = &v;
43 typedef struct worklist_t worklist_t;
45 struct list_head nodes;
/*
 * Recompute the value-range-propagation (VRP) information stored on a single
 * node from the information already present on its predecessors, and merge
 * the result into the node's existing vrp record.
 *
 * Returns non-zero iff the node's stored vrp data changed, so that callers
 * (the worklist loop in set_vrp_data) know to re-queue the node's users.
 *
 * NOTE(review): this extract is missing source lines (closing braces, some
 * statements); comments below describe only what the visible code shows.
 */
53 int update_vrp_data(ir_node *node)
/* Start from "no information": bad tarvals, undefined range, no deltas. */
56 tarval *new_bits_set = get_tarval_bad();
57 tarval *new_bits_not_set = get_tarval_bad();
58 tarval *new_range_bottom = get_tarval_bad();
59 tarval *new_range_top = get_tarval_bad();
60 ir_node *new_bits_node = NULL;
61 ir_node *new_range_node = NULL;
62 enum range_types new_range_type = VRP_UNDEFINED;
63 enum range_ops new_range_op = VRP_NONE;
64 int something_changed = 0;
67 /* TODO: Check if all predecessors have valid VRP information*/
/* Only integer-mode nodes carry meaningful bit/range information. */
70 if (!mode_is_int(get_irn_mode(node))) {
71 return 0; /* we don't optimize for non-int-nodes*/
/* Const: the value is fully known — every bit is either set or not set,
 * and the range degenerates to the single value tv.
 * (The is_Const() test itself is on a line missing from this extract.) */
75 tarval *tv = get_Const_tarval(node);
78 new_bits_not_set = tarval_not(tv);
79 new_range_bottom = tv;
81 new_range_type = VRP_RANGE;
/* And: a result bit is set only if set in both operands; it is known-zero
 * if known-zero in either operand. */
82 } else if (is_And(node)) {
83 ir_node *pred0 = get_And_left(node);
84 ir_node *pred1 = get_And_right(node);
87 new_bits_set = tarval_and(pred0->vrp.bits_set, pred1->vrp.bits_set);
88 new_bits_not_set = tarval_or(pred0->vrp.bits_not_set, pred1->vrp.bits_not_set);
90 tmp = tarval_not(pred0->vrp.bits_set);
91 tmp = tarval_eor(pred0->vrp.bits_not_set, tmp);
92 /*check if one of the predecessors is completely determined*/
93 if (tarval_is_null(tmp)) {
94 new_bits_node = pred1;
97 tmp = tarval_not(pred1->vrp.bits_set);
98 tmp = tarval_eor(pred1->vrp.bits_not_set, tmp);
99 if (tarval_is_null(tmp)) {
100 new_bits_node = pred0;
/* Add: [a,b] + [c,d] = [a+c, b+d], valid only if neither endpoint sum
 * overflows (checked via tarval_carry after each tarval_add). */
102 } else if (is_Add(node)) {
103 ir_node *pred0 = get_Add_left(node);
104 ir_node *pred1 = get_Add_right(node);
105 int overflow_top, overflow_bottom;
106 tarval *new_top, *new_bottom;
/* Bail out if either operand lacks usable range information. */
108 if (pred0->vrp.range_type == VRP_UNDEFINED || pred1->vrp.range_type ==
109 VRP_UNDEFINED || pred0->vrp.range_type == VRP_VARYING ||
110 pred1->vrp.range_type == VRP_VARYING) {
114 new_top = tarval_add(pred0->vrp.range_top, pred1->vrp.range_top);
115 overflow_top = tarval_carry();
116 new_bottom = tarval_add(pred0->vrp.range_bottom, pred1->vrp.range_bottom);
117 overflow_bottom = tarval_carry();
119 if (!overflow_top && !overflow_bottom && pred0->vrp.range_type == VRP_RANGE
120 &&pred1->vrp.range_type == VRP_RANGE) {
121 new_range_bottom = new_bottom;
122 new_range_top = new_top;
123 new_range_type = VRP_RANGE;
126 if (overflow_top || overflow_bottom) {
127 /* TODO Implement overflow handling*/
128 new_range_type = VRP_UNDEFINED;
/* Sub: computed as top-top and bottom-bottom below.
 * NOTE(review): correct interval arithmetic for [a,b] - [c,d] is
 * [a-d, b-c]; the top-top/bottom-bottom form looks wrong — verify
 * against the full source before relying on this range. */
130 } else if (is_Sub(node)) {
131 ir_node *pred0 = get_Sub_left(node);
132 ir_node *pred1 = get_Sub_right(node);
133 int overflow_top, overflow_bottom;
134 tarval *new_top, *new_bottom;
136 if (pred0->vrp.range_type == VRP_UNDEFINED || pred1->vrp.range_type ==
141 new_top = tarval_sub(pred0->vrp.range_top, pred1->vrp.range_top, NULL);
142 overflow_top = tarval_carry();
143 new_bottom = tarval_sub(pred0->vrp.range_bottom, pred1->vrp.range_bottom, NULL);
144 overflow_bottom = tarval_carry();
146 if (!overflow_top && !overflow_bottom && pred0->vrp.range_type == VRP_RANGE
147 &&pred1->vrp.range_type == VRP_RANGE) {
148 new_range_bottom = new_bottom;
149 new_range_top = new_top;
150 new_range_type = VRP_RANGE;
153 if (overflow_top || overflow_bottom) {
154 /* TODO Implement overflow handling*/
/* Or: dual of And — a bit is set if set in either operand, known-zero
 * only if known-zero in both. */
156 } else if (is_Or(node)) {
157 ir_node *a = get_Or_left(node);
158 ir_node *b = get_Or_right(node);
161 new_bits_set = tarval_or(a->vrp.bits_set, b->vrp.bits_set);
162 new_bits_not_set = tarval_and(a->vrp.bits_not_set, b->vrp.bits_not_set);
164 tmp = tarval_not(a->vrp.bits_set);
165 tmp = tarval_eor(a->vrp.bits_not_set, tmp);
166 /*check if one of the predecessors is completely determined*/
167 if (tarval_is_null(tmp)) {
171 tmp = tarval_not(b->vrp.bits_set);
172 tmp = tarval_eor(b->vrp.bits_not_set, tmp);
173 if (tarval_is_null(tmp)) {
/* Rotl: rotate both bit masks by the (constant) shift amount. */
177 } else if (is_Rotl(node)) {
178 ir_node *a = get_Rotl_left(node);
179 ir_node *b = get_Rotl_right(node);
181 /* We can only compute this if the right value is a constant*/
183 tarval *bits_set, *bits_not_set;
184 bits_set = tarval_rotl(a->vrp.bits_set, get_Const_tarval(b));
185 bits_not_set = tarval_rotl(a->vrp.bits_not_set, get_Const_tarval(b));
187 new_bits_set = tarval_or(bits_set, node->vrp.bits_set);
188 new_bits_not_set = tarval_or(bits_not_set, node->vrp.bits_not_set);
/* Shl: shift the masks left; the low bits shifted in are known-zero
 * (added to bits_not_set via the extra tarval_not(tarval_shl(...)) term). */
191 } else if (is_Shl(node)) {
192 ir_node *a = get_Shl_left(node);
193 ir_node *b = get_Shl_right(node);
195 /* We can only compute this if the right value is a constant*/
197 tarval *bits_set, *bits_not_set;
198 ir_mode *m = get_tarval_mode(node->vrp.bits_not_set);
199 bits_set = tarval_shl(a->vrp.bits_set, get_Const_tarval(b));
200 bits_not_set = tarval_shl(a->vrp.bits_not_set, get_Const_tarval(b));
202 new_bits_set = tarval_or(bits_set, node->vrp.bits_set);
203 new_bits_not_set = tarval_or(bits_not_set, node->vrp.bits_not_set);
205 bits_not_set = tarval_not( tarval_shl(
207 get_Const_tarval(b)));
208 new_bits_not_set = tarval_or(bits_not_set, new_bits_not_set);
/* Shr: logical shift right; high bits shifted in are known-zero. */
212 } else if (is_Shr(node)) {
213 ir_node *a = get_Shr_left(node);
214 ir_node *b = get_Shr_right(node);
216 /* We can only compute this if the right value is a constant*/
218 tarval *bits_set, *bits_not_set;
219 ir_mode *m = get_tarval_mode(node->vrp.bits_not_set);
220 bits_set = tarval_shr(a->vrp.bits_set, get_Const_tarval(b));
221 bits_not_set = tarval_shr(a->vrp.bits_not_set, get_Const_tarval(b));
223 new_bits_set = tarval_or(bits_set, node->vrp.bits_set);
224 new_bits_not_set = tarval_or(bits_not_set, node->vrp.bits_not_set);
226 bits_not_set = tarval_not( tarval_shr(
228 get_Const_tarval(b)));
229 new_bits_not_set = tarval_or(bits_not_set, new_bits_not_set);
/* Shrs: arithmetic shift right, same structure as Shr with tarval_shrs. */
232 } else if (is_Shrs(node)) {
233 ir_node *a = get_Shrs_left(node);
234 ir_node *b = get_Shrs_right(node);
236 /* We can only compute this if the right value is a constant*/
238 tarval *bits_set, *bits_not_set;
239 ir_mode *m = get_tarval_mode(node->vrp.bits_not_set);
240 bits_set = tarval_shrs(a->vrp.bits_set, get_Const_tarval(b));
241 bits_not_set = tarval_shrs(a->vrp.bits_not_set, get_Const_tarval(b));
243 new_bits_set = tarval_or(bits_set, node->vrp.bits_set);
244 new_bits_not_set = tarval_or(bits_not_set, node->vrp.bits_not_set);
246 bits_not_set = tarval_not( tarval_shrs(
248 get_Const_tarval(b)));
249 new_bits_not_set = tarval_or(bits_not_set, new_bits_not_set);
/* Eor: a result bit is known when it is known in both operands:
 * known-zero when the operands agree, known-one when they differ. */
252 } else if (is_Eor(node)) {
253 ir_node *a = get_Eor_left(node);
254 ir_node *b = get_Eor_right(node);
256 tarval *bits_set, *bits_not_set;
257 bits_not_set = tarval_or(
258 tarval_and(a->vrp.bits_set, b->vrp.bits_set),
259 tarval_and(a->vrp.bits_not_set,
260 b->vrp.bits_not_set));
262 bits_set = tarval_or(
263 tarval_and(a->vrp.bits_set, b->vrp.bits_not_set),
264 tarval_and(a->vrp.bits_not_set, b->vrp.bits_set));
266 new_bits_set = tarval_or(bits_set, node->vrp.bits_set);
267 new_bits_not_set = tarval_or(bits_not_set, node->vrp.bits_not_set);
/* Id: plain copy of the predecessor's vrp record. */
269 } else if (is_Id(node)) {
270 ir_node *pred = get_Id_pred(node);
271 new_bits_set = pred->vrp.bits_set;
272 new_bits_not_set = pred->vrp.bits_not_set;
273 new_range_top = pred->vrp.range_top;
274 new_range_bottom = pred->vrp.range_bottom;
275 new_range_type = pred->vrp.range_type;
/* Not: bit roles swap — predecessor's known-zeros become known-ones. */
277 } else if (is_Not(node)) {
278 ir_node *pred = get_Not_op(node);
279 new_bits_set = tarval_or(pred->vrp.bits_not_set, node->vrp.bits_set);
280 new_bits_not_set = tarval_or(pred->vrp.bits_set, node->vrp.bits_not_set);
/* Conv: convert the masks to the target mode; bits outside the target
 * width become known-zero, and range bounds carry over only when they
 * fit into the new mode. */
282 } else if (is_Conv(node)) {
283 ir_node *pred = get_Conv_op(node);
284 ir_mode *old_mode = get_irn_mode(pred);
286 tarval *bits_not_set;
288 if (!mode_is_int(old_mode))
291 new_mode = get_irn_mode(node);
293 /* The second and is needed if target type is smaller*/
294 bits_not_set = tarval_not(
295 tarval_convert_to(get_mode_all_one(old_mode),
298 bits_not_set = tarval_or(bits_not_set, tarval_convert_to(pred->vrp.bits_not_set, new_mode));
299 new_bits_not_set = tarval_or(bits_not_set, node->vrp.bits_not_set);
300 new_bits_set = tarval_and(
301 tarval_not(bits_not_set), tarval_convert_to(pred->vrp.bits_set, new_mode));
303 if (tarval_cmp(pred->vrp.range_top, get_mode_max(new_mode)) == pn_Cmp_Le) {
304 node->vrp.range_top = pred->vrp.range_top;
307 if (tarval_cmp(pred->vrp.range_bottom, get_mode_min(new_mode)) == pn_Cmp_Ge) {
308 node->vrp.range_bottom = pred->vrp.range_bottom;
/* Confirm: derive a range from the comparison relation asserted by the
 * Confirm node against its (Const) bound. */
311 } else if (is_Confirm(node)) {
312 pn_Cmp cmp = get_Confirm_cmp(node);
313 ir_node *bound = get_Confirm_bound(node);
315 /** @todo: Handle non-Const bounds */
/* x != bound => antirange consisting of the single value bound. */
317 if (cmp == pn_Cmp_Lg) {
318 /** @todo: Is there some way to preserve the information? */
319 new_range_type = VRP_ANTIRANGE;
320 if (is_Const(bound)) {
321 new_range_top = get_Const_tarval(bound);
322 new_range_bottom = get_Const_tarval(bound);
/* x <= bound => range [mode_min, bound], possibly tightening an
 * existing VRP_RANGE. */
324 } else if (cmp == pn_Cmp_Le) {
325 if (node->vrp.range_type == VRP_UNDEFINED) {
326 new_range_type = VRP_RANGE;
327 if (is_Const(bound)) {
328 new_range_top = get_Const_tarval(bound);
330 new_range_bottom = get_tarval_min(get_irn_mode(node));
331 } else if (node->vrp.range_type == VRP_RANGE) {
332 if (is_Const(bound)) {
333 if (tarval_cmp(node->vrp.range_top,
334 get_Const_tarval(bound)) == pn_Cmp_Le) {
335 new_range_top = get_Const_tarval(bound);
337 new_range_bottom = get_tarval_min(get_irn_mode(node));
339 } else if (node->vrp.range_type == VRP_ANTIRANGE) {
340 /** @todo: How do we manage not to get a never ending loop? */
/* Phi: the result range is the union (widest hull) of all predecessor
 * ranges; anything else degrades to VRP_VARYING. */
345 } else if (is_Phi(node)) {
346 /* combine all ranges*/
348 int num = get_Phi_n_preds(node);
351 tarval *range_top, *range_bottom, *bits_set, *bits_not_set;
352 enum range_types range_type;
356 pred = get_Phi_pred(node,0);
357 range_top = pred->vrp.range_top;
358 range_bottom = pred->vrp.range_bottom;
359 range_type = pred->vrp.range_type;
360 bits_set = pred->vrp.bits_set;
361 bits_not_set = pred->vrp.bits_not_set;
363 for (i = 1; i < num; i++) {
364 pred = get_Phi_pred(node, i);
365 if (range_type == VRP_RANGE && pred->vrp.range_type ==
367 cmp = tarval_cmp(range_top, pred->vrp.range_top);
/* Widen the hull: keep the larger top and the smaller bottom. */
368 if (cmp == pn_Cmp_Lt) {
369 range_top = pred->vrp.range_top;
371 cmp = tarval_cmp(range_bottom, pred->vrp.range_bottom);
372 if (cmp == pn_Cmp_Gt) {
373 range_bottom = pred->vrp.range_bottom;
376 range_type = VRP_VARYING;
381 new_range_type = range_type;
382 new_range_top = range_top;
383 new_range_bottom = range_bottom;
386 /* unhandled, therefore never updated*/
392 /* TODO: Check, if there can be information derived from any of these:
393 is_Abs(node) is_Alloc(node) is_Anchor(node) is_Borrow(node) is_Bound(node)
394 is_Break(node) is_Builtin(node) is_Call(node) is_CallBegin(node)
395 is_Carry(node) is_Cast(node) is_Cmp(node) is_Cond(node)
396 is_CopyB(node) is_Div(node) is_DivMod(node) is_Dummy(node)
397 is_End(node) is_EndExcept(node) is_EndReg(node) is_Filter(node) is_Free(node)
398 is_IJmp(node) is_InstOf(node) is_Jmp(node) is_Load(node) is_Minus(node)
399 is_Mod(node) is_Mul(node) is_Mulh(node) is_Mux(node) is_NoMem(node)
400 is_Pin(node) is_Proj(node) is_Quot(node)
401 is_Raise(node) is_Return(node) is_Sel(node) is_Start(node) is_Store(node)
402 is_SymConst(node) is_Sync(node) is_Tuple(node)
405 /* Merge the newly calculated values with those that might already exist*/
/* Bit masks only ever gain bits (monotone or-merge); a change in either
 * mask marks the node as updated. */
407 if (new_bits_set != tarval_bad) {
408 new_bits_set = tarval_or(new_bits_set, node->vrp.bits_set);
409 if (tarval_cmp(new_bits_set, node->vrp.bits_set) != pn_Cmp_Eq) {
410 something_changed = 1;
411 node->vrp.bits_set = new_bits_set;
415 if (new_bits_not_set != tarval_bad) {
416 new_bits_not_set = tarval_or(new_bits_not_set, node->vrp.bits_not_set);
418 if (tarval_cmp(new_bits_not_set, node->vrp.bits_not_set) != pn_Cmp_Eq) {
419 something_changed = 1;
420 node->vrp.bits_not_set = new_bits_not_set;
424 if (node->vrp.bits_node == NULL && new_bits_node != NULL) {
425 something_changed = 1;
426 node->vrp.bits_node = new_bits_node;
/* Range merge: UNDEFINED adopts any new info outright; otherwise the
 * new range intersects/trims the stored one depending on the pairing
 * of RANGE and ANTIRANGE. */
429 if (node->vrp.range_type == VRP_UNDEFINED &&
430 new_range_type != VRP_UNDEFINED) {
431 something_changed = 1;
432 node->vrp.range_type = new_range_type;
433 node->vrp.range_bottom = new_range_bottom;
434 node->vrp.range_top = new_range_top;
435 node->vrp.range_op = new_range_op;
436 node->vrp.range_node = new_range_node;
438 } else if (node->vrp.range_type == VRP_RANGE) {
/* RANGE + RANGE: tighten both bounds toward the intersection. */
439 if (new_range_type == VRP_RANGE) {
440 if ((new_range_node == NULL && node->vrp.range_node == NULL) ||
441 (new_range_node == node->vrp.range_node &&
442 new_range_op == node->vrp.range_op)) {
443 if (tarval_cmp(node->vrp.range_bottom, new_range_bottom) == pn_Cmp_Lt) {
444 something_changed = 1;
445 node->vrp.range_bottom = new_range_bottom;
447 if (tarval_cmp(node->vrp.range_top, new_range_top) == pn_Cmp_Gt) {
448 something_changed = 1;
449 node->vrp.range_top = new_range_top;
453 /* prefer the absolute value*/
454 if (new_range_node == NULL && node->vrp.range_node != NULL) {
455 something_changed = 1;
456 node->vrp.range_node = NULL;
457 node->vrp.range_top = new_range_top;
458 node->vrp.range_bottom = new_range_bottom;
/* RANGE + ANTIRANGE: cut the excluded interval off the range edge
 * when the two overlap at either end. */
462 if (new_range_type == VRP_ANTIRANGE) {
463 /* if they are overlapping, cut the range.*/
464 /* TODO: Maybe we can preserve more information here*/
465 if (new_range_node == NULL && node->vrp.range_node == NULL) {
466 if (tarval_cmp(node->vrp.range_bottom, new_range_top) == pn_Cmp_Gt &&
467 tarval_cmp(node->vrp.range_bottom, new_range_bottom) == pn_Cmp_Gt) {
468 something_changed = 1;
469 node->vrp.range_bottom = new_range_top;
471 } else if (tarval_cmp(node->vrp.range_top, new_range_bottom) == pn_Cmp_Gt &&
472 tarval_cmp(node->vrp.range_top, new_range_top) == pn_Cmp_Lt) {
473 something_changed = 1;
474 node->vrp.range_top = new_range_bottom;
477 /* We can not handle the case where the anti range is in the*/
481 /* prefer the absolute value*/
482 if (new_range_node == NULL && node->vrp.range_node != NULL) {
483 something_changed = 1;
484 node->vrp.range_node = NULL;
485 node->vrp.range_top = new_range_top;
486 node->vrp.range_bottom = new_range_bottom;
489 } else if (node->vrp.range_type == VRP_ANTIRANGE) {
/* ANTIRANGE + ANTIRANGE: widen the excluded interval. */
490 if (new_range_type == VRP_ANTIRANGE) {
491 if ((new_range_node == NULL && node->vrp.range_node == NULL) ||
492 (new_range_node == node->vrp.range_node &&
493 new_range_op == node->vrp.range_op)) {
494 if (tarval_cmp(node->vrp.range_bottom, new_range_bottom) == pn_Cmp_Gt) {
495 something_changed = 1;
496 node->vrp.range_bottom = new_range_bottom;
498 if (tarval_cmp(node->vrp.range_top, new_range_top) == pn_Cmp_Lt) {
499 something_changed = 1;
500 node->vrp.range_top = new_range_top;
504 /* prefer the absolute value*/
505 if (new_range_node == NULL && node->vrp.range_node != NULL) {
506 something_changed = 1;
507 node->vrp.range_node = NULL;
508 node->vrp.range_top = new_range_top;
509 node->vrp.range_bottom = new_range_bottom;
/* ANTIRANGE + RANGE: trim the excluded interval where the allowed
 * range overlaps it. */
513 if (new_range_type == VRP_RANGE) {
514 if ((new_range_node == NULL && node->vrp.range_node == NULL) ||
515 (new_range_node == node->vrp.range_node &&
516 new_range_op == node->vrp.range_op)) {
517 if (tarval_cmp(node->vrp.range_bottom, new_range_top) == pn_Cmp_Gt) {
518 something_changed = 1;
519 node->vrp.range_bottom = new_range_top;
521 if (tarval_cmp(node->vrp.range_top, new_range_bottom) == pn_Cmp_Lt) {
522 something_changed = 1;
523 node->vrp.range_top = new_range_bottom;
527 /* prefer the absolute value*/
528 if (new_range_node == NULL && node->vrp.range_node != NULL) {
529 something_changed = 1;
530 node->vrp.range_node = NULL;
531 node->vrp.range_top = new_range_top;
532 node->vrp.range_bottom = new_range_bottom;
/* Sanity: no bit may be simultaneously known-one and known-zero. */
537 assert(tarval_is_null(
538 tarval_and(node->vrp.bits_set, node->vrp.bits_not_set)));
540 return something_changed;
/*
 * Graph-walker callback for the initial pass: marks node n as visited via
 * its link field, then puts every already-visited successor (out edge) onto
 * the environment's worklist so it gets re-evaluated by update_vrp_data.
 * e is really a struct vrp_env_t* carrying the worklist head.
 * NOTE(review): lines are missing from this extract (declarations of i/succ,
 * the assignment of tmp_entry->node, closing braces) — comments describe
 * only the visible code.
 */
543 void vrp_first_pass(ir_node *n, void *e)
546 worklist_t *tmp_entry;
548 struct vrp_env_t *env = e;
/* Mark this node as processed; successors seen later compare against it. */
553 set_irn_link(n, VISITED);
557 for (i = get_irn_n_outs(n) - 1; i >=0; --i) {
558 succ = get_irn_out(n, i);
/* Only already-visited successors need a second evaluation round. */
559 if (get_irn_link(succ) == VISITED) {
562 tmp_entry = XMALLOC(worklist_t);
564 list_add(&(tmp_entry->nodes), &(env->worklist->nodes));
/*
 * Compute VRP information for every node of irg: an initial full-graph walk
 * (vrp_first_pass) seeds the worklist, then the worklist is drained — each
 * node whose vrp record changed re-queues all of its users until a fixpoint
 * is reached.
 * NOTE(review): this extract is missing lines (declarations of i/succ,
 * worklist variable, list_del/free of processed entries, closing braces);
 * comments describe only the visible code.
 */
572 void set_vrp_data(ir_graph *irg)
578 worklist_t *tmp_entry, *tmp_entry2;
579 struct vrp_env_t env;
586 assure_irg_outs(irg); /* ensure that out edges are consistent*/
588 /* edges_activate(irg);*/
590 INIT_LIST_HEAD(&worklist.nodes);
592 env.worklist = &worklist;
/* Seed pass: evaluate every node once and collect re-visit candidates. */
593 irg_walk_graph(irg, NULL, vrp_first_pass, &env);
597 /* while there are entries in the worklist, continue*/
598 while ( !list_empty(&worklist.nodes) ) {
600 list_head *pos, *next;
601 list_for_each_safe(pos, next, &worklist.nodes) {
603 tmp_entry = list_entry(pos, worklist_t, nodes);
/* Re-evaluate; on change, all users must be reconsidered too. */
605 if (update_vrp_data(tmp_entry->node)) {
606 /* if something changed, add successors to worklist*/
607 for (i = get_irn_n_outs(tmp_entry->node) - 1; i >=0; --i) {
608 succ = get_irn_out(tmp_entry->node, i);
610 tmp_entry2 = XMALLOC(worklist_t);
611 tmp_entry2->node = succ;
612 list_add(&(tmp_entry2->nodes), &worklist.nodes);
/*
 * Wrap set_vrp_data as a named graph pass for the pass manager.
 * Uses "set_vrp" as the pass name when name is NULL.
 */
623 ir_graph_pass_t *set_vrp_pass(const char *name)
625 return def_graph_pass(name ? name : "set_vrp", set_vrp_data);
628 pn_Cmp vrp_cmp(ir_node *left, ir_node *right)
630 if (!left->vrp.valid || !right->vrp.valid) {
634 if (left->vrp.range_type == VRP_RANGE && right->vrp.range_type == VRP_RANGE) {
635 if (tarval_cmp(left->vrp.range_top, right->vrp.range_bottom) == pn_Cmp_Lt) {
638 if (tarval_cmp(left->vrp.range_bottom, right->vrp.range_top) == pn_Cmp_Gt) {
643 if (!tarval_is_null(tarval_and(left->vrp.bits_set, right->vrp.bits_not_set)) ||
644 !tarval_is_null(tarval_and(left->vrp.bits_not_set, right->vrp.bits_set))) {
647 /* TODO: We can get way more information here*/