-/* Copyright (C) 1998 - 2000 by Universitaet Karlsruhe
-* All rights reserved.
-*
-* Authors: Christian Schaefer, Goetz Lindenmaier
-*
-* iropt --- optimizations intertwined with IR construction.
-*/
-
-/* $Id$ */
+/*
+ * Project: libFIRM
+ * File name: ir/ir/iropt.c
+ * Purpose: iropt --- optimizations intertwined with IR construction.
+ * Author: Christian Schaefer
+ * Modified by: Goetz Lindenmaier
+ * Created:
+ * CVS-ID: $Id$
+ * Copyright: (c) 1998-2003 Universität Karlsruhe
+ * Licence:     This file is protected by the GPL - GNU GENERAL PUBLIC LICENSE.
+ */
#ifdef HAVE_CONFIG_H
# include <config.h>
# include "irgmod.h"
# include "irvrfy.h"
# include "tv.h"
-# include "tune.h"
# include "dbginfo_t.h"
# include "iropt_dbg.h"
break;
case iro_Not:
if ((ta != tarval_bad)) {
- res = tarval_neg (ta);
+ res = tarval_not (ta);
}
break;
case iro_Shl:
case iro_Mod: {
ta = computed_value(n);
if (ta != tarval_bad) {
- /* Turn Div into a tuple (mem, bad, value) */
+ /* Turn Mod into a tuple (mem, bad, value) */
ir_node *mem = get_Mod_mem(n);
turn_into_tuple(n, 3);
set_Tuple_pred(n, 0, mem);
/* **************** Common Subexpression Elimination **************** */
+/** The initial size of the hash table; it should approximate the
+    number of nodes in a graph. */
+#define N_IR_NODES 512
+
/* Compare function for two nodes in the hash table. Gets two */
/* nodes as parameters. Returns 0 if the nodes are a cse. */
static int
switch (get_irn_opcode(a)) {
case iro_Const:
- return get_irn_const_attr (a) != get_irn_const_attr (b);
+ return (get_Const_tarval(a) != get_Const_tarval(b))
+ || (get_Const_type(a) != get_Const_type(b));
case iro_Proj:
return get_irn_proj_attr (a) != get_irn_proj_attr (b);
case iro_Filter:
|| (get_irn_sel_attr(a).ent->type != get_irn_sel_attr(b).ent->type);
case iro_Phi:
return get_irn_phi_attr (a) != get_irn_phi_attr (b);
+ case iro_Cast:
+ return get_Cast_type(a) != get_Cast_type(b);
default: ;
}
pset *
new_identities (void)
{
- return new_pset (vt_cmp, TUNE_NIR_NODES);
+ return new_pset (vt_cmp, N_IR_NODES);
}
void
int i;
ir_op* op = get_irn_op(node);
+#if 1
+  /* Remove garbage blocks: if all predecessors of a matured block are Bad,
+     replace the control flow leaving that block by Bad. */
+ if (get_irn_mode(node) == mode_X) {
+ ir_node *block = get_nodes_block(node);
+ if (get_irn_op(block) == op_Block && get_Block_matured(block)) {
+ for (i = 0; i < get_irn_arity(block); i++) {
+ if (!is_Bad(get_irn_n(block, i))) break;
+ }
+ if (i == get_irn_arity(block)) return new_Bad();
+ }
+ }
+#endif
+
/* Blocks, Phis and Tuples may have dead inputs, e.g., if one of the
blocks predecessors is dead. */
if ( op != op_Block && op != op_Phi && op != op_Tuple) {
}
}
#if 0
+ /* With this code we violate the agreement that local_optimize
+ only leaves Bads in Block, Phi and Tuple nodes. */
/* If Block has only Bads as predecessors it's garbage. */
/* If Phi has only Bads as predecessors it's garbage. */
- if (op == op_Block || op == op_Phi) {
+ if ((op == op_Block && get_Block_matured(node)) || op == op_Phi) {
for (i = 0; i < get_irn_arity(node); i++) {
if (!is_Bad(get_irn_n(node, i))) break;
}
- if (i = get_irn_arity(node)) node = new_Bad();
+ if (i == get_irn_arity(node)) node = new_Bad();
}
#endif
return node;