/**
* Create an automatic array which will be deleted at return from function.
- * Beware, the data will be allocated un the functions stack!
+ * Beware, the data will be allocated on the function stack!
*
* @param type The element type of the new array.
* @param var A lvalue of type (type *) which will hold the new array.
/**
* "eset" is a set of addresses. The addresses are used for element
* compare and hash calculation.
- * The value "NULL" could not be stored, as it is used as internal sentinel.
+ * The value "NULL" cannot be stored, as it is used as an internal sentinel.
*/
typedef struct eset eset;
eset *eset_create(void);
/**
- * Creates a copy of the given set. Did NOT work if NULL is contained in source. */
+ * Creates a copy of the given set. Does NOT work if NULL is contained in source. */
eset *eset_copy(eset *source);
/** Deletes a set. */
#include "firm_config.h"
-#define _FIRM_FNV_OFFSET_BASIS 2166136261
-#define _FIRM_FNV_FNV_PRIME 16777619
+#define _FIRM_FNV_OFFSET_BASIS 2166136261U
+#define _FIRM_FNV_FNV_PRIME 16777619U
static INLINE unsigned firm_fnv_hash(const unsigned char *data, unsigned bytes)
{
construct_cf_backedges(current_ir_graph);
-
l = get_irg_loop(current_ir_graph);
construct_interval_edges(l);
*/
/**
-* @file interval_analysis.h
-*
-* Decompost control flow graph into acylic, hierarchic intervals.
-*
-* @author Goetz Lindenmaier
-*
-* The analysis is based on the control flow looptree. An intervall are basically
-* all nodes in a single ir_loop entry, i.e., basic blocks and inner loop nodes.
-* The analysis computes a new set of edges that link all nodes of a loop to an
-* acyclic graph.
-*
-*
-*
-*/
+ * @file interval_analysis.h
+ *
+ * Decompose control flow graph into acyclic, hierarchic intervals.
+ *
+ * @author Goetz Lindenmaier
+ *
+ * The analysis is based on the control flow loop tree. An interval is basically
+ * all nodes in a single ir_loop entry, i.e., basic blocks and inner loop nodes.
+ * The analysis computes a new set of edges that link all nodes of a loop to an
+ * acyclic graph.
+ *
+ *
+ *
+ */
#ifndef _INTERVAL_ANALYSIS_H_
#define _INTERVAL_ANALYSIS_H_
*
* This number is useful for evaluation of execution frequencies.
*/
-int get_region_n_outs(void *region);
+int get_region_n_outs(void *region);
int get_region_n_exc_outs(void *region);
/** The control flow operation corresponding to the loop-region in at
void *get_loop_cfop(void *region, int pos);
-
-
/** The algorithm to construct the interval graph.
+ *
+ * Constructs the cf loop tree and leaves a valid version of it.
*
* @todo: @@@ add flag that states correctness of interval analysis information
* to irg.
**/
-
void construct_intervals(ir_graph *irg);
+
/** frees interval information of all graphs. */
void free_intervals(void);
/** For dominator information */
typedef struct dom_info {
struct ir_node *idom; /**< immediate CFG dominator */
- struct ir_node *next; /**< The next node in the dominated
- list of @c idom. */
- struct ir_node *first; /**< The first node in the list of nodes
- this nodes dominates immediately. */
- int tree_pre_num; /**< The pre-order number from a dfs walk
- over the dominator tree. */
- int max_subtree_pre_num; /**< The largest tree pre num found in the
- dominator subtree of this node. */
+ struct ir_node *next; /**< The next node in the dominated
+ list of @c idom. */
+ struct ir_node *first; /**< The first node in the list of nodes
+ this nodes dominates immediately. */
+ int tree_pre_num; /**< The pre-order number from a dfs walk
+ over the dominator tree. */
+ int max_subtree_pre_num; /**< The largest tree pre num found in the
+ dominator subtree of this node. */
int pre_num; /**< pre-order graph-walk number */
int dom_depth; /**< depth in dominator-tree */
} dom_info;
topdir = ../..
subdir := ir/common
-INSTALL_HEADERS := firm_common.h firm.h firmwalk.h statistics.h old_fctnames.h debug.h
+INSTALL_HEADERS := firm_common.h firm.h firmwalk.h statistics.h debug.h
SOURCES = $(INSTALL_HEADERS)
SOURCES += Makefile.in \
panic.c firm_common.c firm.c firmwalk.c \
- panic.h firm_common_t.h statistics.c debug.c
+ panic.h firm_common_t.h statistics.c debug.c old_fctnames.h
include $(topdir)/MakeRules
* Further it collects all Proj nodes in a list of the node producing
* the tuple. In case of nested tuples the Projs are collected in the
* node producing the outermost Tuple.
+ * All other link fields are cleared afterwards.
*/
void collect_phiprojs(ir_graph *irg);
*
* @param n The node to be copied
* @param env if non-NULL, the node number attribute will be copied to the new node
+ *
+ * Note: Also used for loop unrolling.
*/
-static void
-copy_node (ir_node *n, void *env) {
+void copy_node (ir_node *n, void *env) {
ir_node *nn, *block;
int new_arity;
opcode op = get_irn_opcode(n);
Assumes that current_ir_graph is set to the graph containing "node".
"in" must contain all predecessors except the block that are required for
the nodes opcode. */
-void set_irn_in (ir_node *node, int arity,
- ir_node *in[]);
+void set_irn_in (ir_node *node, int arity, ir_node *in[]);
+
/* to iterate through the predecessors without touching the array. No
order of predecessors guaranteed.
To iterate over the operands iterate from 0 to i < get_irn_arity(),
int is_ip_cfop(const ir_node *node);
/** Returns true if the operation can change the control flow because
of an exception: Call, Quot, DivMod, Div, Mod, Load, Store, Alloc,
- Bad. */
+     Bad. Raise is not fragile, but an unconditional jump. */
int is_fragile_op(const ir_node *node);
/** Returns the memory operand of fragile operations. */
ir_node *get_fragile_op_mem(ir_node *node);
/** EndReg/EndExcept attributes */
typedef struct {
char dummy;
- /* ir_graph * irg; */ /**< ir_graph this node belongs to (for */
- /* * navigating in interprocedural graphs) */
- /* @@@ now in block */
} end_attr;
/** CallBegin attributes */
typedef struct {
- /* ir_graph * irg; */ /**< ir_graph this node belongs to (for */
- /* * navigating in interprocedural graphs) */
- /* @@@ now in block */
ir_node * call; /**< associated Call-operation */
} callbegin_attr;
node takes the role of the obsolete Phi0 node,
therefore the name. */
int *phi_backedge; /**< For Phi after construction.
- Field n set to true if pred n is backedge.
- @todo Ev. replace by bitfield! */
+ Field n set to true if pred n is backedge.
+ @todo Ev. replace by bitfield! */
long proj; /**< For Proj: contains the result position to project */
confirm_attr confirm_cmp; /**< For Confirm: compare operation */
filter_attr filter; /**< For Filter */
*/
static INLINE void *
__get_irn_link(const ir_node *node) {
- assert (node);
+ assert (node && is_ir_node(node));
return node->link;
}
*/
static INLINE op_pin_state
__get_irn_pinned(const ir_node *node) {
+ assert(node && is_ir_node(node));
op_pin_state state = __get_op_pinned(__get_irn_op(node));
if (state >= op_pin_state_exc_pinned)
return get_opt_fragile_ops() ? node->attr.except.pin_state : op_pin_state_pinned;
static INLINE int
__is_unop(const ir_node *node) {
+ assert(node && is_ir_node(node));
return (node->op->opar == oparity_unary);
}
static INLINE int
__is_binop(const ir_node *node) {
+ assert(node && is_ir_node(node));
return (node->op->opar == oparity_binary);
}
static INLINE int
__is_no_Block(const ir_node *node) {
- assert(node);
+ assert(node && is_ir_node(node));
return (__get_irn_op(node) != op_Block);
}
static INLINE int
__is_Block(const ir_node *node) {
- assert(node);
+ assert(node && is_ir_node(node));
return (__get_irn_op(node) == op_Block);
}
and replacing the control flow by Bad. */
if (get_irn_mode(node) == mode_X) {
ir_node *block = get_nodes_block(node);
+ if (!get_Block_matured(block)) return node; /* Don't optimize nodes in immature blocks. */
if (op == op_End) return node; /* Don't optimize End, may have Bads. */
if (get_irn_op(block) == op_Block && get_Block_matured(block)) {