2 * This file is part of libFirm.
3 * Copyright (C) 2012 University of Karlsruhe.
8 * @brief Read/write analysis of graph arguments which have mode reference.
9 * @author Beyhan Veliev
22 #include "analyze_irg_args.h"
25 * Recursively walk the successors of a graph argument
26 * with mode reference and mark whether it will be read,
29 * @param arg The graph argument with mode reference
30 * that must be checked.
32 static ptr_access_kind analyze_arg(ir_node *arg, ptr_access_kind bits)
37 /* We must visit a node once to avoid endless recursion.*/
38 mark_irn_visited(arg);
40 for (i = get_irn_n_outs(arg) - 1; i >= 0; --i) {
41 succ = get_irn_out(arg, i);
44 if (irn_visited(succ))
47 /* We should not walk over the memory edge.*/
48 if (get_irn_mode(succ) == mode_M)
51 /* If we reach with the recursion a Call node and our reference
52 isn't the address of this Call we accept that the reference will
53 be read and written if the graph of the method represented by
54 "Call" isn't computed else we analyze that graph. If our
55 reference is the address of this
56 Call node that mean the reference will be read.*/
57 switch (get_irn_opcode(succ)) {
60 ir_node *ptr = get_Call_ptr(succ);
63 /* Hmm: not sure what this is, most likely a read */
64 bits |= ptr_access_read;
68 if (is_SymConst_addr_ent(ptr)) {
69 meth_ent = get_SymConst_entity(ptr);
71 for (p = get_Call_n_params(succ) - 1; p >= 0; --p) {
72 if (get_Call_param(succ, p) == arg) {
73 /* an arg can be used more than once ! */
74 bits |= get_method_param_access(meth_ent, p);
77 } else if (is_Sel(ptr) && get_irp_callee_info_state() == irg_callee_info_consistent) {
78 /* is be a polymorphic call but callee information is available */
79 int n_params = get_Call_n_params(succ);
82 /* simply look into ALL possible callees */
83 for (c = get_Call_n_callees(succ) - 1; c >= 0; --c) {
84 meth_ent = get_Call_callee(succ, c);
86 /* unknown_entity is used to signal that we don't know what is called */
87 if (is_unknown_entity(meth_ent)) {
88 bits |= ptr_access_all;
92 for (p = n_params - 1; p >= 0; --p) {
93 if (get_Call_param(succ, p) == arg) {
94 /* an arg can be used more than once ! */
95 bits |= get_method_param_access(meth_ent, p);
99 } else /* can do anything */
100 bits |= ptr_access_all;
103 /* search stops here anyway */
107 /* We have reached a Store node => the reference is written or stored. */
108 if (get_Store_ptr(succ) == arg) {
110 bits |= ptr_access_write;
113 bits |= ptr_access_store;
116 /* search stops here anyway */
120 /* We have reached a Load node => the reference is read. */
121 bits |= ptr_access_read;
123 /* search stops here anyway */
127 /* our address is casted into something unknown. Break our search. */
128 bits = ptr_access_all;
135 /* If we know that, the argument will be read, write and stored, we
136 can break the recursion.*/
137 if (bits == ptr_access_all) {
138 bits = ptr_access_all;
143 * A calculation that do not lead to a reference mode ends our search.
144 * This is dangerous: It would allow to cast into integer and that cast back ...
145 * so, when we detect a Conv we go mad, see the Conv case above.
147 if (!mode_is_reference(get_irn_mode(succ)))
150 /* follow further the address calculation */
151 bits = analyze_arg(succ, bits);
153 set_irn_link(arg, NULL);
158 * Check if an argument of the ir graph with mode
159 * reference is read, written or both.
161 * @param irg The ir graph to analyze.
163 static void analyze_ent_args(ir_entity *ent)
/* NOTE(review): several original lines (declarations of mtp/nparams/irg/i,
 * early returns and closing braces) are missing from this view; the
 * comments describe only the visible statements. */
166 ir_node *irg_args, *arg;
171 ptr_access_kind *rw_info;
173 mtp = get_entity_type(ent);
174 nparams = get_method_n_params(mtp);
/* Allocate the per-parameter result array on the entity. */
176 ent->attr.mtd_attr.param_access = NEW_ARR_F(ptr_access_kind, nparams);
178 /* If the method hasn't parameters we have
184 irg = get_entity_irg(ent);
186 /* we have not yet analyzed the graph, set ALL access for pointer args */
187 for (i = nparams - 1; i >= 0; --i) {
188 ir_type *type = get_method_param_type(mtp, i);
/* Pessimistic default: pointer parameters may be read/written/stored. */
189 ent->attr.mtd_attr.param_access[i] = is_Pointer_type(type) ? ptr_access_all : ptr_access_none;
193 /* no graph, no better info */
/* Out edges are needed to walk from the Args node to its users. */
197 assure_irg_outs(irg);
199 irg_args = get_irg_args(irg);
201 /* An array to save the information for each argument with
203 NEW_ARR_A(ptr_access_kind, rw_info, nparams);
205 /* We initialize the elements with none state. */
206 for (i = nparams - 1; i >= 0; --i)
207 rw_info[i] = ptr_access_none;
209 /* search for arguments with mode reference
211 for (i = get_irn_n_outs(irg_args) - 1; i >= 0; --i) {
212 arg = get_irn_out(irg_args, i);
213 arg_mode = get_irn_mode(arg);
/* Each user of the Args node is a Proj selecting one parameter. */
214 proj_nr = get_Proj_proj(arg);
216 if (mode_is_reference(arg_mode))
217 rw_info[proj_nr] |= analyze_arg(arg, rw_info[proj_nr]);
220 /* copy the temporary info */
221 memcpy(ent->attr.mtd_attr.param_access, rw_info,
222 nparams * sizeof(ent->attr.mtd_attr.param_access[0]));
225 void analyze_irg_args(ir_graph *irg)
/* The const-code graph has no entity/arguments to analyze. */
229 if (irg == get_const_code_irg())
232 ent = get_irg_entity(irg);
/* Compute the access info only once; a NULL array means "not yet analyzed". */
236 if (! ent->attr.mtd_attr.param_access)
237 analyze_ent_args(ent);
240 ptr_access_kind get_method_param_access(ir_entity *ent, size_t pos)
243 ir_type *mtp = get_entity_type(ent);
/* For variadic methods a position beyond the declared parameters is legal. */
244 int is_variadic = get_method_variadicity(mtp) == variadicity_variadic;
246 assert(is_variadic || pos < get_method_n_params(mtp));
/* Fast path: use the cached analysis result if available. */
249 if (ent->attr.mtd_attr.param_access) {
250 if (pos < ARR_LEN(ent->attr.mtd_attr.param_access))
251 return ent->attr.mtd_attr.param_access[pos];
/* Position outside the analyzed array (variadic argument): assume the worst. */
253 return ptr_access_all;
/* Not analyzed yet: run the analysis, then retry the lookup. */
256 analyze_ent_args(ent);
258 if (pos < ARR_LEN(ent->attr.mtd_attr.param_access))
259 return ent->attr.mtd_attr.param_access[pos];
261 return ptr_access_all;
264 /* Weights for parameters */
/* NOTE(review): the opening "enum" line of this constant block is not
 * visible in this view of the file. */
266 null_weight = 0, /**< Nothing can be optimized for this argument. */
267 binop_weight = 1, /**< The argument takes part in a binop. */
268 const_binop_weight = 1, /**< The argument takes part in a binop with a constant. */
269 cmp_weight = 4, /**< The argument takes part in a cmp. */
270 const_cmp_weight = 10, /**< The argument takes part in a cmp with a constant. */
271 indirect_call_weight = 125, /**< The argument is the address of an indirect Call. */
275 * Compute the weight of a method parameter.
277 * @param arg The parameter whose weight must be computed.
279 static unsigned calc_method_param_weight(ir_node *arg)
/* NOTE(review): several original lines (declarations of i/j/k/op/succ, case
 * labels, breaks, closing braces and the final return of "weight") are
 * missing from this view; comments describe only the visible statements. */
283 unsigned weight = null_weight;
285 /* We mark the nodes to avoid endless recursion */
286 mark_irn_visited(arg);
/* Walk all users of the argument via the out edges. */
288 for (i = get_irn_n_outs(arg) - 1; i >= 0; i--) {
289 succ = get_irn_out(arg, i);
/* Skip users already handled during this walk. */
292 if (irn_visited(succ))
295 /* We should not walk over the memory edge.*/
296 if (get_irn_mode(succ) == mode_M)
299 switch (get_irn_opcode(succ)) {
301 if (get_Call_ptr(succ) == arg) {
302 /* the argument is used as a pointer input for a call,
303 we can probably change an indirect Call into a direct one. */
304 weight += indirect_call_weight;
308 /* We have reached a cmp and we must increase the
309 weight with the cmp_weight. */
/* Pick the other Cmp operand to check whether it is constant. */
310 if (get_Cmp_left(succ) == arg)
311 op = get_Cmp_right(succ);
313 op = get_Cmp_left(succ);
315 if (is_irn_constlike(op)) {
316 weight += const_cmp_weight;
318 weight += cmp_weight;
321 /* the argument is used for a SwitchCond, a big win */
322 weight += const_cmp_weight * get_irn_n_outs(succ);
325 /* when looking backward we might find Id nodes */
326 weight += calc_method_param_weight(succ);
329 /* unoptimized tuple */
330 for (j = get_Tuple_n_preds(succ) - 1; j >= 0; --j) {
331 ir_node *pred = get_Tuple_pred(succ, j);
333 /* look for Proj(j) */
334 for (k = get_irn_n_outs(succ) - 1; k >= 0; --k) {
335 ir_node *succ_succ = get_irn_out(succ, k);
336 if (is_Proj(succ_succ)) {
337 if (get_Proj_proj(succ_succ) == j) {
/* Tuple(..., arg, ...) followed by Proj(j): continue at the Proj. */
339 weight += calc_method_param_weight(succ_succ);
342 /* this should NOT happen */
349 if (is_binop(succ)) {
350 /* We have reached a BinOp and we must increase the
351 weight with the binop_weight. If the other operand of the
352 BinOp is a constant we increase the weight with const_binop_weight
353 and call the function recursively.
355 if (get_binop_left(succ) == arg)
356 op = get_binop_right(succ);
358 op = get_binop_left(succ);
360 if (is_irn_constlike(op)) {
361 weight += const_binop_weight;
/* Constant operand: the result may fold further, follow the user. */
362 weight += calc_method_param_weight(succ);
364 weight += binop_weight;
365 } else if (is_unop(succ)) {
366 /* We have reached an unop and we must increase the
367 weight with the const_binop_weight and call the function recursively.*/
368 weight += const_binop_weight;
369 weight += calc_method_param_weight(succ);
374 set_irn_link(arg, NULL);
379 * Calculate a weight for each argument of an entity.
381 * @param ent The entity of the ir_graph.
383 static void analyze_method_params_weight(ir_entity *ent)
/* NOTE(review): some original lines (declarations of mtp/irg, early returns
 * and closing braces) are missing from this view. */
387 int nparams, i, proj_nr;
388 ir_node *irg_args, *arg;
390 mtp = get_entity_type(ent);
391 nparams = get_method_n_params(mtp);
393 /* allocate a new array. currently used as 'analysed' flag */
394 ent->attr.mtd_attr.param_weight = NEW_ARR_F(unsigned, nparams);
396 /* If the method hasn't parameters we have nothing to do. */
400 /* First we initialize the parameter weights with 0. */
401 for (i = nparams - 1; i >= 0; i--)
402 ent->attr.mtd_attr.param_weight[i] = null_weight;
404 irg = get_entity_irg(ent);
406 /* no graph, no better info */
410 /* Call algorithm that computes the out edges */
411 assure_irg_outs(irg);
413 irg_args = get_irg_args(irg);
/* Each user of the Args node is a Proj selecting one parameter; accumulate
 * the weight of every use of that parameter. */
414 for (i = get_irn_n_outs(irg_args) - 1; i >= 0; --i) {
415 arg = get_irn_out(irg_args, i);
416 proj_nr = get_Proj_proj(arg);
417 ent->attr.mtd_attr.param_weight[proj_nr] += calc_method_param_weight(arg);
421 unsigned get_method_param_weight(ir_entity *ent, size_t pos)
/* Fast path: return the cached weight if the analysis already ran. */
423 if (ent->attr.mtd_attr.param_weight) {
424 if (pos < ARR_LEN(ent->attr.mtd_attr.param_weight))
425 return ent->attr.mtd_attr.param_weight[pos];
/* Not analyzed yet: compute the weights, then retry the lookup. */
430 analyze_method_params_weight(ent);
432 if (pos < ARR_LEN(ent->attr.mtd_attr.param_weight))
433 return ent->attr.mtd_attr.param_weight[pos];
438 void analyze_irg_args_weight(ir_graph *irg)
440 ir_entity *entity = get_irg_entity(irg);
444 assert(is_method_entity(entity));
/* A non-NULL param_weight array serves as the "already analysed" flag. */
445 if (entity->attr.mtd_attr.param_weight != NULL)
/* The visited flag is used by calc_method_param_weight to break cycles;
 * reserve it for the duration of the analysis. */
448 ir_reserve_resources(irg, IR_RESOURCE_IRN_VISITED);
449 inc_irg_visited(irg);
450 analyze_method_params_weight(entity);
451 ir_free_resources(irg, IR_RESOURCE_IRN_VISITED);