1 #include "gen_sparc_regalloc_if.h"
4 ir_op *op_sparc_SubSP = NULL;
5 ir_op *op_sparc_Add = NULL;
6 ir_op *op_sparc_FrameAddr = NULL;
7 ir_op *op_sparc_Store = NULL;
8 ir_op *op_sparc_Branch = NULL;
9 ir_op *op_sparc_Mov = NULL;
10 ir_op *op_sparc_Tst = NULL;
11 ir_op *op_sparc_SwitchJmp = NULL;
12 ir_op *op_sparc_Cmp = NULL;
13 ir_op *op_sparc_SymConst = NULL;
14 ir_op *op_sparc_Sub = NULL;
15 ir_op *op_sparc_Load = NULL;
16 ir_op *op_sparc_AddSP = NULL;
18 ir_op *get_op_sparc_SubSP(void) { return op_sparc_SubSP; }
19 int is_sparc_SubSP(const ir_node *n) { return get_sparc_irn_opcode(n) == iro_sparc_SubSP; }
21 ir_op *get_op_sparc_Add(void) { return op_sparc_Add; }
22 int is_sparc_Add(const ir_node *n) { return get_sparc_irn_opcode(n) == iro_sparc_Add; }
24 ir_op *get_op_sparc_FrameAddr(void) { return op_sparc_FrameAddr; }
25 int is_sparc_FrameAddr(const ir_node *n) { return get_sparc_irn_opcode(n) == iro_sparc_FrameAddr; }
27 ir_op *get_op_sparc_Store(void) { return op_sparc_Store; }
28 int is_sparc_Store(const ir_node *n) { return get_sparc_irn_opcode(n) == iro_sparc_Store; }
30 ir_op *get_op_sparc_Branch(void) { return op_sparc_Branch; }
31 int is_sparc_Branch(const ir_node *n) { return get_sparc_irn_opcode(n) == iro_sparc_Branch; }
33 ir_op *get_op_sparc_Mov(void) { return op_sparc_Mov; }
34 int is_sparc_Mov(const ir_node *n) { return get_sparc_irn_opcode(n) == iro_sparc_Mov; }
36 ir_op *get_op_sparc_Tst(void) { return op_sparc_Tst; }
37 int is_sparc_Tst(const ir_node *n) { return get_sparc_irn_opcode(n) == iro_sparc_Tst; }
39 ir_op *get_op_sparc_SwitchJmp(void) { return op_sparc_SwitchJmp; }
40 int is_sparc_SwitchJmp(const ir_node *n) { return get_sparc_irn_opcode(n) == iro_sparc_SwitchJmp; }
42 ir_op *get_op_sparc_Cmp(void) { return op_sparc_Cmp; }
43 int is_sparc_Cmp(const ir_node *n) { return get_sparc_irn_opcode(n) == iro_sparc_Cmp; }
45 ir_op *get_op_sparc_SymConst(void) { return op_sparc_SymConst; }
46 int is_sparc_SymConst(const ir_node *n) { return get_sparc_irn_opcode(n) == iro_sparc_SymConst; }
48 ir_op *get_op_sparc_Sub(void) { return op_sparc_Sub; }
49 int is_sparc_Sub(const ir_node *n) { return get_sparc_irn_opcode(n) == iro_sparc_Sub; }
51 ir_op *get_op_sparc_Load(void) { return op_sparc_Load; }
52 int is_sparc_Load(const ir_node *n) { return get_sparc_irn_opcode(n) == iro_sparc_Load; }
54 ir_op *get_op_sparc_AddSP(void) { return op_sparc_AddSP; }
55 int is_sparc_AddSP(const ir_node *n) { return get_sparc_irn_opcode(n) == iro_sparc_AddSP; }
/* Global opcode range [start, end) occupied by the sparc ops.
 * Both are assigned in sparc_create_opcodes(); -1 means "not yet created". */
static int sparc_opcode_start = -1;
static int sparc_opcode_end = -1;
/** A tag for the sparc opcodes: the FOURCC code 'SPAR' is used as the tag value.
 * (The previous comment claimed the address was used as the tag, which
 * contradicts the definition below and is_sparc_op()'s value comparison.) */
#define sparc_op_tag FOURCC('S', 'P', 'A', 'R')
66 /** Return the opcode number of the first sparc opcode. */
67 int get_sparc_opcode_first(void) {
68 return sparc_opcode_start;
71 /** Return the opcode number of the last sparc opcode + 1. */
72 int get_sparc_opcode_last(void) {
73 return sparc_opcode_end;
76 /** Return 1 if the given opcode is a sparc machine op, 0 otherwise */
77 int is_sparc_op(const ir_op *op) {
78 return get_op_tag(op) == sparc_op_tag;
81 /** Return 1 if the given node is a sparc machine node, 0 otherwise */
82 int is_sparc_irn(const ir_node *node) {
83 return is_sparc_op(get_irn_op(node));
86 int get_sparc_irn_opcode(const ir_node *node) {
87 if (is_sparc_irn(node))
88 return get_irn_opcode(node) - sparc_opcode_start;
95 #define BIT(x) (1 << (x % 32))
97 static const unsigned sparc_limit_gp_sp[] = { BIT(REG_SP), 0 };
/* Requirement: gp class, limited to the stack pointer.
 * NOTE(review): the embedded numbering jumps 101 -> 104, so the limit-bitset
 * (presumably sparc_limit_gp_sp) and "same pos" initializer lines plus the
 * closing "};" are missing from this listing — restore before compiling. */
99 static const arch_register_req_t sparc_requirements_gp_sp = {
100 arch_register_req_type_limited,
101 & sparc_reg_classes[CLASS_sparc_gp],
104 0 /* different pos */
/* Requirement: any register of the gp class.
 * NOTE(review): numbering jumps 111 -> 113; the "same pos" line and the
 * closing "};" are missing from this listing. */
108 static const arch_register_req_t sparc_requirements_gp_gp = {
109 arch_register_req_type_normal,
110 & sparc_reg_classes[CLASS_sparc_gp],
111 NULL, /* limit bitset */
113 0 /* different pos */
/* Requirement: gp class limited to SP, flagged ignore + produces_sp — used
 * for the stack-pointer results of AddSP/SubSP below.
 * NOTE(review): numbering jumps 119 -> 122; the limit-bitset and "same pos"
 * lines plus the closing "};" are missing from this listing. */
117 static const arch_register_req_t sparc_requirements_gp_sp_I_S = {
118 arch_register_req_type_ignore | arch_register_req_type_produces_sp | arch_register_req_type_limited,
119 & sparc_reg_classes[CLASS_sparc_gp],
122 0 /* different pos */
/* Requirement: no register (used for memory inputs/outputs).
 * NOTE(review): numbering gaps (127 -> 129 -> 131) — the register-class and
 * "same pos" lines plus the closing "};" are missing from this listing. */
126 static const arch_register_req_t sparc_requirements__none = {
127 arch_register_req_type_none,
129 NULL, /* limit bitset */
131 0 /* different pos */
/* Requirement: any register of the flags class (Cmp/Tst results, Branch input).
 * NOTE(review): numbering jumps 138 -> 140; the "same pos" line and the
 * closing "};" are missing from this listing. */
135 static const arch_register_req_t sparc_requirements_flags_flags = {
136 arch_register_req_type_normal,
137 & sparc_reg_classes[CLASS_sparc_flags],
138 NULL, /* limit bitset */
140 0 /* different pos */
/* Construct a SubSP node (stack, size, mem -> mode_T); out 0 is the new SP
 * (ignore/produces_sp requirement), out 1 is unconstrained.
 * NOTE(review): interior lines (declarations of res/arity/in/flags/n_res,
 * braces, return) are missing — see the gaps in the embedded numbering. */
148 ir_node *new_bd_sparc_SubSP(dbg_info *dbgi, ir_node *block, ir_node *stack, ir_node *size, ir_node *mem)
151 ir_op *op = op_sparc_SubSP;
153 backend_info_t *info;
157 ir_mode *mode = mode_T;
158 static const be_execution_unit_t ***exec_units = NULL;
159 static const arch_register_req_t *in_reqs[] =
161 & sparc_requirements_gp_sp,
162 & sparc_requirements_gp_gp,
163 & sparc_requirements__none,
166 /* construct in array */
173 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
175 /* init node attributes */
176 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
178 info = be_get_info(res);
179 info->out_infos[0].req = &sparc_requirements_gp_sp_I_S;
180 info->out_infos[1].req = &sparc_requirements__none;
184 res = optimize_node(res);
185 irn_vrfy_irg(res, current_ir_graph);
/* Construct an Add with immediate operand (left, imm -> mode_Iu gp result);
 * marked rematerializable.
 * NOTE(review): interior lines (res/arity/in/flags/n_res declarations,
 * braces, return) are missing — see the gaps in the embedded numbering. */
193 ir_node *new_bd_sparc_Add_imm(dbg_info *dbgi, ir_node *block, ir_node *left, int immediate_value)
196 ir_op *op = op_sparc_Add;
198 backend_info_t *info;
202 ir_mode *mode = mode_Iu;
203 static const be_execution_unit_t ***exec_units = NULL;
204 static const arch_register_req_t *in_reqs[] =
206 & sparc_requirements_gp_gp,
209 /* construct in array */
213 flags |= arch_irn_flags_rematerializable;
217 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
219 /* init node attributes */
220 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
221 sparc_set_attr_imm(res, immediate_value);
222 info = be_get_info(res);
223 info->out_infos[0].req = &sparc_requirements_gp_gp;
227 res = optimize_node(res);
228 irn_vrfy_irg(res, current_ir_graph);
/* Construct a register-register Add (left, right -> mode_Iu gp result);
 * marked rematerializable.
 * NOTE(review): interior lines (res/arity/in/flags/n_res declarations,
 * braces, return) are missing — see the gaps in the embedded numbering. */
236 ir_node *new_bd_sparc_Add_reg(dbg_info *dbgi, ir_node *block, ir_node *left, ir_node *right)
239 ir_op *op = op_sparc_Add;
241 backend_info_t *info;
245 ir_mode *mode = mode_Iu;
246 static const be_execution_unit_t ***exec_units = NULL;
247 static const arch_register_req_t *in_reqs[] =
249 & sparc_requirements_gp_gp,
250 & sparc_requirements_gp_gp,
253 /* construct in array */
258 flags |= arch_irn_flags_rematerializable;
262 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
264 /* init node attributes */
265 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
267 info = be_get_info(res);
268 info->out_infos[0].req = &sparc_requirements_gp_gp;
272 res = optimize_node(res);
273 irn_vrfy_irg(res, current_ir_graph);
/* Construct a FrameAddr node (base, frame entity -> mode_Iu gp result);
 * stores the entity via the symconst attributes; rematerializable.
 * NOTE(review): interior lines (res/arity/in/flags/n_res declarations,
 * braces, return) are missing — see the gaps in the embedded numbering. */
279 * construct FrameAddr node
281 ir_node *new_bd_sparc_FrameAddr(dbg_info *dbgi, ir_node *block, ir_node *base, ir_entity *entity)
284 ir_op *op = op_sparc_FrameAddr;
286 backend_info_t *info;
290 ir_mode *mode = mode_Iu;
291 static const be_execution_unit_t ***exec_units = NULL;
292 static const arch_register_req_t *in_reqs[] =
294 & sparc_requirements_gp_gp,
297 /* construct in array */
301 flags |= arch_irn_flags_rematerializable;
305 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
307 /* init node attributes */
308 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
309 init_sparc_symconst_attributes(res, entity);
311 info = be_get_info(res);
312 info->out_infos[0].req = &sparc_requirements_gp_gp;
316 res = optimize_node(res);
317 irn_vrfy_irg(res, current_ir_graph);
/* Construct a Store node (ptr, val, mem -> mode_M); load/store attributes
 * record the access mode, entity, sign, offset and frame-entity flag.
 * NOTE(review): interior lines (res/arity/in/flags/n_res declarations,
 * braces, return) are missing — see the gaps in the embedded numbering. */
323 * construct Store: Store(ptr, val, mem) = ST ptr,val
325 ir_node *new_bd_sparc_Store(dbg_info *dbgi, ir_node *block, ir_node *ptr, ir_node *val, ir_node *mem, ir_mode *ls_mode, ir_entity *entity, int entity_sign, long offset, bool is_frame_entity)
328 ir_op *op = op_sparc_Store;
330 backend_info_t *info;
334 ir_mode *mode = mode_M;
335 static const be_execution_unit_t ***exec_units = NULL;
336 static const arch_register_req_t *in_reqs[] =
338 & sparc_requirements_gp_gp,
339 & sparc_requirements_gp_gp,
340 & sparc_requirements__none,
343 /* construct in array */
350 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
352 /* init node attributes */
353 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
354 init_sparc_load_store_attributes(res, ls_mode, entity, entity_sign, offset, is_frame_entity);
356 info = be_get_info(res);
357 info->out_infos[0].req = &sparc_requirements__none;
361 res = optimize_node(res);
362 irn_vrfy_irg(res, current_ir_graph);
/* Construct a conditional Branch (flags input -> mode_T with two
 * unconstrained control-flow outputs); proj_num selects the condition.
 * NOTE(review): interior lines (res/arity/in/flags/n_res declarations,
 * braces, return) are missing — see the gaps in the embedded numbering. */
368 * construct Branch node
370 ir_node *new_bd_sparc_Branch(dbg_info *dbgi, ir_node *block, ir_node *op0, int proj_num)
373 ir_op *op = op_sparc_Branch;
375 backend_info_t *info;
379 ir_mode *mode = mode_T;
380 static const be_execution_unit_t ***exec_units = NULL;
381 static const arch_register_req_t *in_reqs[] =
383 & sparc_requirements_flags_flags,
385 sparc_jmp_cond_attr_t *attr;
387 /* construct in array */
392 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
394 /* init node attributes */
395 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
397 info = be_get_info(res);
398 info->out_infos[0].req = &sparc_requirements__none;
399 info->out_infos[1].req = &sparc_requirements__none;
402 attr = get_irn_generic_attr(res);
403 set_sparc_jmp_cond_proj_num(res, proj_num);
405 res = optimize_node(res);
406 irn_vrfy_irg(res, current_ir_graph);
/* Construct a Mov of an immediate (no inputs -> mode_Iu gp result);
 * rematerializable.
 * NOTE(review): interior lines (res/arity/in/flags/n_res declarations,
 * braces, return) are missing — see the gaps in the embedded numbering. */
414 ir_node *new_bd_sparc_Mov_imm(dbg_info *dbgi, ir_node *block, int immediate_value)
417 ir_op *op = op_sparc_Mov;
419 backend_info_t *info;
423 ir_mode *mode = mode_Iu;
424 static const be_execution_unit_t ***exec_units = NULL;
425 static const arch_register_req_t **in_reqs = NULL;
428 flags |= arch_irn_flags_rematerializable;
432 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
434 /* init node attributes */
435 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
436 sparc_set_attr_imm(res, immediate_value);
437 info = be_get_info(res);
438 info->out_infos[0].req = &sparc_requirements_gp_gp;
442 res = optimize_node(res);
443 irn_vrfy_irg(res, current_ir_graph);
/* Construct a register-to-register Mov (op0 -> mode_Iu gp result);
 * rematerializable.
 * NOTE(review): interior lines (res/arity/in/flags/n_res declarations,
 * braces, return) are missing — see the gaps in the embedded numbering. */
451 ir_node *new_bd_sparc_Mov_reg(dbg_info *dbgi, ir_node *block, ir_node *op0)
454 ir_op *op = op_sparc_Mov;
456 backend_info_t *info;
460 ir_mode *mode = mode_Iu;
461 static const be_execution_unit_t ***exec_units = NULL;
462 static const arch_register_req_t *in_reqs[] =
464 & sparc_requirements_gp_gp,
467 /* construct in array */
471 flags |= arch_irn_flags_rematerializable;
475 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
477 /* init node attributes */
478 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
480 info = be_get_info(res);
481 info->out_infos[0].req = &sparc_requirements_gp_gp;
485 res = optimize_node(res);
486 irn_vrfy_irg(res, current_ir_graph);
/* Construct a Tst node (left -> flags result, mode_Bu); modifies the flags
 * register and is rematerializable; cmp attributes record permutation and
 * signedness.
 * NOTE(review): interior lines (res/arity/in/flags/n_res declarations,
 * braces, return) are missing — see the gaps in the embedded numbering. */
494 ir_node *new_bd_sparc_Tst(dbg_info *dbgi, ir_node *block, ir_node *left, bool ins_permuted, bool is_unsigned)
497 ir_op *op = op_sparc_Tst;
499 backend_info_t *info;
503 ir_mode *mode = mode_Bu;
504 static const be_execution_unit_t ***exec_units = NULL;
505 static const arch_register_req_t *in_reqs[] =
507 & sparc_requirements_gp_gp,
510 /* construct in array */
514 flags |= arch_irn_flags_rematerializable;
515 flags |= arch_irn_flags_modify_flags;
519 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
521 /* init node attributes */
522 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
524 init_sparc_cmp_attr(res, ins_permuted, is_unsigned);
525 info = be_get_info(res);
526 info->out_infos[0].req = &sparc_requirements_flags_flags;
530 res = optimize_node(res);
531 irn_vrfy_irg(res, current_ir_graph);
/* Construct a SwitchJmp (gp selector -> mode_T); switch attributes record
 * the number of projs and the default proj number.
 * NOTE(review): interior lines (res/arity/in/flags/n_res declarations,
 * braces, return) are missing — see the gaps in the embedded numbering. */
537 * construct SwitchJmp node
539 ir_node *new_bd_sparc_SwitchJmp(dbg_info *dbgi, ir_node *block, ir_node *op0, int n_projs, long def_proj_num)
542 ir_op *op = op_sparc_SwitchJmp;
544 backend_info_t *info;
548 ir_mode *mode = mode_T;
549 static const be_execution_unit_t ***exec_units = NULL;
550 static const arch_register_req_t *in_reqs[] =
552 & sparc_requirements_gp_gp,
554 sparc_jmp_switch_attr_t *attr;
556 /* construct in array */
561 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
563 /* init node attributes */
564 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
566 info = be_get_info(res);
567 info->out_infos[0].req = &sparc_requirements__none;
570 attr = get_irn_generic_attr(res);
571 set_sparc_jmp_switch_n_projs(res, n_projs);
572 set_sparc_jmp_switch_default_proj_num(res, def_proj_num);
574 res = optimize_node(res);
575 irn_vrfy_irg(res, current_ir_graph);
/* Construct a Cmp against an immediate (left, imm -> flags result, mode_Bu);
 * modifies flags, rematerializable; stores both the immediate and the cmp
 * attributes.
 * NOTE(review): interior lines (res/arity/in/flags/n_res declarations,
 * braces, return) are missing — see the gaps in the embedded numbering. */
583 ir_node *new_bd_sparc_Cmp_imm(dbg_info *dbgi, ir_node *block, ir_node *left, int immediate_value, bool ins_permuted, bool is_unsigned)
586 ir_op *op = op_sparc_Cmp;
588 backend_info_t *info;
592 ir_mode *mode = mode_Bu;
593 static const be_execution_unit_t ***exec_units = NULL;
594 static const arch_register_req_t *in_reqs[] =
596 & sparc_requirements_gp_gp,
599 /* construct in array */
603 flags |= arch_irn_flags_rematerializable;
604 flags |= arch_irn_flags_modify_flags;
608 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
610 /* init node attributes */
611 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
613 sparc_set_attr_imm(res, immediate_value); init_sparc_cmp_attr(res, ins_permuted, is_unsigned);
614 info = be_get_info(res);
615 info->out_infos[0].req = &sparc_requirements_flags_flags;
619 res = optimize_node(res);
620 irn_vrfy_irg(res, current_ir_graph);
/* Construct a register-register Cmp (left, right -> flags result, mode_Bu);
 * modifies flags, rematerializable.
 * NOTE(review): interior lines (res/arity/in/flags/n_res declarations,
 * braces, return) are missing — see the gaps in the embedded numbering. */
628 ir_node *new_bd_sparc_Cmp_reg(dbg_info *dbgi, ir_node *block, ir_node *left, ir_node *right, bool ins_permuted, bool is_unsigned)
631 ir_op *op = op_sparc_Cmp;
633 backend_info_t *info;
637 ir_mode *mode = mode_Bu;
638 static const be_execution_unit_t ***exec_units = NULL;
639 static const arch_register_req_t *in_reqs[] =
641 & sparc_requirements_gp_gp,
642 & sparc_requirements_gp_gp,
645 /* construct in array */
650 flags |= arch_irn_flags_rematerializable;
651 flags |= arch_irn_flags_modify_flags;
655 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
657 /* init node attributes */
658 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
660 init_sparc_cmp_attr(res, ins_permuted, is_unsigned);
661 info = be_get_info(res);
662 info->out_infos[0].req = &sparc_requirements_flags_flags;
666 res = optimize_node(res);
667 irn_vrfy_irg(res, current_ir_graph);
/* Construct a SymConst (entity address -> mode_Iu gp result, no inputs);
 * rematerializable; entity stored via the symconst attributes.
 * NOTE(review): interior lines (res/arity/in/flags/n_res declarations,
 * braces, return) are missing — see the gaps in the embedded numbering. */
673 * construct SymConst node
675 ir_node *new_bd_sparc_SymConst(dbg_info *dbgi, ir_node *block, ir_entity *entity)
678 ir_op *op = op_sparc_SymConst;
680 backend_info_t *info;
684 ir_mode *mode = mode_Iu;
685 static const be_execution_unit_t ***exec_units = NULL;
686 static const arch_register_req_t **in_reqs = NULL;
689 flags |= arch_irn_flags_rematerializable;
693 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
695 /* init node attributes */
696 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
697 init_sparc_symconst_attributes(res, entity);
699 info = be_get_info(res);
700 info->out_infos[0].req = &sparc_requirements_gp_gp;
704 res = optimize_node(res);
705 irn_vrfy_irg(res, current_ir_graph);
/* Construct a Sub with immediate operand (left, imm -> mode_Iu gp result);
 * rematerializable.
 * NOTE(review): interior lines (res/arity/in/flags/n_res declarations,
 * braces, return) are missing — see the gaps in the embedded numbering. */
713 ir_node *new_bd_sparc_Sub_imm(dbg_info *dbgi, ir_node *block, ir_node *left, int immediate_value)
716 ir_op *op = op_sparc_Sub;
718 backend_info_t *info;
722 ir_mode *mode = mode_Iu;
723 static const be_execution_unit_t ***exec_units = NULL;
724 static const arch_register_req_t *in_reqs[] =
726 & sparc_requirements_gp_gp,
729 /* construct in array */
733 flags |= arch_irn_flags_rematerializable;
737 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
739 /* init node attributes */
740 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
741 sparc_set_attr_imm(res, immediate_value);
742 info = be_get_info(res);
743 info->out_infos[0].req = &sparc_requirements_gp_gp;
747 res = optimize_node(res);
748 irn_vrfy_irg(res, current_ir_graph);
/* Construct a register-register Sub (left, right -> mode_Iu gp result);
 * rematerializable.
 * NOTE(review): interior lines (res/arity/in/flags/n_res declarations,
 * braces, return) are missing — see the gaps in the embedded numbering. */
756 ir_node *new_bd_sparc_Sub_reg(dbg_info *dbgi, ir_node *block, ir_node *left, ir_node *right)
759 ir_op *op = op_sparc_Sub;
761 backend_info_t *info;
765 ir_mode *mode = mode_Iu;
766 static const be_execution_unit_t ***exec_units = NULL;
767 static const arch_register_req_t *in_reqs[] =
769 & sparc_requirements_gp_gp,
770 & sparc_requirements_gp_gp,
773 /* construct in array */
778 flags |= arch_irn_flags_rematerializable;
782 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
784 /* init node attributes */
785 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
787 info = be_get_info(res);
788 info->out_infos[0].req = &sparc_requirements_gp_gp;
792 res = optimize_node(res);
793 irn_vrfy_irg(res, current_ir_graph);
/* Construct a Load node (ptr, mem -> mode_T: gp value out 0, memory out 1);
 * load/store attributes record access mode, entity, sign, offset and
 * frame-entity flag.
 * NOTE(review): interior lines (res/arity/in/flags/n_res declarations,
 * braces, return) are missing — see the gaps in the embedded numbering. */
799 * construct Load: Load(ptr, mem) = LD ptr -> reg
801 ir_node *new_bd_sparc_Load(dbg_info *dbgi, ir_node *block, ir_node *ptr, ir_node *mem, ir_mode *ls_mode, ir_entity *entity, int entity_sign, long offset, bool is_frame_entity)
804 ir_op *op = op_sparc_Load;
806 backend_info_t *info;
810 ir_mode *mode = mode_T;
811 static const be_execution_unit_t ***exec_units = NULL;
812 static const arch_register_req_t *in_reqs[] =
814 & sparc_requirements_gp_gp,
815 & sparc_requirements__none,
818 /* construct in array */
824 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
826 /* init node attributes */
827 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
828 init_sparc_load_store_attributes(res, ls_mode, entity, entity_sign, offset, is_frame_entity);
830 info = be_get_info(res);
831 info->out_infos[0].req = &sparc_requirements_gp_gp;
832 info->out_infos[1].req = &sparc_requirements__none;
836 res = optimize_node(res);
837 irn_vrfy_irg(res, current_ir_graph);
/* Construct an AddSP node (stack, size, mem -> mode_T): out 0 new SP
 * (ignore/produces_sp), out 1 gp, out 2 memory.
 * NOTE(review): interior lines (res/arity/in/flags/n_res declarations,
 * braces, return) are missing — see the gaps in the embedded numbering. */
845 ir_node *new_bd_sparc_AddSP(dbg_info *dbgi, ir_node *block, ir_node *stack, ir_node *size, ir_node *mem)
848 ir_op *op = op_sparc_AddSP;
850 backend_info_t *info;
854 ir_mode *mode = mode_T;
855 static const be_execution_unit_t ***exec_units = NULL;
856 static const arch_register_req_t *in_reqs[] =
858 & sparc_requirements_gp_sp,
859 & sparc_requirements_gp_gp,
860 & sparc_requirements__none,
863 /* construct in array */
870 res = new_ir_node(dbgi, current_ir_graph, block, op, mode, arity, in);
872 /* init node attributes */
873 init_sparc_attributes(res, flags, in_reqs, exec_units, n_res);
875 info = be_get_info(res);
876 info->out_infos[0].req = &sparc_requirements_gp_sp_I_S;
877 info->out_infos[1].req = &sparc_requirements_gp_gp;
878 info->out_infos[2].req = &sparc_requirements__none;
882 res = optimize_node(res);
883 irn_vrfy_irg(res, current_ir_graph);
/* Creates all sparc ir_op objects, tags each with sparc_op_tag, and records
 * the opcode range in sparc_opcode_start/sparc_opcode_end.
 * NOTE(review): interior lines are missing from this listing (gaps in the
 * embedded numbering): the body after "static int run_once" (presumably the
 * run-once guard and the ir_op_ops declaration), an "ops.be_ops = be_ops;"
 * line in most memset blocks (present only for Sub/Load/AddSP here, which
 * would otherwise be an inconsistency), the #undef lines, and the closing
 * brace. Restore from the generator before compiling. */
891 * Creates the sparc specific Firm machine operations
892 * needed for the assembler irgs.
894 void sparc_create_opcodes(const arch_irn_ops_t *be_ops) {
895 #define N irop_flag_none
896 #define L irop_flag_labeled
897 #define C irop_flag_commutative
898 #define X irop_flag_cfopcode
899 #define I irop_flag_ip_cfopcode
900 #define F irop_flag_fragile
901 #define Y irop_flag_forking
902 #define H irop_flag_highlevel
903 #define c irop_flag_constlike
904 #define K irop_flag_keep
905 #define M irop_flag_machine
906 #define O irop_flag_machine_op
907 #define NB irop_flag_dump_noblock
908 #define NI irop_flag_dump_noinput
909 #define R (irop_flag_user << 0)
913 static int run_once = 0;
919 cur_opcode = get_next_ir_opcodes(iro_sparc_last);
921 sparc_opcode_start = cur_opcode;
923 memset(&ops, 0, sizeof(ops));
925 ops.dump_node = sparc_dump_node;
926 ops.node_cmp_attr = cmp_attr_sparc;
927 ops.copy_attr = sparc_copy_attr;
928 op_sparc_SubSP = new_ir_op(cur_opcode + iro_sparc_SubSP, "sparc_SubSP", op_pin_state_floats, N|M, oparity_trinary, 0, sizeof(sparc_attr_t), &ops);
929 set_op_tag(op_sparc_SubSP, sparc_op_tag);
931 memset(&ops, 0, sizeof(ops));
933 ops.dump_node = sparc_dump_node;
934 ops.node_cmp_attr = cmp_attr_sparc;
935 ops.copy_attr = sparc_copy_attr;
936 op_sparc_Add = new_ir_op(cur_opcode + iro_sparc_Add, "sparc_Add", op_pin_state_floats, C|M, oparity_zero, 0, sizeof(sparc_attr_t), &ops);
937 set_op_tag(op_sparc_Add, sparc_op_tag);
939 memset(&ops, 0, sizeof(ops));
941 ops.dump_node = sparc_dump_node;
942 ops.node_cmp_attr = cmp_attr_sparc_symconst;
943 ops.copy_attr = sparc_copy_attr;
944 op_sparc_FrameAddr = new_ir_op(cur_opcode + iro_sparc_FrameAddr, "sparc_FrameAddr", op_pin_state_floats, c|M, oparity_unary, 0, sizeof(sparc_symconst_attr_t), &ops);
945 set_op_tag(op_sparc_FrameAddr, sparc_op_tag);
947 memset(&ops, 0, sizeof(ops));
949 ops.dump_node = sparc_dump_node;
950 ops.node_cmp_attr = cmp_attr_sparc_load_store;
951 ops.copy_attr = sparc_copy_attr;
952 op_sparc_Store = new_ir_op(cur_opcode + iro_sparc_Store, "sparc_Store", op_pin_state_exc_pinned, L|F|M, oparity_trinary, 0, sizeof(sparc_load_store_attr_t), &ops);
953 set_op_tag(op_sparc_Store, sparc_op_tag);
955 memset(&ops, 0, sizeof(ops));
957 ops.dump_node = sparc_dump_node;
958 ops.node_cmp_attr = cmp_attr_sparc_jmp_cond;
959 ops.copy_attr = sparc_copy_attr;
960 op_sparc_Branch = new_ir_op(cur_opcode + iro_sparc_Branch, "sparc_Branch", op_pin_state_pinned, L|X|Y|M, oparity_unary, 0, sizeof(sparc_jmp_cond_attr_t), &ops);
961 set_op_tag(op_sparc_Branch, sparc_op_tag);
963 memset(&ops, 0, sizeof(ops));
965 ops.dump_node = sparc_dump_node;
966 ops.node_cmp_attr = cmp_attr_sparc;
967 ops.copy_attr = sparc_copy_attr;
968 op_sparc_Mov = new_ir_op(cur_opcode + iro_sparc_Mov, "sparc_Mov", op_pin_state_floats, N|M, oparity_variable, 0, sizeof(sparc_attr_t), &ops);
969 set_op_tag(op_sparc_Mov, sparc_op_tag);
971 memset(&ops, 0, sizeof(ops));
973 ops.dump_node = sparc_dump_node;
974 ops.node_cmp_attr = cmp_attr_sparc_cmp;
975 ops.copy_attr = sparc_copy_attr;
976 op_sparc_Tst = new_ir_op(cur_opcode + iro_sparc_Tst, "sparc_Tst", op_pin_state_floats, N|M, oparity_unary, 0, sizeof(sparc_cmp_attr_t), &ops);
977 set_op_tag(op_sparc_Tst, sparc_op_tag);
979 memset(&ops, 0, sizeof(ops));
981 ops.dump_node = sparc_dump_node;
982 ops.node_cmp_attr = cmp_attr_sparc_jmp_switch;
983 ops.copy_attr = sparc_copy_attr;
984 op_sparc_SwitchJmp = new_ir_op(cur_opcode + iro_sparc_SwitchJmp, "sparc_SwitchJmp", op_pin_state_pinned, L|X|Y|M, oparity_unary, 0, sizeof(sparc_jmp_switch_attr_t), &ops);
985 set_op_tag(op_sparc_SwitchJmp, sparc_op_tag);
987 memset(&ops, 0, sizeof(ops));
989 ops.dump_node = sparc_dump_node;
990 ops.node_cmp_attr = cmp_attr_sparc_cmp;
991 ops.copy_attr = sparc_copy_attr;
992 op_sparc_Cmp = new_ir_op(cur_opcode + iro_sparc_Cmp, "sparc_Cmp", op_pin_state_floats, N|M, oparity_zero, 0, sizeof(sparc_cmp_attr_t), &ops);
993 set_op_tag(op_sparc_Cmp, sparc_op_tag);
995 memset(&ops, 0, sizeof(ops));
997 ops.dump_node = sparc_dump_node;
998 ops.node_cmp_attr = cmp_attr_sparc_symconst;
999 ops.copy_attr = sparc_copy_attr;
1000 op_sparc_SymConst = new_ir_op(cur_opcode + iro_sparc_SymConst, "sparc_SymConst", op_pin_state_floats, c|M, oparity_zero, 0, sizeof(sparc_symconst_attr_t), &ops);
1001 set_op_tag(op_sparc_SymConst, sparc_op_tag);
1003 memset(&ops, 0, sizeof(ops));
1004 ops.be_ops = be_ops;
1005 ops.dump_node = sparc_dump_node;
1006 ops.node_cmp_attr = cmp_attr_sparc;
1007 ops.copy_attr = sparc_copy_attr;
1008 op_sparc_Sub = new_ir_op(cur_opcode + iro_sparc_Sub, "sparc_Sub", op_pin_state_floats, N|M, oparity_binary, 0, sizeof(sparc_attr_t), &ops);
1009 set_op_tag(op_sparc_Sub, sparc_op_tag);
1011 memset(&ops, 0, sizeof(ops));
1012 ops.be_ops = be_ops;
1013 ops.dump_node = sparc_dump_node;
1014 ops.node_cmp_attr = cmp_attr_sparc_load_store;
1015 ops.copy_attr = sparc_copy_attr;
1016 op_sparc_Load = new_ir_op(cur_opcode + iro_sparc_Load, "sparc_Load", op_pin_state_exc_pinned, L|F|M, oparity_binary, 0, sizeof(sparc_load_store_attr_t), &ops);
1017 set_op_tag(op_sparc_Load, sparc_op_tag);
1019 memset(&ops, 0, sizeof(ops));
1020 ops.be_ops = be_ops;
1021 ops.dump_node = sparc_dump_node;
1022 ops.node_cmp_attr = cmp_attr_sparc;
1023 ops.copy_attr = sparc_copy_attr;
1024 op_sparc_AddSP = new_ir_op(cur_opcode + iro_sparc_AddSP, "sparc_AddSP", op_pin_state_floats, N|M, oparity_trinary, 0, sizeof(sparc_attr_t), &ops);
1025 set_op_tag(op_sparc_AddSP, sparc_op_tag);
1027 sparc_opcode_end = cur_opcode + iro_sparc_last;