1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 92, 94, 95, 96, 97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
26 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-flags.h"
32 #include "insn-attr.h"
41 #ifdef EXTRA_CONSTRAINT
42 /* If EXTRA_CONSTRAINT is defined, then the 'S'
43 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
44 asm statements that need 'S' for class SIREG will break. */
45 error EXTRA_CONSTRAINT conflicts with S constraint letter
46 /* The previous line used to be #error, but some compilers barf
47 even if the conditional was untrue. */
50 #ifndef CHECK_STACK_LIMIT
51 #define CHECK_STACK_LIMIT -1
54 /* Type of an operand for ix86_{binary,unary}_operator_ok */
62 /* Processor costs (relative to an add) */
63 struct processor_costs i386_cost
= { /* 386 specific costs */
64 1, /* cost of an add instruction */
65 1, /* cost of a lea instruction */
66 3, /* variable shift costs */
67 2, /* constant shift costs */
68 6, /* cost of starting a multiply */
69 1, /* cost of multiply per each bit set */
70 23 /* cost of a divide/mod */
73 struct processor_costs i486_cost
= { /* 486 specific costs */
74 1, /* cost of an add instruction */
75 1, /* cost of a lea instruction */
76 3, /* variable shift costs */
77 2, /* constant shift costs */
78 12, /* cost of starting a multiply */
79 1, /* cost of multiply per each bit set */
80 40 /* cost of a divide/mod */
83 struct processor_costs pentium_cost
= {
84 1, /* cost of an add instruction */
85 1, /* cost of a lea instruction */
86 4, /* variable shift costs */
87 1, /* constant shift costs */
88 11, /* cost of starting a multiply */
89 0, /* cost of multiply per each bit set */
90 25 /* cost of a divide/mod */
93 struct processor_costs pentiumpro_cost
= {
94 1, /* cost of an add instruction */
95 1, /* cost of a lea instruction */
96 3, /* variable shift costs */
97 1, /* constant shift costs */
98 4, /* cost of starting a multiply */
99 0, /* cost of multiply per each bit set */
100 17 /* cost of a divide/mod */
103 struct processor_costs k6_cost
= {
104 1, /* cost of an add instruction */
105 1, /* cost of a lea instruction */
106 1, /* variable shift costs */
107 1, /* constant shift costs */
108 2, /* cost of starting a multiply */
109 0, /* cost of multiply per each bit set */
110 18 /* cost of a divide/mod */
113 struct processor_costs
*ix86_cost
= &pentium_cost
;
115 /* Processor feature/optimization bitmasks. */
116 #define m_386 (1<<PROCESSOR_I386)
117 #define m_486 (1<<PROCESSOR_I486)
118 #define m_PENT (1<<PROCESSOR_PENTIUM)
119 #define m_PPRO (1<<PROCESSOR_PENTIUMPRO)
120 #define m_K6 (1<<PROCESSOR_K6)
122 const int x86_use_leave
= m_386
| m_K6
;
123 const int x86_push_memory
= m_386
| m_K6
;
124 const int x86_zero_extend_with_and
= m_486
| m_PENT
;
125 const int x86_movx
= m_386
| m_PPRO
| m_K6
;
126 const int x86_double_with_add
= ~m_386
;
127 const int x86_use_bit_test
= m_386
;
128 const int x86_unroll_strlen
= m_486
| m_PENT
| m_PPRO
;
129 const int x86_use_q_reg
= m_PENT
| m_PPRO
| m_K6
;
130 const int x86_use_any_reg
= m_486
;
131 const int x86_cmove
= m_PPRO
;
132 const int x86_deep_branch
= m_PPRO
| m_K6
;
134 #define AT_BP(mode) (gen_rtx_MEM ((mode), frame_pointer_rtx))
136 extern FILE *asm_out_file
;
137 extern char *strcat ();
139 static void ix86_epilogue
PROTO((int));
140 static void ix86_prologue
PROTO((int));
142 char *singlemove_string ();
143 char *output_move_const_single ();
144 char *output_fp_cc0_set ();
146 char *hi_reg_name
[] = HI_REGISTER_NAMES
;
147 char *qi_reg_name
[] = QI_REGISTER_NAMES
;
148 char *qi_high_reg_name
[] = QI_HIGH_REGISTER_NAMES
;
150 /* Array of the smallest class containing reg number REGNO, indexed by
151 REGNO. Used by REGNO_REG_CLASS in i386.h. */
153 enum reg_class regclass_map
[FIRST_PSEUDO_REGISTER
] =
156 AREG
, DREG
, CREG
, BREG
,
158 SIREG
, DIREG
, INDEX_REGS
, GENERAL_REGS
,
160 FP_TOP_REG
, FP_SECOND_REG
, FLOAT_REGS
, FLOAT_REGS
,
161 FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
,
166 /* Test and compare insns in i386.md store the information needed to
167 generate branch and scc insns here. */
169 struct rtx_def
*i386_compare_op0
= NULL_RTX
;
170 struct rtx_def
*i386_compare_op1
= NULL_RTX
;
171 struct rtx_def
*(*i386_compare_gen
)(), *(*i386_compare_gen_eq
)();
173 /* which cpu are we scheduling for */
174 enum processor_type ix86_cpu
;
176 /* which instruction set architecture to use. */
179 /* Strings to hold which cpu and instruction set architecture to use. */
180 char *ix86_cpu_string
; /* for -mcpu=<xxx> */
181 char *ix86_arch_string
; /* for -march=<xxx> */
183 /* Register allocation order */
184 char *i386_reg_alloc_order
;
185 static char regs_allocated
[FIRST_PSEUDO_REGISTER
];
187 /* # of registers to use to pass arguments. */
188 char *i386_regparm_string
;
190 /* i386_regparm_string as a number */
193 /* Alignment to use for loops and jumps: */
195 /* Power of two alignment for loops. */
196 char *i386_align_loops_string
;
198 /* Power of two alignment for non-loop jumps. */
199 char *i386_align_jumps_string
;
201 /* Values 1-5: see jump.c */
202 int i386_branch_cost
;
203 char *i386_branch_cost_string
;
205 /* Power of two alignment for functions. */
206 int i386_align_funcs
;
207 char *i386_align_funcs_string
;
209 /* Power of two alignment for loops. */
210 int i386_align_loops
;
212 /* Power of two alignment for non-loop jumps. */
213 int i386_align_jumps
;
215 /* Sometimes certain combinations of command options do not make
216 sense on a particular target machine. You can define a macro
217 `OVERRIDE_OPTIONS' to take account of this. This macro, if
defined, is executed once just after all the command options have
been parsed.
221 Don't use this macro to turn on various extra optimizations for
222 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
232 char *name
; /* Canonical processor name. */
233 enum processor_type processor
; /* Processor type enum value. */
234 struct processor_costs
*cost
; /* Processor costs */
235 int target_enable
; /* Target flags to enable. */
236 int target_disable
; /* Target flags to disable. */
237 } processor_target_table
[]
238 = {{PROCESSOR_I386_STRING
, PROCESSOR_I386
, &i386_cost
, 0, 0},
239 {PROCESSOR_I486_STRING
, PROCESSOR_I486
, &i486_cost
, 0, 0},
240 {PROCESSOR_I586_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
241 {PROCESSOR_PENTIUM_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
242 {PROCESSOR_I686_STRING
, PROCESSOR_PENTIUMPRO
, &pentiumpro_cost
,
244 {PROCESSOR_PENTIUMPRO_STRING
, PROCESSOR_PENTIUMPRO
,
245 &pentiumpro_cost
, 0, 0},
246 {PROCESSOR_K6_STRING
, PROCESSOR_K6
, &k6_cost
, 0, 0}};
248 int ptt_size
= sizeof (processor_target_table
) / sizeof (struct ptt
);
250 #ifdef SUBTARGET_OVERRIDE_OPTIONS
251 SUBTARGET_OVERRIDE_OPTIONS
;
254 /* Validate registers in register allocation order. */
255 if (i386_reg_alloc_order
)
257 for (i
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
263 case 'a': regno
= 0; break;
264 case 'd': regno
= 1; break;
265 case 'c': regno
= 2; break;
266 case 'b': regno
= 3; break;
267 case 'S': regno
= 4; break;
268 case 'D': regno
= 5; break;
269 case 'B': regno
= 6; break;
271 default: fatal ("Register '%c' is unknown", ch
);
274 if (regs_allocated
[regno
])
275 fatal ("Register '%c' already specified in allocation order", ch
);
277 regs_allocated
[regno
] = 1;
281 if (ix86_arch_string
== 0)
283 ix86_arch_string
= PROCESSOR_PENTIUM_STRING
;
284 if (ix86_cpu_string
== 0)
285 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
288 for (i
= 0; i
< ptt_size
; i
++)
289 if (! strcmp (ix86_arch_string
, processor_target_table
[i
].name
))
291 ix86_arch
= processor_target_table
[i
].processor
;
292 if (ix86_cpu_string
== 0)
293 ix86_cpu_string
= processor_target_table
[i
].name
;
299 error ("bad value (%s) for -march= switch", ix86_arch_string
);
300 ix86_arch_string
= PROCESSOR_PENTIUM_STRING
;
301 ix86_arch
= PROCESSOR_DEFAULT
;
304 if (ix86_cpu_string
== 0)
305 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
307 for (j
= 0; j
< ptt_size
; j
++)
308 if (! strcmp (ix86_cpu_string
, processor_target_table
[j
].name
))
310 ix86_cpu
= processor_target_table
[j
].processor
;
311 ix86_cost
= processor_target_table
[j
].cost
;
312 if (i
> j
&& (int) ix86_arch
>= (int) PROCESSOR_K6
)
313 error ("-mcpu=%s does not support -march=%s",
314 ix86_cpu_string
, ix86_arch_string
);
316 target_flags
|= processor_target_table
[j
].target_enable
;
317 target_flags
&= ~processor_target_table
[j
].target_disable
;
323 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string
);
324 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
325 ix86_cpu
= PROCESSOR_DEFAULT
;
328 /* Validate -mregparm= value. */
329 if (i386_regparm_string
)
331 i386_regparm
= atoi (i386_regparm_string
);
332 if (i386_regparm
< 0 || i386_regparm
> REGPARM_MAX
)
333 fatal ("-mregparm=%d is not between 0 and %d",
334 i386_regparm
, REGPARM_MAX
);
337 /* The 486 suffers more from non-aligned cache line fills, and the
338 larger code size results in a larger cache foot-print and more misses.
339 The 486 has a 16 byte cache line, pentium and pentiumpro have a 32 byte
341 def_align
= (TARGET_486
) ? 4 : 2;
343 /* Validate -malign-loops= value, or provide default. */
344 if (i386_align_loops_string
)
346 i386_align_loops
= atoi (i386_align_loops_string
);
347 if (i386_align_loops
< 0 || i386_align_loops
> MAX_CODE_ALIGN
)
348 fatal ("-malign-loops=%d is not between 0 and %d",
349 i386_align_loops
, MAX_CODE_ALIGN
);
352 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
353 i386_align_loops
= 4;
355 i386_align_loops
= 2;
358 /* Validate -malign-jumps= value, or provide default. */
359 if (i386_align_jumps_string
)
361 i386_align_jumps
= atoi (i386_align_jumps_string
);
362 if (i386_align_jumps
< 0 || i386_align_jumps
> MAX_CODE_ALIGN
)
363 fatal ("-malign-jumps=%d is not between 0 and %d",
364 i386_align_jumps
, MAX_CODE_ALIGN
);
367 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
368 i386_align_jumps
= 4;
370 i386_align_jumps
= def_align
;
373 /* Validate -malign-functions= value, or provide default. */
374 if (i386_align_funcs_string
)
376 i386_align_funcs
= atoi (i386_align_funcs_string
);
377 if (i386_align_funcs
< 0 || i386_align_funcs
> MAX_CODE_ALIGN
)
378 fatal ("-malign-functions=%d is not between 0 and %d",
379 i386_align_funcs
, MAX_CODE_ALIGN
);
382 i386_align_funcs
= def_align
;
384 /* Validate -mbranch-cost= value, or provide default. */
385 if (i386_branch_cost_string
)
387 i386_branch_cost
= atoi (i386_branch_cost_string
);
388 if (i386_branch_cost
< 0 || i386_branch_cost
> 5)
389 fatal ("-mbranch-cost=%d is not between 0 and 5",
393 i386_branch_cost
= 1;
395 /* Keep nonleaf frame pointers. */
396 if (TARGET_OMIT_LEAF_FRAME_POINTER
)
397 flag_omit_frame_pointer
= 1;
400 /* A C statement (sans semicolon) to choose the order in which to
401 allocate hard registers for pseudo-registers local to a basic
404 Store the desired register order in the array `reg_alloc_order'.
405 Element 0 should be the register to allocate first; element 1, the
406 next register; and so on.
408 The macro body should not assume anything about the contents of
409 `reg_alloc_order' before execution of the macro.
411 On most machines, it is not necessary to define this macro. */
414 order_regs_for_local_alloc ()
418 /* User specified the register allocation order. */
420 if (i386_reg_alloc_order
)
422 for (i
= order
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
428 case 'a': regno
= 0; break;
429 case 'd': regno
= 1; break;
430 case 'c': regno
= 2; break;
431 case 'b': regno
= 3; break;
432 case 'S': regno
= 4; break;
433 case 'D': regno
= 5; break;
434 case 'B': regno
= 6; break;
437 reg_alloc_order
[order
++] = regno
;
440 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
442 if (! regs_allocated
[i
])
443 reg_alloc_order
[order
++] = i
;
447 /* If user did not specify a register allocation order, use natural order. */
450 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
451 reg_alloc_order
[i
] = i
;
456 optimization_options (level
, size
)
458 int size ATTRIBUTE_UNUSED
;
460 /* For -O2 and beyond, turn off -fschedule-insns by default. It tends to
461 make the problem with not enough registers even worse. */
462 #ifdef INSN_SCHEDULING
464 flag_schedule_insns
= 0;
468 /* Sign-extend a 16-bit constant */
471 i386_sext16_if_const (op
)
474 if (GET_CODE (op
) == CONST_INT
)
476 HOST_WIDE_INT val
= INTVAL (op
);
477 HOST_WIDE_INT sext_val
;
479 sext_val
= val
| ~0xffff;
481 sext_val
= val
& 0xffff;
483 op
= GEN_INT (sext_val
);
488 /* Return nonzero if the rtx is aligned */
491 i386_aligned_reg_p (regno
)
494 return (regno
== STACK_POINTER_REGNUM
495 || (! flag_omit_frame_pointer
&& regno
== FRAME_POINTER_REGNUM
));
502 /* Registers and immediate operands are always "aligned". */
503 if (GET_CODE (op
) != MEM
)
506 /* Don't even try to do any aligned optimizations with volatiles. */
507 if (MEM_VOLATILE_P (op
))
510 /* Get address of memory operand. */
513 switch (GET_CODE (op
))
520 /* Match "reg + offset" */
522 if (GET_CODE (XEXP (op
, 1)) != CONST_INT
)
524 if (INTVAL (XEXP (op
, 1)) & 3)
528 if (GET_CODE (op
) != REG
)
531 /* ... fall through ... */
534 return i386_aligned_reg_p (REGNO (op
));
543 /* Return nonzero if INSN looks like it won't compute useful cc bits
544 as a side effect. This information is only a hint. */
547 i386_cc_probably_useless_p (insn
)
550 return ! next_cc0_user (insn
);
553 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
554 attribute for DECL. The attributes in ATTRIBUTES have previously been
558 i386_valid_decl_attribute_p (decl
, attributes
, identifier
, args
)
559 tree decl ATTRIBUTE_UNUSED
;
560 tree attributes ATTRIBUTE_UNUSED
;
561 tree identifier ATTRIBUTE_UNUSED
;
562 tree args ATTRIBUTE_UNUSED
;
567 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
568 attribute for TYPE. The attributes in ATTRIBUTES have previously been
572 i386_valid_type_attribute_p (type
, attributes
, identifier
, args
)
574 tree attributes ATTRIBUTE_UNUSED
;
578 if (TREE_CODE (type
) != FUNCTION_TYPE
579 && TREE_CODE (type
) != METHOD_TYPE
580 && TREE_CODE (type
) != FIELD_DECL
581 && TREE_CODE (type
) != TYPE_DECL
)
584 /* Stdcall attribute says callee is responsible for popping arguments
585 if they are not variable. */
586 if (is_attribute_p ("stdcall", identifier
))
587 return (args
== NULL_TREE
);
589 /* Cdecl attribute says the callee is a normal C declaration. */
590 if (is_attribute_p ("cdecl", identifier
))
591 return (args
== NULL_TREE
);
593 /* Regparm attribute specifies how many integer arguments are to be
594 passed in registers. */
595 if (is_attribute_p ("regparm", identifier
))
599 if (! args
|| TREE_CODE (args
) != TREE_LIST
600 || TREE_CHAIN (args
) != NULL_TREE
601 || TREE_VALUE (args
) == NULL_TREE
)
604 cst
= TREE_VALUE (args
);
605 if (TREE_CODE (cst
) != INTEGER_CST
)
608 if (TREE_INT_CST_HIGH (cst
) != 0
609 || TREE_INT_CST_LOW (cst
) < 0
610 || TREE_INT_CST_LOW (cst
) > REGPARM_MAX
)
619 /* Return 0 if the attributes for two types are incompatible, 1 if they
620 are compatible, and 2 if they are nearly compatible (which causes a
621 warning to be generated). */
624 i386_comp_type_attributes (type1
, type2
)
625 tree type1 ATTRIBUTE_UNUSED
;
626 tree type2 ATTRIBUTE_UNUSED
;
632 /* Value is the number of bytes of arguments automatically
633 popped when returning from a subroutine call.
634 FUNDECL is the declaration node of the function (as a tree),
635 FUNTYPE is the data type of the function (as a tree),
636 or for a library call it is an identifier node for the subroutine name.
637 SIZE is the number of bytes of arguments passed on the stack.
639 On the 80386, the RTD insn may be used to pop them if the number
640 of args is fixed, but if the number is variable then the caller
641 must pop them all. RTD can't be used for library calls now
642 because the library is compiled with the Unix compiler.
643 Use of RTD is a selectable option, since it is incompatible with
644 standard Unix calling sequences. If the option is not selected,
645 the caller must always pop the args.
647 The attribute stdcall is equivalent to RTD on a per module basis. */
650 i386_return_pops_args (fundecl
, funtype
, size
)
655 int rtd
= TARGET_RTD
&& (!fundecl
|| TREE_CODE (fundecl
) != IDENTIFIER_NODE
);
657 /* Cdecl functions override -mrtd, and never pop the stack. */
658 if (! lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype
))) {
660 /* Stdcall functions will pop the stack if not variable args. */
661 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype
)))
665 && (TYPE_ARG_TYPES (funtype
) == NULL_TREE
666 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype
)))
671 /* Lose any fake structure return argument. */
672 if (aggregate_value_p (TREE_TYPE (funtype
)))
673 return GET_MODE_SIZE (Pmode
);
679 /* Argument support functions. */
681 /* Initialize a variable CUM of type CUMULATIVE_ARGS
682 for a call to a function whose data type is FNTYPE.
683 For a library call, FNTYPE is 0. */
686 init_cumulative_args (cum
, fntype
, libname
)
687 CUMULATIVE_ARGS
*cum
; /* Argument info to initialize */
688 tree fntype
; /* tree ptr for function decl */
689 rtx libname
; /* SYMBOL_REF of library name or 0 */
691 static CUMULATIVE_ARGS zero_cum
;
692 tree param
, next_param
;
694 if (TARGET_DEBUG_ARG
)
696 fprintf (stderr
, "\ninit_cumulative_args (");
698 fprintf (stderr
, "fntype code = %s, ret code = %s",
699 tree_code_name
[(int) TREE_CODE (fntype
)],
700 tree_code_name
[(int) TREE_CODE (TREE_TYPE (fntype
))]);
702 fprintf (stderr
, "no fntype");
705 fprintf (stderr
, ", libname = %s", XSTR (libname
, 0));
710 /* Set up the number of registers to use for passing arguments. */
711 cum
->nregs
= i386_regparm
;
714 tree attr
= lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype
));
717 cum
->nregs
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr
)));
720 /* Determine if this function has variable arguments. This is
721 indicated by the last argument being 'void_type_mode' if there
722 are no variable arguments. If there are variable arguments, then
723 we won't pass anything in registers */
727 for (param
= (fntype
) ? TYPE_ARG_TYPES (fntype
) : 0;
728 param
!= 0; param
= next_param
)
730 next_param
= TREE_CHAIN (param
);
731 if (next_param
== 0 && TREE_VALUE (param
) != void_type_node
)
736 if (TARGET_DEBUG_ARG
)
737 fprintf (stderr
, ", nregs=%d )\n", cum
->nregs
);
742 /* Update the data in CUM to advance over an argument
743 of mode MODE and data type TYPE.
744 (TYPE is null for libcalls where that information may not be available.) */
747 function_arg_advance (cum
, mode
, type
, named
)
748 CUMULATIVE_ARGS
*cum
; /* current arg information */
749 enum machine_mode mode
; /* current arg mode */
750 tree type
; /* type of the argument or 0 if lib support */
751 int named
; /* whether or not the argument was named */
754 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
755 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
757 if (TARGET_DEBUG_ARG
)
759 "function_adv (sz=%d, wds=%2d, nregs=%d, mode=%s, named=%d)\n\n",
760 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
775 /* Define where to put the arguments to a function.
776 Value is zero to push the argument on the stack,
777 or a hard register in which to store the argument.
779 MODE is the argument's machine mode.
780 TYPE is the data type of the argument (as a tree).
781 This is null for libcalls where that information may
783 CUM is a variable of type CUMULATIVE_ARGS which gives info about
784 the preceding args and about the function being called.
785 NAMED is nonzero if this argument is a named parameter
786 (otherwise it is an extra parameter matching an ellipsis). */
789 function_arg (cum
, mode
, type
, named
)
790 CUMULATIVE_ARGS
*cum
; /* current arg information */
791 enum machine_mode mode
; /* current arg mode */
792 tree type
; /* type of the argument or 0 if lib support */
793 int named
; /* != 0 for normal args, == 0 for ... args */
797 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
798 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
802 /* For now, pass fp/complex values on the stack. */
811 if (words
<= cum
->nregs
)
812 ret
= gen_rtx_REG (mode
, cum
->regno
);
816 if (TARGET_DEBUG_ARG
)
819 "function_arg (size=%d, wds=%2d, nregs=%d, mode=%4s, named=%d",
820 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
823 fprintf (stderr
, ", reg=%%e%s", reg_names
[ REGNO(ret
) ]);
825 fprintf (stderr
, ", stack");
827 fprintf (stderr
, " )\n");
833 /* For an arg passed partly in registers and partly in memory,
834 this is the number of registers used.
835 For args passed entirely in registers or entirely in memory, zero. */
838 function_arg_partial_nregs (cum
, mode
, type
, named
)
839 CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
; /* current arg information */
840 enum machine_mode mode ATTRIBUTE_UNUSED
; /* current arg mode */
841 tree type ATTRIBUTE_UNUSED
; /* type of the argument or 0 if lib support */
842 int named ATTRIBUTE_UNUSED
; /* != 0 for normal args, == 0 for ... args */
847 /* Output an insn whose source is a 386 integer register. SRC is the
848 rtx for the register, and TEMPLATE is the op-code template. SRC may
849 be either SImode or DImode.
851 The template will be output with operands[0] as SRC, and operands[1]
852 as a pointer to the top of the 386 stack. So a call from floatsidf2
853 would look like this:
855 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
857 where %z0 corresponds to the caller's operands[1], and is used to
858 emit the proper size suffix.
860 ??? Extend this to handle HImode - a 387 can load and store HImode
864 output_op_from_reg (src
, template)
869 int size
= GET_MODE_SIZE (GET_MODE (src
));
872 xops
[1] = AT_SP (Pmode
);
873 xops
[2] = GEN_INT (size
);
874 xops
[3] = stack_pointer_rtx
;
876 if (size
> UNITS_PER_WORD
)
880 if (size
> 2 * UNITS_PER_WORD
)
882 high
= gen_rtx_REG (SImode
, REGNO (src
) + 2);
883 output_asm_insn (AS1 (push
%L0
,%0), &high
);
886 high
= gen_rtx_REG (SImode
, REGNO (src
) + 1);
887 output_asm_insn (AS1 (push
%L0
,%0), &high
);
890 output_asm_insn (AS1 (push
%L0
,%0), &src
);
891 output_asm_insn (template, xops
);
892 output_asm_insn (AS2 (add
%L3
,%2,%3), xops
);
895 /* Output an insn to pop an value from the 387 top-of-stack to 386
896 register DEST. The 387 register stack is popped if DIES is true. If
897 the mode of DEST is an integer mode, a `fist' integer store is done,
898 otherwise a `fst' float store is done. */
901 output_to_reg (dest
, dies
, scratch_mem
)
907 int size
= GET_MODE_SIZE (GET_MODE (dest
));
910 xops
[0] = AT_SP (Pmode
);
912 xops
[0] = scratch_mem
;
914 xops
[1] = stack_pointer_rtx
;
915 xops
[2] = GEN_INT (size
);
919 output_asm_insn (AS2 (sub
%L1
,%2,%1), xops
);
921 if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_INT
)
924 output_asm_insn (AS1 (fistp
%z3
,%y0
), xops
);
925 else if (GET_MODE (xops
[3]) == DImode
&& ! dies
)
927 /* There is no DImode version of this without a stack pop, so
928 we must emulate it. It doesn't matter much what the second
929 instruction is, because the value being pushed on the FP stack
930 is not used except for the following stack popping store.
931 This case can only happen without optimization, so it doesn't
932 matter that it is inefficient. */
933 output_asm_insn (AS1 (fistp
%z3
,%0), xops
);
934 output_asm_insn (AS1 (fild
%z3
,%0), xops
);
937 output_asm_insn (AS1 (fist
%z3
,%y0
), xops
);
940 else if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_FLOAT
)
943 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
946 if (GET_MODE (dest
) == XFmode
)
948 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
949 output_asm_insn (AS1 (fld
%z3
,%y0
), xops
);
952 output_asm_insn (AS1 (fst
%z3
,%y0
), xops
);
960 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
962 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
965 if (size
> UNITS_PER_WORD
)
967 dest
= gen_rtx_REG (SImode
, REGNO (dest
) + 1);
969 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
972 xops
[0] = adj_offsettable_operand (xops
[0], 4);
974 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
977 if (size
> 2 * UNITS_PER_WORD
)
979 dest
= gen_rtx_REG (SImode
, REGNO (dest
) + 1);
981 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
984 xops
[0] = adj_offsettable_operand (xops
[0], 4);
985 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
992 singlemove_string (operands
)
996 if (GET_CODE (operands
[0]) == MEM
997 && GET_CODE (x
= XEXP (operands
[0], 0)) == PRE_DEC
)
999 if (XEXP (x
, 0) != stack_pointer_rtx
)
1001 return "push%L1 %1";
1003 else if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1004 return output_move_const_single (operands
);
1005 else if (GET_CODE (operands
[0]) == REG
|| GET_CODE (operands
[1]) == REG
)
1006 return AS2 (mov
%L0
,%1,%0);
1007 else if (CONSTANT_P (operands
[1]))
1008 return AS2 (mov
%L0
,%1,%0);
1011 output_asm_insn ("push%L1 %1", operands
);
1016 /* Return a REG that occurs in ADDR with coefficient 1.
1017 ADDR can be effectively incremented by incrementing REG. */
1020 find_addr_reg (addr
)
1023 while (GET_CODE (addr
) == PLUS
)
1025 if (GET_CODE (XEXP (addr
, 0)) == REG
)
1026 addr
= XEXP (addr
, 0);
1027 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
1028 addr
= XEXP (addr
, 1);
1029 else if (CONSTANT_P (XEXP (addr
, 0)))
1030 addr
= XEXP (addr
, 1);
1031 else if (CONSTANT_P (XEXP (addr
, 1)))
1032 addr
= XEXP (addr
, 0);
1037 if (GET_CODE (addr
) == REG
)
1042 /* Output an insn to add the constant N to the register X. */
1053 output_asm_insn (AS1 (dec
%L0
,%0), xops
);
1055 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
1056 else if (n
< 0 || n
== 128)
1058 xops
[1] = GEN_INT (-n
);
1059 output_asm_insn (AS2 (sub
%L0
,%1,%0), xops
);
1063 xops
[1] = GEN_INT (n
);
1064 output_asm_insn (AS2 (add
%L0
,%1,%0), xops
);
1068 /* Output assembler code to perform a doubleword move insn
1069 with operands OPERANDS. */
1072 output_move_double (operands
)
1075 enum {REGOP
, OFFSOP
, MEMOP
, PUSHOP
, POPOP
, CNSTOP
, RNDOP
} optype0
, optype1
;
1079 rtx addreg0
= 0, addreg1
= 0;
1080 int dest_overlapped_low
= 0;
1081 int size
= GET_MODE_SIZE (GET_MODE (operands
[0]));
1086 /* First classify both operands. */
1088 if (REG_P (operands
[0]))
1090 else if (offsettable_memref_p (operands
[0]))
1092 else if (GET_CODE (XEXP (operands
[0], 0)) == POST_INC
)
1094 else if (GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
)
1096 else if (GET_CODE (operands
[0]) == MEM
)
1101 if (REG_P (operands
[1]))
1103 else if (CONSTANT_P (operands
[1]))
1105 else if (offsettable_memref_p (operands
[1]))
1107 else if (GET_CODE (XEXP (operands
[1], 0)) == POST_INC
)
1109 else if (GET_CODE (XEXP (operands
[1], 0)) == PRE_DEC
)
1111 else if (GET_CODE (operands
[1]) == MEM
)
1116 /* Check for the cases that the operand constraints are not
1117 supposed to allow to happen. Abort if we get one,
1118 because generating code for these cases is painful. */
1120 if (optype0
== RNDOP
|| optype1
== RNDOP
)
1123 /* If one operand is decrementing and one is incrementing
1124 decrement the former register explicitly
1125 and change that operand into ordinary indexing. */
1127 if (optype0
== PUSHOP
&& optype1
== POPOP
)
1129 /* ??? Can this ever happen on i386? */
1130 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
1131 asm_add (-size
, operands
[0]);
1132 if (GET_MODE (operands
[1]) == XFmode
)
1133 operands
[0] = gen_rtx_MEM (XFmode
, operands
[0]);
1134 else if (GET_MODE (operands
[0]) == DFmode
)
1135 operands
[0] = gen_rtx_MEM (DFmode
, operands
[0]);
1137 operands
[0] = gen_rtx_MEM (DImode
, operands
[0]);
1141 if (optype0
== POPOP
&& optype1
== PUSHOP
)
1143 /* ??? Can this ever happen on i386? */
1144 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
1145 asm_add (-size
, operands
[1]);
1146 if (GET_MODE (operands
[1]) == XFmode
)
1147 operands
[1] = gen_rtx_MEM (XFmode
, operands
[1]);
1148 else if (GET_MODE (operands
[1]) == DFmode
)
1149 operands
[1] = gen_rtx_MEM (DFmode
, operands
[1]);
1151 operands
[1] = gen_rtx_MEM (DImode
, operands
[1]);
1155 /* If an operand is an unoffsettable memory ref, find a register
1156 we can increment temporarily to make it refer to the second word. */
1158 if (optype0
== MEMOP
)
1159 addreg0
= find_addr_reg (XEXP (operands
[0], 0));
1161 if (optype1
== MEMOP
)
1162 addreg1
= find_addr_reg (XEXP (operands
[1], 0));
1164 /* Ok, we can do one word at a time.
1165 Normally we do the low-numbered word first,
1166 but if either operand is autodecrementing then we
1167 do the high-numbered word first.
1169 In either case, set up in LATEHALF the operands to use
1170 for the high-numbered word and in some cases alter the
1171 operands in OPERANDS to be suitable for the low-numbered word. */
1175 if (optype0
== REGOP
)
1177 middlehalf
[0] = gen_rtx_REG (SImode
, REGNO (operands
[0]) + 1);
1178 latehalf
[0] = gen_rtx_REG (SImode
, REGNO (operands
[0]) + 2);
1180 else if (optype0
== OFFSOP
)
1182 middlehalf
[0] = adj_offsettable_operand (operands
[0], 4);
1183 latehalf
[0] = adj_offsettable_operand (operands
[0], 8);
1187 middlehalf
[0] = operands
[0];
1188 latehalf
[0] = operands
[0];
1191 if (optype1
== REGOP
)
1193 middlehalf
[1] = gen_rtx_REG (SImode
, REGNO (operands
[1]) + 1);
1194 latehalf
[1] = gen_rtx_REG (SImode
, REGNO (operands
[1]) + 2);
1196 else if (optype1
== OFFSOP
)
1198 middlehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1199 latehalf
[1] = adj_offsettable_operand (operands
[1], 8);
1201 else if (optype1
== CNSTOP
)
1203 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1205 REAL_VALUE_TYPE r
; long l
[3];
1207 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1208 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r
, l
);
1209 operands
[1] = GEN_INT (l
[0]);
1210 middlehalf
[1] = GEN_INT (l
[1]);
1211 latehalf
[1] = GEN_INT (l
[2]);
1213 else if (CONSTANT_P (operands
[1]))
1214 /* No non-CONST_DOUBLE constant should ever appear here. */
1219 middlehalf
[1] = operands
[1];
1220 latehalf
[1] = operands
[1];
1226 /* Size is not 12. */
1228 if (optype0
== REGOP
)
1229 latehalf
[0] = gen_rtx_REG (SImode
, REGNO (operands
[0]) + 1);
1230 else if (optype0
== OFFSOP
)
1231 latehalf
[0] = adj_offsettable_operand (operands
[0], 4);
1233 latehalf
[0] = operands
[0];
1235 if (optype1
== REGOP
)
1236 latehalf
[1] = gen_rtx_REG (SImode
, REGNO (operands
[1]) + 1);
1237 else if (optype1
== OFFSOP
)
1238 latehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1239 else if (optype1
== CNSTOP
)
1240 split_double (operands
[1], &operands
[1], &latehalf
[1]);
1242 latehalf
[1] = operands
[1];
1245 /* If insn is effectively movd N (sp),-(sp) then we will do the
1246 high word first. We should use the adjusted operand 1
1247 (which is N+4 (sp) or N+8 (sp))
1248 for the low word and middle word as well,
1249 to compensate for the first decrement of sp. */
1250 if (optype0
== PUSHOP
1251 && REGNO (XEXP (XEXP (operands
[0], 0), 0)) == STACK_POINTER_REGNUM
1252 && reg_overlap_mentioned_p (stack_pointer_rtx
, operands
[1]))
1253 middlehalf
[1] = operands
[1] = latehalf
[1];
1255 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1256 if the upper part of reg N does not appear in the MEM, arrange to
1257 emit the move late-half first. Otherwise, compute the MEM address
1258 into the upper part of N and use that as a pointer to the memory
1260 if (optype0
== REGOP
1261 && (optype1
== OFFSOP
|| optype1
== MEMOP
))
1263 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1264 && reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1266 /* If both halves of dest are used in the src memory address,
1267 compute the address into latehalf of dest. */
1269 xops
[0] = latehalf
[0];
1270 xops
[1] = XEXP (operands
[1], 0);
1271 output_asm_insn (AS2 (lea
%L0
,%a1
,%0), xops
);
1272 if (GET_MODE (operands
[1]) == XFmode
)
1274 operands
[1] = gen_rtx_MEM (XFmode
, latehalf
[0]);
1275 middlehalf
[1] = adj_offsettable_operand (operands
[1], size
-8);
1276 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1280 operands
[1] = gen_rtx_MEM (DImode
, latehalf
[0]);
1281 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1286 && reg_mentioned_p (middlehalf
[0], XEXP (operands
[1], 0)))
1288 /* Check for two regs used by both source and dest. */
1289 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1290 || reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1293 /* JRV says this can't happen: */
1294 if (addreg0
|| addreg1
)
1297 /* Only the middle reg conflicts; simply put it last. */
1298 output_asm_insn (singlemove_string (operands
), operands
);
1299 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1300 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1304 else if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0)))
1305 /* If the low half of dest is mentioned in the source memory
1306 address, the arrange to emit the move late half first. */
1307 dest_overlapped_low
= 1;
1310 /* If one or both operands autodecrementing,
1311 do the two words, high-numbered first. */
1313 /* Likewise, the first move would clobber the source of the second one,
1314 do them in the other order. This happens only for registers;
1315 such overlap can't happen in memory unless the user explicitly
1316 sets it up, and that is an undefined circumstance. */
1319 if (optype0
== PUSHOP
|| optype1
== PUSHOP
1320 || (optype0
== REGOP
&& optype1
== REGOP
1321 && REGNO (operands
[0]) == REGNO (latehalf
[1]))
1322 || dest_overlapped_low
)
1325 if (optype0
== PUSHOP
|| optype1
== PUSHOP
1326 || (optype0
== REGOP
&& optype1
== REGOP
1327 && ((middlehalf
[1] && REGNO (operands
[0]) == REGNO (middlehalf
[1]))
1328 || REGNO (operands
[0]) == REGNO (latehalf
[1])))
1329 || dest_overlapped_low
)
1331 /* Make any unoffsettable addresses point at high-numbered word. */
1333 asm_add (size
-4, addreg0
);
1335 asm_add (size
-4, addreg1
);
1338 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1340 /* Undo the adds we just did. */
1342 asm_add (-4, addreg0
);
1344 asm_add (-4, addreg1
);
1348 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1350 asm_add (-4, addreg0
);
1352 asm_add (-4, addreg1
);
1355 /* Do low-numbered word. */
1356 return singlemove_string (operands
);
1359 /* Normal case: do the two words, low-numbered first. */
1361 output_asm_insn (singlemove_string (operands
), operands
);
1363 /* Do the middle one of the three words for long double */
1367 asm_add (4, addreg0
);
1369 asm_add (4, addreg1
);
1371 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1374 /* Make any unoffsettable addresses point at high-numbered word. */
1376 asm_add (4, addreg0
);
1378 asm_add (4, addreg1
);
1381 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1383 /* Undo the adds we just did. */
1385 asm_add (4-size
, addreg0
);
1387 asm_add (4-size
, addreg1
);
/* NOTE(review): this region is a garbled extraction -- statements are
   wrapped mid-token and structural lines (braces, declarations, returns)
   are missing relative to the embedded original line numbers.  Restore
   from a pristine copy of this file before compiling.  */
/* output_move_pushmem: emits assembly to push a LENGTH-byte (word-aligned,
   offsettable) memory source onto the stack, using up to MAX_TMPS scratch
   registers found among operands[tmp_start..n_operands) when the source
   overlaps the stack pointer; aborts via fatal_insn on bad operands.  */
1392 #define MAX_TMPS 2 /* max temporary registers used */
1394 /* Output the appropriate code to move push memory on the stack */
1397 output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
)
1409 } tmp_info
[MAX_TMPS
];
1411 rtx src
= operands
[1];
1414 int stack_p
= reg_overlap_mentioned_p (stack_pointer_rtx
, src
);
1415 int stack_offset
= 0;
1419 if (! offsettable_memref_p (src
))
1420 fatal_insn ("Source is not offsettable", insn
);
1422 if ((length
& 3) != 0)
1423 fatal_insn ("Pushing non-word aligned size", insn
);
1425 /* Figure out which temporary registers we have available */
1426 for (i
= tmp_start
; i
< n_operands
; i
++)
1428 if (GET_CODE (operands
[i
]) == REG
)
1430 if (reg_overlap_mentioned_p (operands
[i
], src
))
1433 tmp_info
[ max_tmps
++ ].xops
[1] = operands
[i
];
1434 if (max_tmps
== MAX_TMPS
)
1440 for (offset
= length
- 4; offset
>= 0; offset
-= 4)
1442 xops
[0] = adj_offsettable_operand (src
, offset
+ stack_offset
);
1443 output_asm_insn (AS1(push
%L0
,%0), xops
);
1449 for (offset
= length
- 4; offset
>= 0; )
1451 for (num_tmps
= 0; num_tmps
< max_tmps
&& offset
>= 0; num_tmps
++)
1453 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%0,%1);
1454 tmp_info
[num_tmps
].push
= AS1(push
%L0
,%1);
1455 tmp_info
[num_tmps
].xops
[0]
1456 = adj_offsettable_operand (src
, offset
+ stack_offset
);
1460 for (i
= 0; i
< num_tmps
; i
++)
1461 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1463 for (i
= 0; i
< num_tmps
; i
++)
1464 output_asm_insn (tmp_info
[i
].push
, tmp_info
[i
].xops
);
1467 stack_offset
+= 4*num_tmps
;
/* NOTE(review): garbled extraction -- wrapped mid-token with missing
   structural lines; restore from a pristine copy before compiling.  */
/* output_move_memory: emits assembly for a memory->memory move of LENGTH
   bytes through scratch registers from operands[tmp_start..n_operands);
   delegates to output_move_pushmem when the destination is a stack push.
   An odd byte count additionally needs a QImode-capable scratch register
   (qi_tmp).  Errors abort via fatal_insn.  */
1473 /* Output the appropriate code to move data between two memory locations */
1476 output_move_memory (operands
, insn
, length
, tmp_start
, n_operands
)
1488 } tmp_info
[MAX_TMPS
];
1490 rtx dest
= operands
[0];
1491 rtx src
= operands
[1];
1492 rtx qi_tmp
= NULL_RTX
;
1498 if (GET_CODE (dest
) == MEM
1499 && GET_CODE (XEXP (dest
, 0)) == PRE_INC
1500 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
)
1501 return output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
);
1503 if (! offsettable_memref_p (src
))
1504 fatal_insn ("Source is not offsettable", insn
);
1506 if (! offsettable_memref_p (dest
))
1507 fatal_insn ("Destination is not offsettable", insn
);
1509 /* Figure out which temporary registers we have available */
1510 for (i
= tmp_start
; i
< n_operands
; i
++)
1512 if (GET_CODE (operands
[i
]) == REG
)
1514 if ((length
& 1) != 0 && qi_tmp
== 0 && QI_REG_P (operands
[i
]))
1515 qi_tmp
= operands
[i
];
1517 if (reg_overlap_mentioned_p (operands
[i
], dest
))
1518 fatal_insn ("Temporary register overlaps the destination", insn
);
1520 if (reg_overlap_mentioned_p (operands
[i
], src
))
1521 fatal_insn ("Temporary register overlaps the source", insn
);
1523 tmp_info
[max_tmps
++].xops
[2] = operands
[i
];
1524 if (max_tmps
== MAX_TMPS
)
1530 fatal_insn ("No scratch registers were found to do memory->memory moves",
1533 if ((length
& 1) != 0)
1536 fatal_insn ("No byte register found when moving odd # of bytes.",
1542 for (num_tmps
= 0; num_tmps
< max_tmps
; num_tmps
++)
1546 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%1,%2);
1547 tmp_info
[num_tmps
].store
= AS2(mov
%L0
,%2,%0);
1548 tmp_info
[num_tmps
].xops
[0]
1549 = adj_offsettable_operand (dest
, offset
);
1550 tmp_info
[num_tmps
].xops
[1]
1551 = adj_offsettable_operand (src
, offset
);
1557 else if (length
>= 2)
1559 tmp_info
[num_tmps
].load
= AS2(mov
%W0
,%1,%2);
1560 tmp_info
[num_tmps
].store
= AS2(mov
%W0
,%2,%0);
1561 tmp_info
[num_tmps
].xops
[0]
1562 = adj_offsettable_operand (dest
, offset
);
1563 tmp_info
[num_tmps
].xops
[1]
1564 = adj_offsettable_operand (src
, offset
);
1573 for (i
= 0; i
< num_tmps
; i
++)
1574 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1576 for (i
= 0; i
< num_tmps
; i
++)
1577 output_asm_insn (tmp_info
[i
].store
, tmp_info
[i
].xops
);
1582 xops
[0] = adj_offsettable_operand (dest
, offset
);
1583 xops
[1] = adj_offsettable_operand (src
, offset
);
1585 output_asm_insn (AS2(mov
%B0
,%1,%2), xops
);
1586 output_asm_insn (AS2(mov
%B0
,%2,%0), xops
);
/* NOTE(review): garbled extraction; the return statements and the
   non-REAL_ARITHMETIC arm of the #if are missing here -- restore from a
   pristine copy.  */
/* standard_80387_constant_p: classifies constant X, testing (under a
   float trap handler installed via set_float_handler/setjmp) whether it
   equals +0.0 or 1.0 -- the values the 80387 can load with fldz/fld1.  */
1593 standard_80387_constant_p (x
)
1596 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1601 if (setjmp (handler
))
1604 set_float_handler (handler
);
1605 REAL_VALUE_FROM_CONST_DOUBLE (d
, x
);
1606 is0
= REAL_VALUES_EQUAL (d
, dconst0
) && !REAL_VALUE_MINUS_ZERO (d
);
1607 is1
= REAL_VALUES_EQUAL (d
, dconst1
);
1608 set_float_handler (NULL_PTR
);
1616 /* Note that on the 80387, other constants, such as pi,
1617 are much slower to load as standard constants
1618 than to load from doubles in memory! */
1619 /* ??? Not true on K6: all constants are equal cost. */
/* NOTE(review): garbled extraction with missing lines (the fldz/fld1
   emission for the standard-constant case is absent); restore from a
   pristine copy.  */
/* output_move_const_single: returns the assembler template for moving a
   floating constant into operands[0]; a CONST_DOUBLE source is converted
   to its target single-precision bit pattern (via
   REAL_VALUE_TO_TARGET_SINGLE) and then handled as an ordinary
   singlemove_string.  */
1626 output_move_const_single (operands
)
1629 if (FP_REG_P (operands
[0]))
1631 int conval
= standard_80387_constant_p (operands
[1]);
1640 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1642 REAL_VALUE_TYPE r
; long l
;
1644 if (GET_MODE (operands
[1]) == XFmode
)
1647 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1648 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
1649 operands
[1] = GEN_INT (l
);
1652 return singlemove_string (operands
);
1655 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1656 reference and a constant. */
1659 symbolic_operand (op
, mode
)
1661 enum machine_mode mode ATTRIBUTE_UNUSED
;
1663 switch (GET_CODE (op
))
1671 return ((GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
1672 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
1673 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
1680 /* Test for a valid operand for a call instruction.
1681 Don't allow the arg pointer register or virtual regs
1682 since they may change into reg + const, which the patterns
1683 can't handle yet. */
1686 call_insn_operand (op
, mode
)
1688 enum machine_mode mode ATTRIBUTE_UNUSED
;
1690 if (GET_CODE (op
) == MEM
1691 && ((CONSTANT_ADDRESS_P (XEXP (op
, 0))
1692 /* This makes a difference for PIC. */
1693 && general_operand (XEXP (op
, 0), Pmode
))
1694 || (GET_CODE (XEXP (op
, 0)) == REG
1695 && XEXP (op
, 0) != arg_pointer_rtx
1696 && ! (REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1697 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
1703 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1707 expander_call_insn_operand (op
, mode
)
1709 enum machine_mode mode ATTRIBUTE_UNUSED
;
1711 if (GET_CODE (op
) == MEM
1712 && (CONSTANT_ADDRESS_P (XEXP (op
, 0))
1713 || (GET_CODE (XEXP (op
, 0)) == REG
1714 && XEXP (op
, 0) != arg_pointer_rtx
1715 && ! (REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1716 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
1722 /* Return 1 if OP is a comparison operator that can use the condition code
1723 generated by an arithmetic operation. */
1726 arithmetic_comparison_operator (op
, mode
)
1728 enum machine_mode mode
;
1732 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
1735 code
= GET_CODE (op
);
1736 if (GET_RTX_CLASS (code
) != '<')
1739 return (code
!= GT
&& code
!= LE
);
1743 ix86_logical_operator (op
, mode
)
1745 enum machine_mode mode ATTRIBUTE_UNUSED
;
1747 return GET_CODE (op
) == AND
|| GET_CODE (op
) == IOR
|| GET_CODE (op
) == XOR
;
1751 /* Returns 1 if OP contains a symbol reference */
1754 symbolic_reference_mentioned_p (op
)
1760 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1763 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1764 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1770 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1771 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1775 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
/* NOTE(review): garbled extraction with many missing lines (the `modified'
   declaration, the emit_insn of the final operation, swap/neg emission);
   restore from a pristine copy before compiling.  */
/* ix86_expand_binary_operator: expand CODE in MODE over operands[0..2],
   commuting commutative operands, forcing MEM operands into registers to
   aid CSE when optimizing, and rewriting const - x via a temporary
   register; falls back to re-legitimizing when ix86_binary_operator_ok
   rejects the operand combination.  */
1782 /* Attempt to expand a binary operator. Make the expansion closer to the
1783 actual machine, then just general_operand, which will allow 3 separate
1784 memory references (one output, two input) in a single insn. Return
1785 whether the insn fails, or succeeds. */
1788 ix86_expand_binary_operator (code
, mode
, operands
)
1790 enum machine_mode mode
;
1795 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1796 if (GET_RTX_CLASS (code
) == 'c'
1797 && (rtx_equal_p (operands
[0], operands
[2])
1798 || immediate_operand (operands
[1], mode
)))
1800 rtx temp
= operands
[1];
1801 operands
[1] = operands
[2];
1805 /* If optimizing, copy to regs to improve CSE */
1806 if (TARGET_PSEUDO
&& optimize
1807 && ((reload_in_progress
| reload_completed
) == 0))
1809 if (GET_CODE (operands
[1]) == MEM
1810 && ! rtx_equal_p (operands
[0], operands
[1]))
1811 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1813 if (GET_CODE (operands
[2]) == MEM
)
1814 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1816 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1818 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1820 emit_move_insn (temp
, operands
[1]);
1826 if (!ix86_binary_operator_ok (code
, mode
, operands
))
1828 /* If not optimizing, try to make a valid insn (optimize code
1829 previously did this above to improve chances of CSE) */
1831 if ((! TARGET_PSEUDO
|| !optimize
)
1832 && ((reload_in_progress
| reload_completed
) == 0)
1833 && (GET_CODE (operands
[1]) == MEM
|| GET_CODE (operands
[2]) == MEM
))
1836 if (GET_CODE (operands
[1]) == MEM
1837 && ! rtx_equal_p (operands
[0], operands
[1]))
1839 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1843 if (GET_CODE (operands
[2]) == MEM
)
1845 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1849 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1851 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1853 emit_move_insn (temp
, operands
[1]);
1858 if (modified
&& ! ix86_binary_operator_ok (code
, mode
, operands
))
1868 /* Return TRUE or FALSE depending on whether the binary operator meets the
1869 appropriate constraints. */
1872 ix86_binary_operator_ok (code
, mode
, operands
)
1874 enum machine_mode mode ATTRIBUTE_UNUSED
;
1877 return (GET_CODE (operands
[1]) != MEM
|| GET_CODE (operands
[2]) != MEM
)
1878 && (GET_CODE (operands
[1]) != CONST_INT
|| GET_RTX_CLASS (code
) == 'c');
/* NOTE(review): garbled extraction -- the opening TARGET_PSEUDO/optimize
   test of the first condition and the emit of the final operation are
   missing; restore from a pristine copy before compiling.  */
/* ix86_expand_unary_operator: expand unary CODE in MODE over
   operands[0..1]; when optimizing (and not reloading), forces a MEM
   source into a register to improve CSE, retrying the legitimacy check
   via ix86_unary_operator_ok.  */
1881 /* Attempt to expand a unary operator. Make the expansion closer to the
1882 actual machine, then just general_operand, which will allow 2 separate
1883 memory references (one output, one input) in a single insn. Return
1884 whether the insn fails, or succeeds. */
1887 ix86_expand_unary_operator (code
, mode
, operands
)
1889 enum machine_mode mode
;
1892 /* If optimizing, copy to regs to improve CSE */
1895 && ((reload_in_progress
| reload_completed
) == 0)
1896 && GET_CODE (operands
[1]) == MEM
)
1897 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1899 if (! ix86_unary_operator_ok (code
, mode
, operands
))
1901 if ((! TARGET_PSEUDO
|| optimize
== 0)
1902 && ((reload_in_progress
| reload_completed
) == 0)
1903 && GET_CODE (operands
[1]) == MEM
)
1905 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1906 if (! ix86_unary_operator_ok (code
, mode
, operands
))
1916 /* Return TRUE or FALSE depending on whether the unary operator meets the
1917 appropriate constraints. */
1920 ix86_unary_operator_ok (code
, mode
, operands
)
1921 enum rtx_code code ATTRIBUTE_UNUSED
;
1922 enum machine_mode mode ATTRIBUTE_UNUSED
;
1923 rtx operands
[2] ATTRIBUTE_UNUSED
;
/* State shared by asm_output_function_prefix and load_pic_register for
   the -fpic "load return address into %ebx" thunk label:
   pic_label_rtx/pic_label_name identify the label, pic_label_no numbers
   successive labels for ASM_GENERATE_INTERNAL_LABEL.  */
1928 static rtx pic_label_rtx
;
1929 static char pic_label_name
[256];
1930 static int pic_label_no
= 0;
/* NOTE(review): garbled extraction with missing lines (declarations,
   braces); restore from a pristine copy before compiling.  */
/* asm_output_function_prefix: under -fpic with deep-branch-prediction
   targets, emits before the function a tiny local thunk (labelled via
   pic_label_rtx/pic_label_name) that loads the caller's return address
   from the stack into the PIC register and returns.  */
1932 /* This function generates code for -fpic that loads %ebx with
1933 the return address of the caller and then returns. */
1936 asm_output_function_prefix (file
, name
)
1938 char *name ATTRIBUTE_UNUSED
;
1941 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1942 || current_function_uses_const_pool
);
1943 xops
[0] = pic_offset_table_rtx
;
1944 xops
[1] = stack_pointer_rtx
;
1946 /* Deep branch prediction favors having a return for every call. */
1947 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
1951 if (pic_label_rtx
== 0)
1953 pic_label_rtx
= gen_label_rtx ();
1954 ASM_GENERATE_INTERNAL_LABEL (pic_label_name
, "LPR", pic_label_no
++);
1955 LABEL_NAME (pic_label_rtx
) = pic_label_name
;
1958 prologue_node
= make_node (FUNCTION_DECL
);
1959 DECL_RESULT (prologue_node
) = 0;
1961 /* This used to call ASM_DECLARE_FUNCTION_NAME() but since it's an
1962 internal (non-global) label that's being emitted, it didn't make
1963 sense to have .type information for local labels. This caused
1964 the SCO OpenServer 5.0.4 ELF assembler grief (why are you giving
1965 me debug info for a label that you're declaring non-global?) this
1966 was changed to call ASM_OUTPUT_LABEL() instead. */
1969 ASM_OUTPUT_LABEL (file
, pic_label_name
);
1970 output_asm_insn ("movl (%1),%0", xops
);
1971 output_asm_insn ("ret", xops
);
/* NOTE(review): garbled extraction -- the function body after the
   TARGET_SCHEDULE_PROLOGUE test is missing; restore from a pristine
   copy.  Presumably it returns early when the prologue is emitted as RTL
   and otherwise falls through to text emission -- confirm.  */
1975 /* Generate the assembly code for function entry.
1976 FILE is an stdio stream to output the code to.
1977 SIZE is an int: how many units of temporary storage to allocate. */
1980 function_prologue (file
, size
)
1981 FILE *file ATTRIBUTE_UNUSED
;
1982 int size ATTRIBUTE_UNUSED
;
1984 if (TARGET_SCHEDULE_PROLOGUE
)
/* NOTE(review): garbled extraction -- the body following the
   TARGET_SCHEDULE_PROLOGUE test is missing; restore from a pristine
   copy before compiling.  */
1993 /* Expand the prologue into a bunch of separate insns. */
1996 ix86_expand_prologue ()
1998 if (! TARGET_SCHEDULE_PROLOGUE
)
/* NOTE(review): garbled extraction with missing lines (declarations,
   braces, the do_rtl branches' structure); restore from a pristine copy
   before compiling.  */
/* load_pic_register: initializes the PIC register (%ebx) in the prologue.
   With deep branch prediction it calls the per-function thunk label and
   adds $_GLOBAL_OFFSET_TABLE_; otherwise it uses the classic
   call/pop/add sequence.  DO_RTL selects RTL emission vs. direct
   assembly output.  Ends with a scheduling blockage so later insns that
   implicitly use the GOT are not moved before the setup.  */
2005 load_pic_register (do_rtl
)
2010 if (TARGET_DEEP_BRANCH_PREDICTION
)
2012 xops
[0] = pic_offset_table_rtx
;
2013 if (pic_label_rtx
== 0)
2015 pic_label_rtx
= gen_label_rtx ();
2016 ASM_GENERATE_INTERNAL_LABEL (pic_label_name
, "LPR", pic_label_no
++);
2017 LABEL_NAME (pic_label_rtx
) = pic_label_name
;
2020 xops
[1] = gen_rtx_MEM (QImode
,
2021 gen_rtx (SYMBOL_REF
, Pmode
,
2022 LABEL_NAME (pic_label_rtx
)));
2026 emit_insn (gen_prologue_get_pc (xops
[0], xops
[1]));
2027 emit_insn (gen_prologue_set_got (xops
[0],
2028 gen_rtx (SYMBOL_REF
, Pmode
,
2029 "$_GLOBAL_OFFSET_TABLE_"),
2034 output_asm_insn (AS1 (call
,%X1
), xops
);
2035 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops
);
2042 xops
[0] = pic_offset_table_rtx
;
2043 xops
[1] = gen_label_rtx ();
2047 /* We can't put a raw CODE_LABEL into the RTL, and we can't emit
2048 a new CODE_LABEL after reload, so we need a single pattern to
2049 emit the 3 necessary instructions. */
2050 emit_insn (gen_prologue_get_pc_and_set_got (xops
[0]));
2054 output_asm_insn (AS1 (call
,%P1
), xops
);
2055 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
2056 CODE_LABEL_NUMBER (xops
[1]));
2057 output_asm_insn (AS1 (pop
%L0
,%0), xops
);
2058 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops
);
2062 /* When -fpic, we must emit a scheduling barrier, so that the instruction
2063 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
2064 moved before any instruction which implicitly uses the got. */
2067 emit_insn (gen_blockage ());
/* NOTE(review): garbled extraction with many missing lines (declarations,
   braces, #else/#endif directives, the stack-probe else-arm); restore
   from a pristine copy before compiling.  */
/* ix86_prologue: emits the function prologue -- frame pointer push/setup,
   stack frame allocation (via sub, or an _alloca call when
   TARGET_STACK_PROBE and the frame exceeds CHECK_STACK_LIMIT), saves of
   call-saved registers and the PIC register, dwarf2 CFA annotations, and
   PIC register initialization via load_pic_register.  DO_RTL selects RTL
   emission vs. direct assembly output.  */
2071 ix86_prologue (do_rtl
)
2077 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2078 || current_function_uses_const_pool
);
2079 long tsize
= get_frame_size ();
2081 int cfa_offset
= INCOMING_FRAME_SP_OFFSET
, cfa_store_offset
= cfa_offset
;
2083 xops
[0] = stack_pointer_rtx
;
2084 xops
[1] = frame_pointer_rtx
;
2085 xops
[2] = GEN_INT (tsize
);
2087 if (frame_pointer_needed
)
2091 insn
= emit_insn (gen_rtx (SET
, VOIDmode
,
2092 gen_rtx_MEM (SImode
,
2093 gen_rtx (PRE_DEC
, SImode
,
2094 stack_pointer_rtx
)),
2095 frame_pointer_rtx
));
2097 RTX_FRAME_RELATED_P (insn
) = 1;
2098 insn
= emit_move_insn (xops
[1], xops
[0]);
2099 RTX_FRAME_RELATED_P (insn
) = 1;
2104 output_asm_insn ("push%L1 %1", xops
);
2105 #ifdef INCOMING_RETURN_ADDR_RTX
2106 if (dwarf2out_do_frame ())
2108 char *l
= dwarf2out_cfi_label ();
2110 cfa_store_offset
+= 4;
2111 cfa_offset
= cfa_store_offset
;
2112 dwarf2out_def_cfa (l
, STACK_POINTER_REGNUM
, cfa_offset
);
2113 dwarf2out_reg_save (l
, FRAME_POINTER_REGNUM
, - cfa_store_offset
);
2117 output_asm_insn (AS2 (mov
%L0
,%0,%1), xops
);
2118 #ifdef INCOMING_RETURN_ADDR_RTX
2119 if (dwarf2out_do_frame ())
2120 dwarf2out_def_cfa ("", FRAME_POINTER_REGNUM
, cfa_offset
);
2127 else if (! TARGET_STACK_PROBE
|| tsize
< CHECK_STACK_LIMIT
)
2131 insn
= emit_insn (gen_prologue_set_stack_ptr (xops
[2]));
2132 RTX_FRAME_RELATED_P (insn
) = 1;
2136 output_asm_insn (AS2 (sub
%L0
,%2,%0), xops
);
2137 #ifdef INCOMING_RETURN_ADDR_RTX
2138 if (dwarf2out_do_frame ())
2140 cfa_store_offset
+= tsize
;
2141 if (! frame_pointer_needed
)
2143 cfa_offset
= cfa_store_offset
;
2144 dwarf2out_def_cfa ("", STACK_POINTER_REGNUM
, cfa_offset
);
2152 xops
[3] = gen_rtx_REG (SImode
, 0);
2154 emit_move_insn (xops
[3], xops
[2]);
2156 output_asm_insn (AS2 (mov
%L0
,%2,%3), xops
);
2158 xops
[3] = gen_rtx_MEM (FUNCTION_MODE
,
2159 gen_rtx (SYMBOL_REF
, Pmode
, "_alloca"));
2162 emit_call_insn (gen_rtx (CALL
, VOIDmode
, xops
[3], const0_rtx
));
2164 output_asm_insn (AS1 (call
,%P3
), xops
);
2167 /* Note If use enter it is NOT reversed args.
2168 This one is not reversed from intel!!
2169 I think enter is slower. Also sdb doesn't like it.
2170 But if you want it the code is:
2172 xops[3] = const0_rtx;
2173 output_asm_insn ("enter %2,%3", xops);
2177 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
2178 for (regno
= limit
- 1; regno
>= 0; regno
--)
2179 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2180 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2182 xops
[0] = gen_rtx_REG (SImode
, regno
);
2185 insn
= emit_insn (gen_rtx (SET
, VOIDmode
,
2186 gen_rtx_MEM (SImode
,
2187 gen_rtx (PRE_DEC
, SImode
,
2188 stack_pointer_rtx
)),
2191 RTX_FRAME_RELATED_P (insn
) = 1;
2195 output_asm_insn ("push%L0 %0", xops
);
2196 #ifdef INCOMING_RETURN_ADDR_RTX
2197 if (dwarf2out_do_frame ())
2199 char *l
= dwarf2out_cfi_label ();
2201 cfa_store_offset
+= 4;
2202 if (! frame_pointer_needed
)
2204 cfa_offset
= cfa_store_offset
;
2205 dwarf2out_def_cfa (l
, STACK_POINTER_REGNUM
, cfa_offset
);
2208 dwarf2out_reg_save (l
, regno
, - cfa_store_offset
);
2214 #ifdef SUBTARGET_PROLOGUE
2219 load_pic_register (do_rtl
);
2221 /* If we are profiling, make sure no instructions are scheduled before
2222 the call to mcount. However, if -fpic, the above call will have
2224 if ((profile_flag
|| profile_block_flag
)
2225 && ! pic_reg_used
&& do_rtl
)
2226 emit_insn (gen_blockage ());
/* NOTE(review): garbled extraction -- the nregs declaration/increment and
   several return statements are missing; restore from a pristine copy
   before compiling.  */
2229 /* Return 1 if it is appropriate to emit `ret' instructions in the
2230 body of a function. Do this only if the epilogue is simple, needing a
2231 couple of insns. Prior to reloading, we can't tell how many registers
2232 must be saved, so return 0 then. Return 0 if there is no frame
2233 marker to de-allocate.
2235 If NON_SAVING_SETJMP is defined and true, then it is not possible
2236 for the epilogue to be simple, so return 0. This is a special case
2237 since NON_SAVING_SETJMP will not cause regs_ever_live to change
2238 until final, but jump_optimize may need to know sooner if a
2242 ix86_can_use_return_insn_p ()
2246 int reglimit
= (frame_pointer_needed
2247 ? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
2248 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2249 || current_function_uses_const_pool
);
2251 #ifdef NON_SAVING_SETJMP
2252 if (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
2256 if (! reload_completed
)
2259 for (regno
= reglimit
- 1; regno
>= 0; regno
--)
2260 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2261 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2264 return nregs
== 0 || ! frame_pointer_needed
;
/* NOTE(review): garbled extraction -- the function body is missing
   entirely here; restore from a pristine copy before compiling.  */
2267 /* This function generates the assembly code for function exit.
2268 FILE is an stdio stream to output the code to.
2269 SIZE is an int: how many units of temporary storage to deallocate. */
2272 function_epilogue (file
, size
)
2273 FILE *file ATTRIBUTE_UNUSED
;
2274 int size ATTRIBUTE_UNUSED
;
/* NOTE(review): garbled extraction -- the body is missing; presumably it
   delegates to ix86_epilogue for RTL emission.  Restore from a pristine
   copy before compiling.  */
2279 /* Restore function stack, frame, and registers. */
2282 ix86_expand_epilogue ()
/* NOTE(review): garbled extraction with many missing lines (declarations,
   braces, nregs accumulation, several branch bodies); restore from a
   pristine copy before compiling.  */
/* ix86_epilogue: emits the function epilogue -- a scheduling blockage
   under -fpic/profiling, restore of call-saved registers (by pop when sp
   is valid, else by frame-pointer-relative moves), frame pointer teardown
   (leave, or mov+pop), frame release (add, or one/two pops for tiny
   Pentium frames), popping of callee-popped arguments (with an indirect
   jump for pop counts >= 32768), and the final ret.  DO_RTL selects RTL
   emission vs. direct assembly output.  */
2288 ix86_epilogue (do_rtl
)
2292 register int nregs
, limit
;
2295 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2296 || current_function_uses_const_pool
);
2297 int sp_valid
= !frame_pointer_needed
|| current_function_sp_is_unchanging
;
2298 long tsize
= get_frame_size ();
2300 /* Compute the number of registers to pop */
2302 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
2306 for (regno
= limit
- 1; regno
>= 0; regno
--)
2307 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2308 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2311 /* sp is often unreliable so we may have to go off the frame pointer. */
2313 offset
= - tsize
- (nregs
* UNITS_PER_WORD
);
2315 xops
[2] = stack_pointer_rtx
;
2317 /* When -fpic, we must emit a scheduling barrier, so that the instruction
2318 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
2319 moved before any instruction which implicitly uses the got. This
2320 includes any instruction which uses a SYMBOL_REF or a LABEL_REF.
2322 Alternatively, this could be fixed by making the dependence on the
2323 PIC_OFFSET_TABLE_REGNUM explicit in the RTL. */
2325 if (flag_pic
|| profile_flag
|| profile_block_flag
)
2326 emit_insn (gen_blockage ());
2328 /* If we're only restoring one register and sp is not valid then
2329 using a move instruction to restore the register since it's
2330 less work than reloading sp and popping the register. Otherwise,
2331 restore sp (if necessary) and pop the registers. */
2333 if (nregs
> 1 || sp_valid
)
2337 xops
[0] = adj_offsettable_operand (AT_BP (QImode
), offset
);
2339 emit_insn (gen_movsi_lea (xops
[2], XEXP (xops
[0], 0)));
2341 output_asm_insn (AS2 (lea
%L2
,%0,%2), xops
);
2344 for (regno
= 0; regno
< limit
; regno
++)
2345 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2346 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2348 xops
[0] = gen_rtx_REG (SImode
, regno
);
2351 emit_insn (gen_pop (xops
[0]));
2353 output_asm_insn ("pop%L0 %0", xops
);
2358 for (regno
= 0; regno
< limit
; regno
++)
2359 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2360 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2362 xops
[0] = gen_rtx_REG (SImode
, regno
);
2363 xops
[1] = adj_offsettable_operand (AT_BP (Pmode
), offset
);
2366 emit_move_insn (xops
[0], xops
[1]);
2368 output_asm_insn (AS2 (mov
%L0
,%1,%0), xops
);
2373 if (frame_pointer_needed
)
2375 /* If not an i386, mov & pop is faster than "leave". */
2377 if (TARGET_USE_LEAVE
)
2380 emit_insn (gen_leave());
2382 output_asm_insn ("leave", xops
);
2386 xops
[0] = frame_pointer_rtx
;
2387 xops
[1] = stack_pointer_rtx
;
2391 emit_insn (gen_epilogue_set_stack_ptr());
2392 emit_insn (gen_pop (xops
[0]));
2396 output_asm_insn (AS2 (mov
%L2
,%0,%2), xops
);
2397 output_asm_insn ("pop%L0 %0", xops
);
2404 /* Intel's docs say that for 4 or 8 bytes of stack frame one should
2405 use `pop' and not `add'. */
2406 int use_pop
= tsize
== 4;
2408 /* Use two pops only for the Pentium processors. */
2409 if (tsize
== 8 && !TARGET_386
&& !TARGET_486
)
2411 rtx retval
= current_function_return_rtx
;
2413 xops
[1] = gen_rtx_REG (SImode
, 1); /* %edx */
2415 /* This case is a bit more complex. Since we cannot pop into
2416 %ecx twice we need a second register. But this is only
2417 available if the return value is not of DImode in which
2418 case the %edx register is not available. */
2419 use_pop
= (retval
== NULL
2420 || ! reg_overlap_mentioned_p (xops
[1], retval
));
2425 xops
[0] = gen_rtx_REG (SImode
, 2); /* %ecx */
2429 /* We have to prevent the two pops here from being scheduled.
2430 GCC otherwise would try in some situation to put other
2431 instructions in between them which has a bad effect. */
2432 emit_insn (gen_blockage ());
2433 emit_insn (gen_pop (xops
[0]));
2435 emit_insn (gen_pop (xops
[1]));
2439 output_asm_insn ("pop%L0 %0", xops
);
2441 output_asm_insn ("pop%L1 %1", xops
);
2446 /* If there is no frame pointer, we must still release the frame. */
2447 xops
[0] = GEN_INT (tsize
);
2450 emit_insn (gen_rtx (SET
, VOIDmode
, xops
[2],
2451 gen_rtx (PLUS
, SImode
, xops
[2], xops
[0])));
2453 output_asm_insn (AS2 (add
%L2
,%0,%2), xops
);
2457 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2458 if (profile_block_flag
== 2)
2460 FUNCTION_BLOCK_PROFILER_EXIT(file
);
2464 if (current_function_pops_args
&& current_function_args_size
)
2466 xops
[1] = GEN_INT (current_function_pops_args
);
2468 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2469 asked to pop more, pop return address, do explicit add, and jump
2470 indirectly to the caller. */
2472 if (current_function_pops_args
>= 32768)
2474 /* ??? Which register to use here? */
2475 xops
[0] = gen_rtx_REG (SImode
, 2);
2479 emit_insn (gen_pop (xops
[0]));
2480 emit_insn (gen_rtx (SET
, VOIDmode
, xops
[2],
2481 gen_rtx (PLUS
, SImode
, xops
[1], xops
[2])));
2482 emit_jump_insn (xops
[0]);
2486 output_asm_insn ("pop%L0 %0", xops
);
2487 output_asm_insn (AS2 (add
%L2
,%1,%2), xops
);
2488 output_asm_insn ("jmp %*%0", xops
);
2494 emit_jump_insn (gen_return_pop_internal (xops
[1]));
2496 output_asm_insn ("ret %1", xops
);
2502 emit_jump_insn (gen_return_internal ());
2504 output_asm_insn ("ret", xops
);
/* NOTE(review): garbled extraction -- the ADDR_INVALID macro below is
   missing its do/while wrapper and debug_rtx continuation lines; restore
   from a pristine copy before compiling.  It reports, under
   TARGET_DEBUG_ADDR only, why an address was rejected.  */
2508 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2509 that is a valid memory address for an instruction.
2510 The MODE argument is the machine mode for the MEM expression
2511 that wants to use this address.
2513 On x86, legitimate addresses are:
2514 base movl (base),reg
2515 displacement movl disp,reg
2516 base + displacement movl disp(base),reg
2517 index + base movl (base,index),reg
2518 (index + base) + displacement movl disp(base,index),reg
2519 index*scale movl (,index,scale),reg
2520 index*scale + disp movl disp(,index,scale),reg
2521 index*scale + base movl (base,index,scale),reg
2522 (index*scale + base) + disp movl disp(base,index,scale),reg
2524 In each case, scale can be 1, 2, 4, 8. */
2526 /* This is exactly the same as print_operand_addr, except that
2527 it recognizes addresses instead of printing them.
2529 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2530 convert common non-canonical forms to canonical form so that they will
2533 #define ADDR_INVALID(msg,insn) \
2535 if (TARGET_DEBUG_ADDR) \
2537 fprintf (stderr, msg); \
2543 legitimate_pic_address_disp_p (disp
)
2546 if (GET_CODE (disp
) != CONST
)
2548 disp
= XEXP (disp
, 0);
2550 if (GET_CODE (disp
) == PLUS
)
2552 if (GET_CODE (XEXP (disp
, 1)) != CONST_INT
)
2554 disp
= XEXP (disp
, 0);
2557 if (GET_CODE (disp
) != UNSPEC
2558 || XVECLEN (disp
, 0) != 1)
2561 /* Must be @GOT or @GOTOFF. */
2562 if (XINT (disp
, 1) != 6
2563 && XINT (disp
, 1) != 7)
2566 if (GET_CODE (XVECEXP (disp
, 0, 0)) != SYMBOL_REF
2567 && GET_CODE (XVECEXP (disp
, 0, 0)) != LABEL_REF
)
2574 legitimate_address_p (mode
, addr
, strict
)
2575 enum machine_mode mode
;
2579 rtx base
= NULL_RTX
;
2580 rtx indx
= NULL_RTX
;
2581 rtx scale
= NULL_RTX
;
2582 rtx disp
= NULL_RTX
;
2584 if (TARGET_DEBUG_ADDR
)
2587 "\n======\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2588 GET_MODE_NAME (mode
), strict
);
2593 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == SUBREG
)
2596 else if (GET_CODE (addr
) == PLUS
)
2598 rtx op0
= XEXP (addr
, 0);
2599 rtx op1
= XEXP (addr
, 1);
2600 enum rtx_code code0
= GET_CODE (op0
);
2601 enum rtx_code code1
= GET_CODE (op1
);
2603 if (code0
== REG
|| code0
== SUBREG
)
2605 if (code1
== REG
|| code1
== SUBREG
)
2607 indx
= op0
; /* index + base */
2613 base
= op0
; /* base + displacement */
2618 else if (code0
== MULT
)
2620 indx
= XEXP (op0
, 0);
2621 scale
= XEXP (op0
, 1);
2623 if (code1
== REG
|| code1
== SUBREG
)
2624 base
= op1
; /* index*scale + base */
2627 disp
= op1
; /* index*scale + disp */
2630 else if (code0
== PLUS
&& GET_CODE (XEXP (op0
, 0)) == MULT
)
2632 indx
= XEXP (XEXP (op0
, 0), 0); /* index*scale + base + disp */
2633 scale
= XEXP (XEXP (op0
, 0), 1);
2634 base
= XEXP (op0
, 1);
2638 else if (code0
== PLUS
)
2640 indx
= XEXP (op0
, 0); /* index + base + disp */
2641 base
= XEXP (op0
, 1);
2647 ADDR_INVALID ("PLUS subcode is not valid.\n", op0
);
2652 else if (GET_CODE (addr
) == MULT
)
2654 indx
= XEXP (addr
, 0); /* index*scale */
2655 scale
= XEXP (addr
, 1);
2659 disp
= addr
; /* displacement */
2661 /* Allow arg pointer and stack pointer as index if there is not scaling */
2662 if (base
&& indx
&& !scale
2663 && (indx
== arg_pointer_rtx
|| indx
== stack_pointer_rtx
))
2670 /* Validate base register:
2672 Don't allow SUBREG's here, it can lead to spill failures when the base
2673 is one word out of a two word structure, which is represented internally
2678 if (GET_CODE (base
) != REG
)
2680 ADDR_INVALID ("Base is not a register.\n", base
);
2684 if ((strict
&& ! REG_OK_FOR_BASE_STRICT_P (base
))
2685 || (! strict
&& ! REG_OK_FOR_BASE_NONSTRICT_P (base
)))
2687 ADDR_INVALID ("Base is not valid.\n", base
);
2692 /* Validate index register:
2694 Don't allow SUBREG's here, it can lead to spill failures when the index
2695 is one word out of a two word structure, which is represented internally
2699 if (GET_CODE (indx
) != REG
)
2701 ADDR_INVALID ("Index is not a register.\n", indx
);
2705 if ((strict
&& ! REG_OK_FOR_INDEX_STRICT_P (indx
))
2706 || (! strict
&& ! REG_OK_FOR_INDEX_NONSTRICT_P (indx
)))
2708 ADDR_INVALID ("Index is not valid.\n", indx
);
2713 abort (); /* scale w/o index invalid */
2715 /* Validate scale factor: */
2718 HOST_WIDE_INT value
;
2720 if (GET_CODE (scale
) != CONST_INT
)
2722 ADDR_INVALID ("Scale is not valid.\n", scale
);
2726 value
= INTVAL (scale
);
2727 if (value
!= 1 && value
!= 2 && value
!= 4 && value
!= 8)
2729 ADDR_INVALID ("Scale is not a good multiplier.\n", scale
);
2734 /* Validate displacement. */
2737 if (!CONSTANT_ADDRESS_P (disp
))
2739 ADDR_INVALID ("Displacement is not valid.\n", disp
);
2743 else if (GET_CODE (disp
) == CONST_DOUBLE
)
2745 ADDR_INVALID ("Displacement is a const_double.\n", disp
);
2749 if (flag_pic
&& SYMBOLIC_CONST (disp
))
2751 if (! legitimate_pic_address_disp_p (disp
))
2753 ADDR_INVALID ("Displacement is an invalid PIC construct.\n",
2758 if (base
!= pic_offset_table_rtx
2759 && (indx
!= pic_offset_table_rtx
|| scale
!= NULL_RTX
))
2761 ADDR_INVALID ("PIC displacement against invalid base.\n", disp
);
2766 else if (HALF_PIC_P ())
2768 if (! HALF_PIC_ADDRESS_P (disp
)
2769 || (base
!= NULL_RTX
|| indx
!= NULL_RTX
))
2771 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n",
2778 if (TARGET_DEBUG_ADDR
)
2779 fprintf (stderr
, "Address is valid.\n");
2781 /* Everything looks valid, return true */
/* Return a legitimate reference for ORIG (an address) using the
   register REG.  If REG is 0, a new pseudo is generated.

   There are two types of references that must be handled:

   1. Global data references must load the address from the GOT, via
      the PIC reg.  An insn is emitted to do this load, and the reg is
      returned.

   2. Static data references, constant pool addresses, and code labels
      compute the address as an offset from the GOT, whose base is in
      the PIC reg.  Static data objects have SYMBOL_REF_FLAG set to
      differentiate them from global data objects.  The returned
      address is the PIC reg + an unspec constant.

   GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
   reg also appears in the address.  */
2804 legitimize_pic_address (orig
, reg
)
2812 if (GET_CODE (addr
) == LABEL_REF
2813 || (GET_CODE (addr
) == SYMBOL_REF
2814 && (CONSTANT_POOL_ADDRESS_P (addr
)
2815 || SYMBOL_REF_FLAG (addr
))))
2817 /* This symbol may be referenced via a displacement from the PIC
2818 base address (@GOTOFF). */
2820 current_function_uses_pic_offset_table
= 1;
2821 new = gen_rtx_UNSPEC (VOIDmode
, gen_rtvec (1, addr
), 7);
2822 new = gen_rtx_CONST (VOIDmode
, new);
2823 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, new);
2827 emit_move_insn (reg
, new);
2831 else if (GET_CODE (addr
) == SYMBOL_REF
)
2833 /* This symbol must be referenced via a load from the
2834 Global Offset Table (@GOT). */
2836 current_function_uses_pic_offset_table
= 1;
2837 new = gen_rtx_UNSPEC (VOIDmode
, gen_rtvec (1, addr
), 6);
2838 new = gen_rtx_CONST (VOIDmode
, new);
2839 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, new);
2840 new = gen_rtx_MEM (Pmode
, new);
2841 RTX_UNCHANGING_P (new) = 1;
2844 reg
= gen_reg_rtx (Pmode
);
2845 emit_move_insn (reg
, new);
2850 if (GET_CODE (addr
) == CONST
)
2852 addr
= XEXP (addr
, 0);
2853 if (GET_CODE (addr
) == UNSPEC
)
2855 /* Check that the unspec is one of the ones we generate? */
2857 else if (GET_CODE (addr
) != PLUS
)
2860 if (GET_CODE (addr
) == PLUS
)
2862 rtx op0
= XEXP (addr
, 0), op1
= XEXP (addr
, 1);
2864 /* Check first to see if this is a constant offset from a @GOTOFF
2865 symbol reference. */
2866 if ((GET_CODE (op0
) == LABEL_REF
2867 || (GET_CODE (op0
) == SYMBOL_REF
2868 && (CONSTANT_POOL_ADDRESS_P (op0
)
2869 || SYMBOL_REF_FLAG (op0
))))
2870 && GET_CODE (op1
) == CONST_INT
)
2872 current_function_uses_pic_offset_table
= 1;
2873 new = gen_rtx_UNSPEC (VOIDmode
, gen_rtvec (1, op0
), 7);
2874 new = gen_rtx_PLUS (VOIDmode
, new, op1
);
2875 new = gen_rtx_CONST (VOIDmode
, new);
2876 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, new);
2880 emit_move_insn (reg
, new);
2886 base
= legitimize_pic_address (XEXP (addr
, 0), reg
);
2887 new = legitimize_pic_address (XEXP (addr
, 1),
2888 base
== reg
? NULL_RTX
: reg
);
2890 if (GET_CODE (new) == CONST_INT
)
2891 new = plus_constant (base
, INTVAL (new));
2894 if (GET_CODE (new) == PLUS
&& CONSTANT_P (XEXP (new, 1)))
2896 base
= gen_rtx_PLUS (Pmode
, base
, XEXP (new, 0));
2897 new = XEXP (new, 1);
2899 new = gen_rtx_PLUS (Pmode
, base
, new);
2907 /* Emit insns to move operands[1] into operands[0]. */
2910 emit_pic_move (operands
, mode
)
2912 enum machine_mode mode ATTRIBUTE_UNUSED
;
2914 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
2916 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
2917 operands
[1] = force_reg (Pmode
, operands
[1]);
2919 operands
[1] = legitimize_pic_address (operands
[1], temp
);
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This macro is used in only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   MODE and WIN are passed so that this macro can use
   GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this macro to do nothing.  It exists to recognize
   opportunities to optimize the output.

   For the 80386, we handle X+REG by loading X into a register R and
   using R+REG.  R will go in a general reg and indexing will be used.
   However, if REG is a broken-out memory address or multiplication,
   nothing needs to be done because REG can certainly go in a general reg.

   When -fpic is used, special handling is needed for symbolic references.
   See comments by legitimize_pic_address in i386.c for details.  */
2944 legitimize_address (x
, oldx
, mode
)
2946 register rtx oldx ATTRIBUTE_UNUSED
;
2947 enum machine_mode mode
;
2952 if (TARGET_DEBUG_ADDR
)
2954 fprintf (stderr
, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n",
2955 GET_MODE_NAME (mode
));
2959 if (flag_pic
&& SYMBOLIC_CONST (x
))
2960 return legitimize_pic_address (x
, 0);
2962 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2963 if (GET_CODE (x
) == ASHIFT
2964 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2965 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (x
, 1)))) < 4)
2968 x
= gen_rtx_MULT (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2969 GEN_INT (1 << log
));
2972 if (GET_CODE (x
) == PLUS
)
2974 /* Canonicalize shifts by 0, 1, 2, 3 into multiply. */
2976 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
2977 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2978 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 0), 1)))) < 4)
2981 XEXP (x
, 0) = gen_rtx (MULT
, Pmode
,
2982 force_reg (Pmode
, XEXP (XEXP (x
, 0), 0)),
2983 GEN_INT (1 << log
));
2986 if (GET_CODE (XEXP (x
, 1)) == ASHIFT
2987 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
2988 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 1), 1)))) < 4)
2991 XEXP (x
, 1) = gen_rtx (MULT
, Pmode
,
2992 force_reg (Pmode
, XEXP (XEXP (x
, 1), 0)),
2993 GEN_INT (1 << log
));
2996 /* Put multiply first if it isn't already. */
2997 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2999 rtx tmp
= XEXP (x
, 0);
3000 XEXP (x
, 0) = XEXP (x
, 1);
3005 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
3006 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
3007 created by virtual register instantiation, register elimination, and
3008 similar optimizations. */
3009 if (GET_CODE (XEXP (x
, 0)) == MULT
&& GET_CODE (XEXP (x
, 1)) == PLUS
)
3012 x
= gen_rtx (PLUS
, Pmode
,
3013 gen_rtx (PLUS
, Pmode
, XEXP (x
, 0),
3014 XEXP (XEXP (x
, 1), 0)),
3015 XEXP (XEXP (x
, 1), 1));
3019 (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
3020 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
3021 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 0)) == PLUS
3022 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == MULT
3023 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == PLUS
3024 && CONSTANT_P (XEXP (x
, 1)))
3027 rtx other
= NULL_RTX
;
3029 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3031 constant
= XEXP (x
, 1);
3032 other
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
3034 else if (GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 1)) == CONST_INT
)
3036 constant
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
3037 other
= XEXP (x
, 1);
3045 x
= gen_rtx (PLUS
, Pmode
,
3046 gen_rtx (PLUS
, Pmode
, XEXP (XEXP (x
, 0), 0),
3047 XEXP (XEXP (XEXP (x
, 0), 1), 0)),
3048 plus_constant (other
, INTVAL (constant
)));
3052 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
3055 if (GET_CODE (XEXP (x
, 0)) == MULT
)
3058 XEXP (x
, 0) = force_operand (XEXP (x
, 0), 0);
3061 if (GET_CODE (XEXP (x
, 1)) == MULT
)
3064 XEXP (x
, 1) = force_operand (XEXP (x
, 1), 0);
3068 && GET_CODE (XEXP (x
, 1)) == REG
3069 && GET_CODE (XEXP (x
, 0)) == REG
)
3072 if (flag_pic
&& SYMBOLIC_CONST (XEXP (x
, 1)))
3075 x
= legitimize_pic_address (x
, 0);
3078 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
3081 if (GET_CODE (XEXP (x
, 0)) == REG
)
3083 register rtx temp
= gen_reg_rtx (Pmode
);
3084 register rtx val
= force_operand (XEXP (x
, 1), temp
);
3086 emit_move_insn (temp
, val
);
3092 else if (GET_CODE (XEXP (x
, 1)) == REG
)
3094 register rtx temp
= gen_reg_rtx (Pmode
);
3095 register rtx val
= force_operand (XEXP (x
, 0), temp
);
3097 emit_move_insn (temp
, val
);
/* Print an integer constant expression in assembler syntax.  Addition
   and subtraction are the only arithmetic that may appear in these
   expressions.  FILE is the stdio stream to write to, X is the rtx, and
   CODE is the operand print code from the output string.  */
3113 output_pic_addr_const (file
, x
, code
)
3120 switch (GET_CODE (x
))
3130 assemble_name (file
, XSTR (x
, 0));
3131 if (code
== 'P' && ! SYMBOL_REF_FLAG (x
))
3132 fputs ("@PLT", file
);
3139 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (x
));
3140 assemble_name (asm_out_file
, buf
);
3144 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
));
3148 /* This used to output parentheses around the expression,
3149 but that does not work on the 386 (either ATT or BSD assembler). */
3150 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3154 if (GET_MODE (x
) == VOIDmode
)
3156 /* We can use %d if the number is <32 bits and positive. */
3157 if (CONST_DOUBLE_HIGH (x
) || CONST_DOUBLE_LOW (x
) < 0)
3158 fprintf (file
, "0x%lx%08lx",
3159 (unsigned long) CONST_DOUBLE_HIGH (x
),
3160 (unsigned long) CONST_DOUBLE_LOW (x
));
3162 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_DOUBLE_LOW (x
));
3165 /* We can't handle floating point constants;
3166 PRINT_OPERAND must handle them. */
3167 output_operand_lossage ("floating constant misused");
3171 /* Some assemblers need integer constants to appear first. */
3172 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
3174 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3175 fprintf (file
, "+");
3176 output_pic_addr_const (file
, XEXP (x
, 1), code
);
3178 else if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3180 output_pic_addr_const (file
, XEXP (x
, 1), code
);
3181 fprintf (file
, "+");
3182 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3189 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3190 fprintf (file
, "-");
3191 output_pic_addr_const (file
, XEXP (x
, 1), code
);
3195 if (XVECLEN (x
, 0) != 1)
3197 output_pic_addr_const (file
, XVECEXP (x
, 0, 0), code
);
3198 switch (XINT (x
, 1))
3201 fputs ("@GOT", file
);
3204 fputs ("@GOTOFF", file
);
3207 fputs ("@PLT", file
);
3210 output_operand_lossage ("invalid UNSPEC as operand");
3216 output_operand_lossage ("invalid expression as operand");
3221 put_jump_code (code
, reverse
, file
)
3226 int flags
= cc_prev_status
.flags
;
3227 int ieee
= (TARGET_IEEE_FP
&& (flags
& CC_IN_80387
));
3230 if (flags
& CC_Z_IN_NOT_C
)
3234 fputs (reverse
? "c" : "nc", file
);
3238 fputs (reverse
? "nc" : "c", file
);
3249 suffix
= reverse
? "ae" : "b";
3254 suffix
= reverse
? "ne" : "e";
3257 suffix
= reverse
? "ne" : "e";
3260 suffix
= reverse
? "e" : "ne";
3265 fputs (suffix
, file
);
3268 if (flags
& CC_TEST_AX
)
3270 if ((flags
& CC_NO_OVERFLOW
) && (code
== LE
|| code
== GT
))
3273 code
= reverse_condition (code
);
3285 suffix
= flags
& CC_IN_80387
? "a" : "g";
3293 if (flags
& CC_NO_OVERFLOW
)
3296 suffix
= flags
& CC_IN_80387
? "b" : "l";
3304 if (flags
& CC_NO_OVERFLOW
)
3307 suffix
= flags
& CC_IN_80387
? "ae" : "ge";
3315 suffix
= flags
& CC_IN_80387
? "be" : "le";
3325 fputs (suffix
, file
);
3328 /* Append the correct conditional move suffix which corresponds to CODE. */
3331 put_condition_code (code
, reverse_cc
, mode
, file
)
3334 enum mode_class mode
;
3337 int ieee
= (TARGET_IEEE_FP
&& (cc_prev_status
.flags
& CC_IN_80387
)
3338 && ! (cc_prev_status
.flags
& CC_FCOMI
));
3339 if (reverse_cc
&& ! ieee
)
3340 code
= reverse_condition (code
);
3342 if (mode
== MODE_INT
)
3346 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
3353 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
3360 if (cc_prev_status
.flags
& CC_NO_OVERFLOW
)
3375 if (cc_prev_status
.flags
& CC_NO_OVERFLOW
)
3398 output_operand_lossage ("Invalid %%C operand");
3401 else if (mode
== MODE_FLOAT
)
3405 fputs (ieee
? (reverse_cc
? "ne" : "e") : "ne", file
);
3408 fputs (ieee
? (reverse_cc
? "ne" : "e") : "e", file
);
3411 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nb", file
);
3414 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nbe", file
);
3417 fputs (ieee
? (reverse_cc
? "nb" : "b") : "be", file
);
3420 fputs (ieee
? (reverse_cc
? "ne" : "e") : "b", file
);
3423 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nb", file
);
3426 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nbe", file
);
3429 fputs (ieee
? (reverse_cc
? "nb" : "b") : "be", file
);
3432 fputs (ieee
? (reverse_cc
? "ne" : "e") : "b", file
);
3435 output_operand_lossage ("Invalid %%C operand");
/* Meaning of CODE:
   L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
   C -- print opcode suffix for set/cmov insn.
   c -- like C, but print reversed condition
   F -- print opcode suffix for fcmov insn.
   f -- like F, but print reversed condition
   D -- print the opcode suffix for a jump
   d -- like D, but print reversed condition
   R -- print the prefix for register names.
   z -- print the opcode suffix for the size of the current operand.
   * -- print a star (in certain assembler syntax)
   w -- print the operand as if it's a "word" (HImode) even if it isn't.
   J -- print the appropriate jump operand.
   s -- print a shift double count, followed by the assemblers argument
        delimiter.
   b -- print the QImode name of the register for the indicated operand.
        %b0 would print %al if operands[0] is reg 0.
   w -- likewise, print the HImode name of the register.
   k -- likewise, print the SImode name of the register.
   h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
   y -- print "st(0)" instead of "st" as a register.
   P -- print as a PIC constant  */
3463 print_operand (file
, x
, code
)
3478 PUT_OP_SIZE (code
, 'l', file
);
3482 PUT_OP_SIZE (code
, 'w', file
);
3486 PUT_OP_SIZE (code
, 'b', file
);
3490 PUT_OP_SIZE (code
, 'l', file
);
3494 PUT_OP_SIZE (code
, 's', file
);
3498 PUT_OP_SIZE (code
, 't', file
);
3502 /* 387 opcodes don't get size suffixes if the operands are
3505 if (STACK_REG_P (x
))
3508 /* this is the size of op from size of operand */
3509 switch (GET_MODE_SIZE (GET_MODE (x
)))
3512 PUT_OP_SIZE ('B', 'b', file
);
3516 PUT_OP_SIZE ('W', 'w', file
);
3520 if (GET_MODE (x
) == SFmode
)
3522 PUT_OP_SIZE ('S', 's', file
);
3526 PUT_OP_SIZE ('L', 'l', file
);
3530 PUT_OP_SIZE ('T', 't', file
);
3534 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
)
3536 #ifdef GAS_MNEMONICS
3537 PUT_OP_SIZE ('Q', 'q', file
);
3540 PUT_OP_SIZE ('Q', 'l', file
); /* Fall through */
3544 PUT_OP_SIZE ('Q', 'l', file
);
3558 switch (GET_CODE (x
))
3560 /* These conditions are appropriate for testing the result
3561 of an arithmetic operation, not for a compare operation.
3562 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
3563 CC_Z_IN_NOT_C false and not floating point. */
3564 case NE
: fputs ("jne", file
); return;
3565 case EQ
: fputs ("je", file
); return;
3566 case GE
: fputs ("jns", file
); return;
3567 case LT
: fputs ("js", file
); return;
3568 case GEU
: fputs ("jmp", file
); return;
3569 case GTU
: fputs ("jne", file
); return;
3570 case LEU
: fputs ("je", file
); return;
3571 case LTU
: fputs ("#branch never", file
); return;
3573 /* no matching branches for GT nor LE */
3580 if (GET_CODE (x
) == CONST_INT
|| ! SHIFT_DOUBLE_OMITS_COUNT
)
3582 PRINT_OPERAND (file
, x
, 0);
3583 fputs (AS2C (,) + 1, file
);
3589 put_jump_code (GET_CODE (x
), 0, file
);
3593 put_jump_code (GET_CODE (x
), 1, file
);
3596 /* This is used by the conditional move instructions. */
3598 put_condition_code (GET_CODE (x
), 0, MODE_INT
, file
);
3601 /* Like above, but reverse condition */
3603 put_condition_code (GET_CODE (x
), 1, MODE_INT
, file
); return;
3606 put_condition_code (GET_CODE (x
), 0, MODE_FLOAT
, file
);
3609 /* Like above, but reverse condition */
3611 put_condition_code (GET_CODE (x
), 1, MODE_FLOAT
, file
);
3618 sprintf (str
, "invalid operand code `%c'", code
);
3619 output_operand_lossage (str
);
3624 if (GET_CODE (x
) == REG
)
3626 PRINT_REG (x
, code
, file
);
3629 else if (GET_CODE (x
) == MEM
)
3631 PRINT_PTR (x
, file
);
3632 if (CONSTANT_ADDRESS_P (XEXP (x
, 0)))
3635 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3637 output_addr_const (file
, XEXP (x
, 0));
3640 output_address (XEXP (x
, 0));
3643 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
3648 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3649 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
3650 PRINT_IMMED_PREFIX (file
);
3651 fprintf (file
, "0x%lx", l
);
3654 /* These float cases don't actually occur as immediate operands. */
3655 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
3660 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3661 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
3662 fprintf (file
, "%s", dstr
);
3665 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == XFmode
)
3670 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3671 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
3672 fprintf (file
, "%s", dstr
);
3678 if (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
)
3679 PRINT_IMMED_PREFIX (file
);
3680 else if (GET_CODE (x
) == CONST
|| GET_CODE (x
) == SYMBOL_REF
3681 || GET_CODE (x
) == LABEL_REF
)
3682 PRINT_OFFSET_PREFIX (file
);
3685 output_pic_addr_const (file
, x
, code
);
3687 output_addr_const (file
, x
);
3691 /* Print a memory operand whose address is ADDR. */
3694 print_operand_address (file
, addr
)
3698 register rtx reg1
, reg2
, breg
, ireg
;
3701 switch (GET_CODE (addr
))
3705 fprintf (file
, "%se", RP
);
3706 fputs (hi_reg_name
[REGNO (addr
)], file
);
3716 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0)))
3718 offset
= XEXP (addr
, 0);
3719 addr
= XEXP (addr
, 1);
3721 else if (CONSTANT_ADDRESS_P (XEXP (addr
, 1)))
3723 offset
= XEXP (addr
, 1);
3724 addr
= XEXP (addr
, 0);
3727 if (GET_CODE (addr
) != PLUS
)
3729 else if (GET_CODE (XEXP (addr
, 0)) == MULT
)
3730 reg1
= XEXP (addr
, 0), addr
= XEXP (addr
, 1);
3731 else if (GET_CODE (XEXP (addr
, 1)) == MULT
)
3732 reg1
= XEXP (addr
, 1), addr
= XEXP (addr
, 0);
3733 else if (GET_CODE (XEXP (addr
, 0)) == REG
)
3734 reg1
= XEXP (addr
, 0), addr
= XEXP (addr
, 1);
3735 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
3736 reg1
= XEXP (addr
, 1), addr
= XEXP (addr
, 0);
3738 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == MULT
)
3755 if ((reg1
&& GET_CODE (reg1
) == MULT
)
3756 || (reg2
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2
))))
3761 else if (reg1
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1
)))
3767 if (ireg
!= 0 || breg
!= 0)
3774 output_pic_addr_const (file
, addr
, 0);
3775 else if (GET_CODE (addr
) == LABEL_REF
)
3776 output_asm_label (addr
);
3778 output_addr_const (file
, addr
);
3781 if (ireg
!= 0 && GET_CODE (ireg
) == MULT
)
3783 scale
= INTVAL (XEXP (ireg
, 1));
3784 ireg
= XEXP (ireg
, 0);
3787 /* The stack pointer can only appear as a base register,
3788 never an index register, so exchange the regs if it is wrong. */
3790 if (scale
== 1 && ireg
&& REGNO (ireg
) == STACK_POINTER_REGNUM
)
3799 /* output breg+ireg*scale */
3800 PRINT_B_I_S (breg
, ireg
, scale
, file
);
3808 if (GET_CODE (XEXP (addr
, 0)) == CONST_INT
)
3810 scale
= INTVAL (XEXP (addr
, 0));
3811 ireg
= XEXP (addr
, 1);
3815 scale
= INTVAL (XEXP (addr
, 1));
3816 ireg
= XEXP (addr
, 0);
3819 output_addr_const (file
, const0_rtx
);
3820 PRINT_B_I_S (NULL_RTX
, ireg
, scale
, file
);
3825 if (GET_CODE (addr
) == CONST_INT
3826 && INTVAL (addr
) < 0x8000
3827 && INTVAL (addr
) >= -0x8000)
3828 fprintf (file
, "%d", (int) INTVAL (addr
));
3832 output_pic_addr_const (file
, addr
, 0);
3834 output_addr_const (file
, addr
);
/* Set the cc_status for the results of an insn whose pattern is EXP.
   On the 80386, we assume that only test and compare insns, as well
   as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, BSF, ASHIFT,
   ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
   Also, we assume that jumps, moves and sCOND don't affect the condition
   codes.  All else clobbers the condition codes, by assumption.

   We assume that ALL integer add, minus, etc. instructions effect the
   condition codes.  This MUST be consistent with i386.md.

   We don't record any float test or compare - the redundant test &
   compare check in final.c does not handle stack-like regs correctly.  */
3853 notice_update_cc (exp
)
3856 if (GET_CODE (exp
) == SET
)
3858 /* Jumps do not alter the cc's. */
3859 if (SET_DEST (exp
) == pc_rtx
)
3862 /* Moving register or memory into a register:
3863 it doesn't alter the cc's, but it might invalidate
3864 the RTX's which we remember the cc's came from.
3865 (Note that moving a constant 0 or 1 MAY set the cc's). */
3866 if (REG_P (SET_DEST (exp
))
3867 && (REG_P (SET_SRC (exp
)) || GET_CODE (SET_SRC (exp
)) == MEM
3868 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'
3869 || (GET_CODE (SET_SRC (exp
)) == IF_THEN_ELSE
3870 && GET_MODE_CLASS (GET_MODE (SET_DEST (exp
))) == MODE_INT
)))
3872 if (cc_status
.value1
3873 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3874 cc_status
.value1
= 0;
3876 if (cc_status
.value2
3877 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3878 cc_status
.value2
= 0;
3883 /* Moving register into memory doesn't alter the cc's.
3884 It may invalidate the RTX's which we remember the cc's came from. */
3885 if (GET_CODE (SET_DEST (exp
)) == MEM
3886 && (REG_P (SET_SRC (exp
))
3887 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3889 if (cc_status
.value1
3890 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3891 cc_status
.value1
= 0;
3892 if (cc_status
.value2
3893 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3894 cc_status
.value2
= 0;
3899 /* Function calls clobber the cc's. */
3900 else if (GET_CODE (SET_SRC (exp
)) == CALL
)
3906 /* Tests and compares set the cc's in predictable ways. */
3907 else if (SET_DEST (exp
) == cc0_rtx
)
3910 cc_status
.value1
= SET_SRC (exp
);
3914 /* Certain instructions effect the condition codes. */
3915 else if (GET_MODE (SET_SRC (exp
)) == SImode
3916 || GET_MODE (SET_SRC (exp
)) == HImode
3917 || GET_MODE (SET_SRC (exp
)) == QImode
)
3918 switch (GET_CODE (SET_SRC (exp
)))
3920 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
3921 /* Shifts on the 386 don't set the condition codes if the
3922 shift count is zero. */
3923 if (GET_CODE (XEXP (SET_SRC (exp
), 1)) != CONST_INT
)
3929 /* We assume that the CONST_INT is non-zero (this rtx would
3930 have been deleted if it were zero. */
3932 case PLUS
: case MINUS
: case NEG
:
3933 case AND
: case IOR
: case XOR
:
3934 cc_status
.flags
= CC_NO_OVERFLOW
;
3935 cc_status
.value1
= SET_SRC (exp
);
3936 cc_status
.value2
= SET_DEST (exp
);
3939 /* This is the bsf pattern used by ffs. */
3941 if (XINT (SET_SRC (exp
), 1) == 5)
3943 /* Only the Z flag is defined after bsf. */
3945 = CC_NOT_POSITIVE
| CC_NOT_NEGATIVE
| CC_NO_OVERFLOW
;
3946 cc_status
.value1
= XVECEXP (SET_SRC (exp
), 0, 0);
3947 cc_status
.value2
= 0;
3960 else if (GET_CODE (exp
) == PARALLEL
3961 && GET_CODE (XVECEXP (exp
, 0, 0)) == SET
)
3963 if (SET_DEST (XVECEXP (exp
, 0, 0)) == pc_rtx
)
3965 if (SET_DEST (XVECEXP (exp
, 0, 0)) == cc0_rtx
)
3969 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp
, 0, 0))))
3971 cc_status
.flags
|= CC_IN_80387
;
3972 if (0 && TARGET_CMOVE
&& stack_regs_mentioned_p
3973 (XEXP (SET_SRC (XVECEXP (exp
, 0, 0)), 1)))
3974 cc_status
.flags
|= CC_FCOMI
;
3977 cc_status
.value1
= SET_SRC (XVECEXP (exp
, 0, 0));
/* Split one or more DImode RTL references into pairs of SImode
   references.  The RTL can be REG, offsettable MEM, integer constant, or
   CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL to
   split and "num" is its length.  lo_half and hi_half are output arrays
   that parallel "operands".  */
3996 split_di (operands
, num
, lo_half
, hi_half
)
3999 rtx lo_half
[], hi_half
[];
4003 rtx op
= operands
[num
];
4004 if (! reload_completed
)
4006 lo_half
[num
] = gen_lowpart (SImode
, op
);
4007 hi_half
[num
] = gen_highpart (SImode
, op
);
4009 else if (GET_CODE (op
) == REG
)
4011 lo_half
[num
] = gen_rtx_REG (SImode
, REGNO (op
));
4012 hi_half
[num
] = gen_rtx_REG (SImode
, REGNO (op
) + 1);
4014 else if (CONSTANT_P (op
))
4015 split_double (op
, &lo_half
[num
], &hi_half
[num
]);
4016 else if (offsettable_memref_p (op
))
4018 rtx lo_addr
= XEXP (op
, 0);
4019 rtx hi_addr
= XEXP (adj_offsettable_operand (op
, 4), 0);
4020 lo_half
[num
] = change_address (op
, SImode
, lo_addr
);
4021 hi_half
[num
] = change_address (op
, SImode
, hi_addr
);
/* Return 1 if this is a valid binary operation on a 387.
   OP is the expression matched, and MODE is its mode.  */
4032 binary_387_op (op
, mode
)
4034 enum machine_mode mode
;
4036 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4039 switch (GET_CODE (op
))
4045 return GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
;
/* Return 1 if this is a valid shift or rotate operation on a 386.
   OP is the expression matched, and MODE is its mode.  */
4058 enum machine_mode mode
;
4060 rtx operand
= XEXP (op
, 0);
4062 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4065 if (GET_MODE (operand
) != GET_MODE (op
)
4066 || GET_MODE_CLASS (GET_MODE (op
)) != MODE_INT
)
4069 return (GET_CODE (op
) == ASHIFT
4070 || GET_CODE (op
) == ASHIFTRT
4071 || GET_CODE (op
) == LSHIFTRT
4072 || GET_CODE (op
) == ROTATE
4073 || GET_CODE (op
) == ROTATERT
);
/* Return 1 if OP is a COMPARE rtx with mode VOIDmode.
   MODE is not used.  */
4080 VOIDmode_compare_op (op
, mode
)
4082 enum machine_mode mode ATTRIBUTE_UNUSED
;
4084 return GET_CODE (op
) == COMPARE
&& GET_MODE (op
) == VOIDmode
;
/* Output code to perform a 387 binary operation in INSN, one of PLUS,
   MINUS, MULT or DIV.  OPERANDS are the insn operands, where operands[3]
   is the expression of the binary operation.  The output may either be
   emitted here, or returned to the caller, like all output_* functions.

   There is no guarantee that the operands are the same mode, as they
   might be within FLOAT or FLOAT_EXTEND expressions.  */
4096 output_387_binary_op (insn
, operands
)
4102 static char buf
[100];
4104 switch (GET_CODE (operands
[3]))
4107 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
4108 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
4115 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
4116 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
4123 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
4124 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
4131 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
4132 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
4142 strcpy (buf
, base_op
);
4144 switch (GET_CODE (operands
[3]))
4148 if (REG_P (operands
[2]) && REGNO (operands
[0]) == REGNO (operands
[2]))
4151 operands
[2] = operands
[1];
4155 if (GET_CODE (operands
[2]) == MEM
)
4156 return strcat (buf
, AS1 (%z2
,%2));
4158 if (NON_STACK_REG_P (operands
[1]))
4160 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
4164 else if (NON_STACK_REG_P (operands
[2]))
4166 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
4170 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
4172 if (STACK_TOP_P (operands
[0]))
4173 return strcat (buf
, AS2 (p
,%0,%2));
4175 return strcat (buf
, AS2 (p
,%2,%0));
4178 if (STACK_TOP_P (operands
[0]))
4179 return strcat (buf
, AS2C (%y2
,%0));
4181 return strcat (buf
, AS2C (%2,%0));
4185 if (GET_CODE (operands
[1]) == MEM
)
4186 return strcat (buf
, AS1 (r
%z1
,%1));
4188 if (GET_CODE (operands
[2]) == MEM
)
4189 return strcat (buf
, AS1 (%z2
,%2));
4191 if (NON_STACK_REG_P (operands
[1]))
4193 output_op_from_reg (operands
[1], strcat (buf
, AS1 (r
%z0
,%1)));
4197 else if (NON_STACK_REG_P (operands
[2]))
4199 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
4203 if (! STACK_REG_P (operands
[1]) || ! STACK_REG_P (operands
[2]))
4206 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
4208 if (STACK_TOP_P (operands
[0]))
4209 return strcat (buf
, AS2 (p
,%0,%2));
4211 return strcat (buf
, AS2 (rp
,%2,%0));
4214 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1])))
4216 if (STACK_TOP_P (operands
[0]))
4217 return strcat (buf
, AS2 (rp
,%0,%1));
4219 return strcat (buf
, AS2 (p
,%1,%0));
4222 if (STACK_TOP_P (operands
[0]))
4224 if (STACK_TOP_P (operands
[1]))
4225 return strcat (buf
, AS2C (%y2
,%0));
4227 return strcat (buf
, AS2 (r
,%y1
,%0));
4229 else if (STACK_TOP_P (operands
[1]))
4230 return strcat (buf
, AS2C (%1,%0));
4232 return strcat (buf
, AS2 (r
,%2,%0));
/* Output code for INSN to convert a float to a signed int.  OPERANDS
   are the insn operands.  The output may be SFmode or DFmode and the
   input operand may be SImode or DImode.  As a special case, make sure
   that the 387 stack top dies if the output mode is DImode, because the
   hardware requires this.  */
4246 output_fix_trunc (insn
, operands
)
4250 int stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
4253 if (! STACK_TOP_P (operands
[1]))
4256 xops
[0] = GEN_INT (12);
4257 xops
[1] = operands
[4];
4259 output_asm_insn (AS1 (fnstc
%W2
,%2), operands
);
4260 output_asm_insn (AS2 (mov
%L2
,%2,%4), operands
);
4261 output_asm_insn (AS2 (mov
%B1
,%0,%h1
), xops
);
4262 output_asm_insn (AS2 (mov
%L4
,%4,%3), operands
);
4263 output_asm_insn (AS1 (fldc
%W3
,%3), operands
);
4265 if (NON_STACK_REG_P (operands
[0]))
4266 output_to_reg (operands
[0], stack_top_dies
, operands
[3]);
4268 else if (GET_CODE (operands
[0]) == MEM
)
4271 output_asm_insn (AS1 (fistp
%z0
,%0), operands
);
4272 else if (GET_MODE (operands
[0]) == DImode
&& ! stack_top_dies
)
4274 /* There is no DImode version of this without a stack pop, so
4275 we must emulate it. It doesn't matter much what the second
4276 instruction is, because the value being pushed on the FP stack
4277 is not used except for the following stack popping store.
4278 This case can only happen without optimization, so it doesn't
4279 matter that it is inefficient. */
4280 output_asm_insn (AS1 (fistp
%z0
,%0), operands
);
4281 output_asm_insn (AS1 (fild
%z0
,%0), operands
);
4284 output_asm_insn (AS1 (fist
%z0
,%0), operands
);
4289 return AS1 (fldc
%W2
,%2);
4292 /* Output code for INSN to compare OPERANDS. The two operands might
4293 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
4294 expression. If the compare is in mode CCFPEQmode, use an opcode that
4295 will not fault if a qNaN is present. */
4298 output_float_compare (insn
, operands
)
4303 rtx body
= XVECEXP (PATTERN (insn
), 0, 0);
4304 int unordered_compare
= GET_MODE (SET_SRC (body
)) == CCFPEQmode
;
4307 if (0 && TARGET_CMOVE
&& STACK_REG_P (operands
[1]))
4309 cc_status
.flags
|= CC_FCOMI
;
4310 cc_prev_status
.flags
&= ~CC_TEST_AX
;
4313 if (! STACK_TOP_P (operands
[0]))
4316 operands
[0] = operands
[1];
4318 cc_status
.flags
|= CC_REVERSED
;
4321 if (! STACK_TOP_P (operands
[0]))
4324 stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
4326 if (STACK_REG_P (operands
[1])
4328 && find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1]))
4329 && REGNO (operands
[1]) != FIRST_STACK_REG
)
4331 /* If both the top of the 387 stack dies, and the other operand
4332 is also a stack register that dies, then this must be a
4333 `fcompp' float compare */
4335 if (unordered_compare
)
4337 if (cc_status
.flags
& CC_FCOMI
)
4339 output_asm_insn (AS2 (fucomip
,%y1
,%0), operands
);
4340 output_asm_insn (AS1 (fstp
, %y0
), operands
);
4344 output_asm_insn ("fucompp", operands
);
4348 if (cc_status
.flags
& CC_FCOMI
)
4350 output_asm_insn (AS2 (fcomip
, %y1
,%0), operands
);
4351 output_asm_insn (AS1 (fstp
, %y0
), operands
);
4355 output_asm_insn ("fcompp", operands
);
4360 static char buf
[100];
4362 /* Decide if this is the integer or float compare opcode, or the
4363 unordered float compare. */
4365 if (unordered_compare
)
4366 strcpy (buf
, (cc_status
.flags
& CC_FCOMI
) ? "fucomi" : "fucom");
4367 else if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_FLOAT
)
4368 strcpy (buf
, (cc_status
.flags
& CC_FCOMI
) ? "fcomi" : "fcom");
4370 strcpy (buf
, "ficom");
4372 /* Modify the opcode if the 387 stack is to be popped. */
4377 if (NON_STACK_REG_P (operands
[1]))
4378 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
4379 else if (cc_status
.flags
& CC_FCOMI
)
4381 output_asm_insn (strcat (buf
, AS2 (%z1
,%y1
,%0)), operands
);
4385 output_asm_insn (strcat (buf
, AS1 (%z1
,%y1
)), operands
);
4388 /* Now retrieve the condition code. */
4390 return output_fp_cc0_set (insn
);
4393 /* Output opcodes to transfer the results of FP compare or test INSN
4394 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
4395 result of the compare or test is unordered, no comparison operator
4396 succeeds except NE. Return an output template, if any. */
4399 output_fp_cc0_set (insn
)
4406 xops
[0] = gen_rtx_REG (HImode
, 0);
4407 output_asm_insn (AS1 (fnsts
%W0
,%0), xops
);
4409 if (! TARGET_IEEE_FP
)
4411 if (!(cc_status
.flags
& CC_REVERSED
))
4413 next
= next_cc0_user (insn
);
4415 if (GET_CODE (next
) == JUMP_INSN
4416 && GET_CODE (PATTERN (next
)) == SET
4417 && SET_DEST (PATTERN (next
)) == pc_rtx
4418 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
4419 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
4420 else if (GET_CODE (PATTERN (next
)) == SET
)
4421 code
= GET_CODE (SET_SRC (PATTERN (next
)));
4425 if (code
== GT
|| code
== LT
|| code
== EQ
|| code
== NE
4426 || code
== LE
|| code
== GE
)
4428 /* We will test eax directly. */
4429 cc_status
.flags
|= CC_TEST_AX
;
4437 next
= next_cc0_user (insn
);
4438 if (next
== NULL_RTX
)
4441 if (GET_CODE (next
) == JUMP_INSN
4442 && GET_CODE (PATTERN (next
)) == SET
4443 && SET_DEST (PATTERN (next
)) == pc_rtx
4444 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
4445 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
4446 else if (GET_CODE (PATTERN (next
)) == SET
)
4448 if (GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
4449 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
4451 code
= GET_CODE (SET_SRC (PATTERN (next
)));
4454 else if (GET_CODE (PATTERN (next
)) == PARALLEL
4455 && GET_CODE (XVECEXP (PATTERN (next
), 0, 0)) == SET
)
4457 if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next
), 0, 0))) == IF_THEN_ELSE
)
4458 code
= GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next
), 0, 0)), 0));
4460 code
= GET_CODE (SET_SRC (XVECEXP (PATTERN (next
), 0, 0)));
4465 xops
[0] = gen_rtx_REG (QImode
, 0);
4470 xops
[1] = GEN_INT (0x45);
4471 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4476 xops
[1] = GEN_INT (0x45);
4477 xops
[2] = GEN_INT (0x01);
4478 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4479 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
4484 xops
[1] = GEN_INT (0x05);
4485 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4490 xops
[1] = GEN_INT (0x45);
4491 xops
[2] = GEN_INT (0x40);
4492 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4493 output_asm_insn (AS1 (dec
%B0
,%h0
), xops
);
4494 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
4499 xops
[1] = GEN_INT (0x45);
4500 xops
[2] = GEN_INT (0x40);
4501 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4502 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
4507 xops
[1] = GEN_INT (0x44);
4508 xops
[2] = GEN_INT (0x40);
4509 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4510 output_asm_insn (AS2 (xor%B0
,%2,%h0
), xops
);
4525 #define MAX_386_STACK_LOCALS 2
4527 static rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
4529 /* Define the structure for the machine field in struct function. */
4530 struct machine_function
4532 rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
4534 char pic_label_name
[256];
4537 /* Functions to save and restore i386_stack_locals.
4538 These will be called, via pointer variables,
4539 from push_function_context and pop_function_context. */
4542 save_386_machine_status (p
)
4546 = (struct machine_function
*) xmalloc (sizeof (struct machine_function
));
4547 bcopy ((char *) i386_stack_locals
, (char *) p
->machine
->i386_stack_locals
,
4548 sizeof i386_stack_locals
);
4549 p
->machine
->pic_label_rtx
= pic_label_rtx
;
4550 bcopy (pic_label_name
, p
->machine
->pic_label_name
, 256);
4554 restore_386_machine_status (p
)
4557 bcopy ((char *) p
->machine
->i386_stack_locals
, (char *) i386_stack_locals
,
4558 sizeof i386_stack_locals
);
4559 pic_label_rtx
= p
->machine
->pic_label_rtx
;
4560 bcopy (p
->machine
->pic_label_name
, pic_label_name
, 256);
4565 /* Clear stack slot assignments remembered from previous functions.
4566 This is called from INIT_EXPANDERS once before RTL is emitted for each
4570 clear_386_stack_locals ()
4572 enum machine_mode mode
;
4575 for (mode
= VOIDmode
; (int) mode
< (int) MAX_MACHINE_MODE
;
4576 mode
= (enum machine_mode
) ((int) mode
+ 1))
4577 for (n
= 0; n
< MAX_386_STACK_LOCALS
; n
++)
4578 i386_stack_locals
[(int) mode
][n
] = NULL_RTX
;
4580 pic_label_rtx
= NULL_RTX
;
4581 bzero (pic_label_name
, 256);
4582 /* Arrange to save and restore i386_stack_locals around nested functions. */
4583 save_machine_status
= save_386_machine_status
;
4584 restore_machine_status
= restore_386_machine_status
;
4587 /* Return a MEM corresponding to a stack slot with mode MODE.
4588 Allocate a new slot if necessary.
4590 The RTL for a function can have several slots available: N is
4591 which slot to use. */
4594 assign_386_stack_local (mode
, n
)
4595 enum machine_mode mode
;
4598 if (n
< 0 || n
>= MAX_386_STACK_LOCALS
)
4601 if (i386_stack_locals
[(int) mode
][n
] == NULL_RTX
)
4602 i386_stack_locals
[(int) mode
][n
]
4603 = assign_stack_local (mode
, GET_MODE_SIZE (mode
), 0);
4605 return i386_stack_locals
[(int) mode
][n
];
4610 enum machine_mode mode ATTRIBUTE_UNUSED
;
4612 return (GET_CODE (op
) == MULT
);
4617 enum machine_mode mode ATTRIBUTE_UNUSED
;
4619 return (GET_CODE (op
) == DIV
);
4623 /* Create a new copy of an rtx.
4624 Recursively copies the operands of the rtx,
4625 except for those few rtx codes that are sharable.
4626 Doesn't share CONST */
4634 register RTX_CODE code
;
4635 register char *format_ptr
;
4637 code
= GET_CODE (orig
);
4650 /* SCRATCH must be shared because they represent distinct values. */
4655 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4656 a LABEL_REF, it isn't sharable. */
4657 if (GET_CODE (XEXP (orig
, 0)) == PLUS
4658 && GET_CODE (XEXP (XEXP (orig
, 0), 0)) == SYMBOL_REF
4659 && GET_CODE (XEXP (XEXP (orig
, 0), 1)) == CONST_INT
)
4663 /* A MEM with a constant address is not sharable. The problem is that
4664 the constant address may need to be reloaded. If the mem is shared,
4665 then reloading one copy of this mem will cause all copies to appear
4666 to have been reloaded. */
4669 copy
= rtx_alloc (code
);
4670 PUT_MODE (copy
, GET_MODE (orig
));
4671 copy
->in_struct
= orig
->in_struct
;
4672 copy
->volatil
= orig
->volatil
;
4673 copy
->unchanging
= orig
->unchanging
;
4674 copy
->integrated
= orig
->integrated
;
4676 copy
->is_spill_rtx
= orig
->is_spill_rtx
;
4678 format_ptr
= GET_RTX_FORMAT (GET_CODE (copy
));
4680 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (copy
)); i
++)
4682 switch (*format_ptr
++)
4685 XEXP (copy
, i
) = XEXP (orig
, i
);
4686 if (XEXP (orig
, i
) != NULL
)
4687 XEXP (copy
, i
) = copy_rtx (XEXP (orig
, i
));
4692 XEXP (copy
, i
) = XEXP (orig
, i
);
4697 XVEC (copy
, i
) = XVEC (orig
, i
);
4698 if (XVEC (orig
, i
) != NULL
)
4700 XVEC (copy
, i
) = rtvec_alloc (XVECLEN (orig
, i
));
4701 for (j
= 0; j
< XVECLEN (copy
, i
); j
++)
4702 XVECEXP (copy
, i
, j
) = copy_rtx (XVECEXP (orig
, i
, j
));
4707 XWINT (copy
, i
) = XWINT (orig
, i
);
4711 XINT (copy
, i
) = XINT (orig
, i
);
4716 XSTR (copy
, i
) = XSTR (orig
, i
);
4727 /* Try to rewrite a memory address to make it valid */
4730 rewrite_address (mem_rtx
)
4733 rtx index_rtx
, base_rtx
, offset_rtx
, scale_rtx
, ret_rtx
;
4735 int offset_adjust
= 0;
4736 int was_only_offset
= 0;
4737 rtx mem_addr
= XEXP (mem_rtx
, 0);
4738 char *storage
= oballoc (0);
4740 int is_spill_rtx
= 0;
4742 in_struct
= MEM_IN_STRUCT_P (mem_rtx
);
4743 is_spill_rtx
= RTX_IS_SPILL_P (mem_rtx
);
4745 if (GET_CODE (mem_addr
) == PLUS
4746 && GET_CODE (XEXP (mem_addr
, 1)) == PLUS
4747 && GET_CODE (XEXP (XEXP (mem_addr
, 1), 0)) == REG
)
4749 /* This part is utilized by the combiner. */
4751 = gen_rtx (PLUS
, GET_MODE (mem_addr
),
4752 gen_rtx (PLUS
, GET_MODE (XEXP (mem_addr
, 1)),
4753 XEXP (mem_addr
, 0), XEXP (XEXP (mem_addr
, 1), 0)),
4754 XEXP (XEXP (mem_addr
, 1), 1));
4756 if (memory_address_p (GET_MODE (mem_rtx
), ret_rtx
))
4758 XEXP (mem_rtx
, 0) = ret_rtx
;
4759 RTX_IS_SPILL_P (ret_rtx
) = is_spill_rtx
;
4766 /* This part is utilized by loop.c.
4767 If the address contains PLUS (reg,const) and this pattern is invalid
4768 in this case - try to rewrite the address to make it valid. */
4769 storage
= oballoc (0);
4770 index_rtx
= base_rtx
= offset_rtx
= NULL
;
4772 /* Find the base index and offset elements of the memory address. */
4773 if (GET_CODE (mem_addr
) == PLUS
)
4775 if (GET_CODE (XEXP (mem_addr
, 0)) == REG
)
4777 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4778 base_rtx
= XEXP (mem_addr
, 1), index_rtx
= XEXP (mem_addr
, 0);
4780 base_rtx
= XEXP (mem_addr
, 0), offset_rtx
= XEXP (mem_addr
, 1);
4783 else if (GET_CODE (XEXP (mem_addr
, 0)) == MULT
)
4785 index_rtx
= XEXP (mem_addr
, 0);
4786 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4787 base_rtx
= XEXP (mem_addr
, 1);
4789 offset_rtx
= XEXP (mem_addr
, 1);
4792 else if (GET_CODE (XEXP (mem_addr
, 0)) == PLUS
)
4794 if (GET_CODE (XEXP (XEXP (mem_addr
, 0), 0)) == PLUS
4795 && GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0)) == MULT
4796 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 0))
4798 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 1))
4800 && (GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1))
4802 && GET_CODE (XEXP (XEXP (mem_addr
, 0), 1)) == REG
4803 && GET_CODE (XEXP (mem_addr
, 1)) == SYMBOL_REF
)
4805 index_rtx
= XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0);
4806 offset_rtx
= XEXP (mem_addr
, 1);
4807 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4808 offset_adjust
= INTVAL (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1));
4812 offset_rtx
= XEXP (mem_addr
, 1);
4813 index_rtx
= XEXP (XEXP (mem_addr
, 0), 0);
4814 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4818 else if (GET_CODE (XEXP (mem_addr
, 0)) == CONST_INT
)
4820 was_only_offset
= 1;
4823 offset_rtx
= XEXP (mem_addr
, 1);
4824 offset_adjust
= INTVAL (XEXP (mem_addr
, 0));
4825 if (offset_adjust
== 0)
4827 XEXP (mem_rtx
, 0) = offset_rtx
;
4828 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4838 else if (GET_CODE (mem_addr
) == MULT
)
4839 index_rtx
= mem_addr
;
4846 if (index_rtx
!= 0 && GET_CODE (index_rtx
) == MULT
)
4848 if (GET_CODE (XEXP (index_rtx
, 1)) != CONST_INT
)
4854 scale_rtx
= XEXP (index_rtx
, 1);
4855 scale
= INTVAL (scale_rtx
);
4856 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4859 /* Now find which of the elements are invalid and try to fix them. */
4860 if (index_rtx
&& GET_CODE (index_rtx
) == CONST_INT
&& base_rtx
== NULL
)
4862 offset_adjust
= INTVAL (index_rtx
) * scale
;
4864 if (offset_rtx
!= 0 && CONSTANT_P (offset_rtx
))
4865 offset_rtx
= plus_constant (offset_rtx
, offset_adjust
);
4866 else if (offset_rtx
== 0)
4867 offset_rtx
= const0_rtx
;
4869 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4870 XEXP (mem_rtx
, 0) = offset_rtx
;
4874 if (base_rtx
&& GET_CODE (base_rtx
) == PLUS
4875 && GET_CODE (XEXP (base_rtx
, 0)) == REG
4876 && GET_CODE (XEXP (base_rtx
, 1)) == CONST_INT
)
4878 offset_adjust
+= INTVAL (XEXP (base_rtx
, 1));
4879 base_rtx
= copy_all_rtx (XEXP (base_rtx
, 0));
4882 else if (base_rtx
&& GET_CODE (base_rtx
) == CONST_INT
)
4884 offset_adjust
+= INTVAL (base_rtx
);
4888 if (index_rtx
&& GET_CODE (index_rtx
) == PLUS
4889 && GET_CODE (XEXP (index_rtx
, 0)) == REG
4890 && GET_CODE (XEXP (index_rtx
, 1)) == CONST_INT
)
4892 offset_adjust
+= INTVAL (XEXP (index_rtx
, 1)) * scale
;
4893 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4898 if (! LEGITIMATE_INDEX_P (index_rtx
)
4899 && ! (index_rtx
== stack_pointer_rtx
&& scale
== 1
4900 && base_rtx
== NULL
))
4909 if (! LEGITIMATE_INDEX_P (base_rtx
) && GET_CODE (base_rtx
) != REG
)
4916 if (offset_adjust
!= 0)
4918 if (offset_rtx
!= 0 && CONSTANT_P (offset_rtx
))
4919 offset_rtx
= plus_constant (offset_rtx
, offset_adjust
);
4921 offset_rtx
= const0_rtx
;
4929 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
),
4930 gen_rtx (MULT
, GET_MODE (index_rtx
),
4931 index_rtx
, scale_rtx
),
4934 if (GET_CODE (offset_rtx
) != CONST_INT
4935 || INTVAL (offset_rtx
) != 0)
4936 ret_rtx
= gen_rtx (PLUS
, GET_MODE (ret_rtx
),
4937 ret_rtx
, offset_rtx
);
4941 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
),
4942 index_rtx
, base_rtx
);
4944 if (GET_CODE (offset_rtx
) != CONST_INT
4945 || INTVAL (offset_rtx
) != 0)
4946 ret_rtx
= gen_rtx (PLUS
, GET_MODE (ret_rtx
),
4947 ret_rtx
, offset_rtx
);
4954 ret_rtx
= gen_rtx (MULT
, GET_MODE (index_rtx
),
4955 index_rtx
, scale_rtx
);
4957 if (GET_CODE (offset_rtx
) != CONST_INT
4958 || INTVAL (offset_rtx
) != 0)
4959 ret_rtx
= gen_rtx (PLUS
, GET_MODE (ret_rtx
),
4960 ret_rtx
, offset_rtx
);
4964 if (GET_CODE (offset_rtx
) == CONST_INT
4965 && INTVAL (offset_rtx
) == 0)
4966 ret_rtx
= index_rtx
;
4968 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
),
4969 index_rtx
, offset_rtx
);
4977 if (GET_CODE (offset_rtx
) == CONST_INT
4978 && INTVAL (offset_rtx
) == 0)
4981 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
), base_rtx
,
4984 else if (was_only_offset
)
4985 ret_rtx
= offset_rtx
;
4993 XEXP (mem_rtx
, 0) = ret_rtx
;
4994 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
5005 /* Return 1 if the first insn to set cc before INSN also sets the register
5006 REG_RTX; otherwise return 0. */
5008 last_to_set_cc (reg_rtx
, insn
)
5011 rtx prev_insn
= PREV_INSN (insn
);
5015 if (GET_CODE (prev_insn
) == NOTE
)
5018 else if (GET_CODE (prev_insn
) == INSN
)
5020 if (GET_CODE (PATTERN (prev_insn
)) != SET
)
5023 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn
)), reg_rtx
))
5025 if (sets_condition_code (SET_SRC (PATTERN (prev_insn
))))
5031 else if (! doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn
))))
5038 prev_insn
= PREV_INSN (prev_insn
);
5045 doesnt_set_condition_code (pat
)
5048 switch (GET_CODE (pat
))
5061 sets_condition_code (pat
)
5064 switch (GET_CODE (pat
))
5086 str_immediate_operand (op
, mode
)
5088 enum machine_mode mode ATTRIBUTE_UNUSED
;
5090 if (GET_CODE (op
) == CONST_INT
&& INTVAL (op
) <= 32 && INTVAL (op
) >= 0)
5100 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
5101 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
5102 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
5103 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
))
5109 /* Return 1 if the mode of the SET_DEST of insn is floating point
5110 and it is not an fld or a move from memory to memory.
5111 Otherwise return 0 */
5117 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
5118 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
5119 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
5120 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
5121 && GET_CODE (SET_DEST (PATTERN (insn
))) == REG
5122 && REGNO (SET_DEST (PATTERN (insn
))) >= FIRST_FLOAT_REG
5123 && GET_CODE (SET_SRC (PATTERN (insn
))) != MEM
)
5129 /* Return 1 if the mode of the SET_DEST of INSN is floating point and is
5130 memory and the source is a register. */
5136 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
5137 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
5138 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
5139 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
5140 && GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
5141 && GET_CODE (SET_SRC (PATTERN (insn
))) == REG
)
5147 /* Return 1 if DEP_INSN sets a register which INSN uses as a base
5148 or index to reference memory.
5149 otherwise return 0 */
5152 agi_dependent (insn
, dep_insn
)
5155 if (GET_CODE (dep_insn
) == INSN
5156 && GET_CODE (PATTERN (dep_insn
)) == SET
5157 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == REG
)
5158 return reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn
)), insn
);
5160 if (GET_CODE (dep_insn
) == INSN
&& GET_CODE (PATTERN (dep_insn
)) == SET
5161 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == MEM
5162 && push_operand (SET_DEST (PATTERN (dep_insn
)),
5163 GET_MODE (SET_DEST (PATTERN (dep_insn
)))))
5164 return reg_mentioned_in_mem (stack_pointer_rtx
, insn
);
5169 /* Return 1 if reg is used in rtl as a base or index for a memory ref
5170 otherwise return 0. */
5173 reg_mentioned_in_mem (reg
, rtl
)
5178 register enum rtx_code code
;
5183 code
= GET_CODE (rtl
);
5201 if (code
== MEM
&& reg_mentioned_p (reg
, rtl
))
5204 fmt
= GET_RTX_FORMAT (code
);
5205 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
5209 for (j
= XVECLEN (rtl
, i
) - 1; j
>= 0; j
--)
5210 if (reg_mentioned_in_mem (reg
, XVECEXP (rtl
, i
, j
)))
5214 else if (fmt
[i
] == 'e' && reg_mentioned_in_mem (reg
, XEXP (rtl
, i
)))
5221 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
5223 operands[0] = result, initialized with the startaddress
5224 operands[1] = alignment of the address.
5225 operands[2] = scratch register, initialized with the startaddress when
5226 not aligned, otherwise undefined
5228 This is just the body. It needs the initialisations mentioned above and
5229 some address computing at the end. These things are done in i386.md. */
5232 output_strlen_unroll (operands
)
5237 xops
[0] = operands
[0]; /* Result */
5238 /* operands[1]; * Alignment */
5239 xops
[1] = operands
[2]; /* Scratch */
5240 xops
[2] = GEN_INT (0);
5241 xops
[3] = GEN_INT (2);
5242 xops
[4] = GEN_INT (3);
5243 xops
[5] = GEN_INT (4);
5244 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
5245 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
5246 xops
[8] = gen_label_rtx (); /* label of main loop */
5248 if (TARGET_USE_Q_REG
&& QI_REG_P (xops
[1]))
5249 xops
[9] = gen_label_rtx (); /* pentium optimisation */
5251 xops
[10] = gen_label_rtx (); /* end label 2 */
5252 xops
[11] = gen_label_rtx (); /* end label 1 */
5253 xops
[12] = gen_label_rtx (); /* end label */
5254 /* xops[13] * Temporary used */
5255 xops
[14] = GEN_INT (0xff);
5256 xops
[15] = GEN_INT (0xff00);
5257 xops
[16] = GEN_INT (0xff0000);
5258 xops
[17] = GEN_INT (0xff000000);
5260 /* Loop to check 1..3 bytes for null to get an aligned pointer. */
5262 /* Is there a known alignment and is it less than 4? */
5263 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) < 4)
5265 /* Is there a known alignment and is it not 2? */
5266 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
5268 xops
[6] = gen_label_rtx (); /* Label when aligned to 3-byte */
5269 xops
[7] = gen_label_rtx (); /* Label when aligned to 2-byte */
5271 /* Leave just the 3 lower bits.
5272 If this is a q-register, then the high part is used later
5273 therefore use andl rather than andb. */
5274 output_asm_insn (AS2 (and%L1
,%4,%1), xops
);
5276 /* Is aligned to 4-byte address when zero */
5277 output_asm_insn (AS1 (je
,%l8
), xops
);
5279 /* Side-effect even Parity when %eax == 3 */
5280 output_asm_insn (AS1 (jp
,%6), xops
);
5282 /* Is it aligned to 2 bytes ? */
5283 if (QI_REG_P (xops
[1]))
5284 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
5286 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
5288 output_asm_insn (AS1 (je
,%7), xops
);
5292 /* Since the alignment is 2, we have to check 2 or 0 bytes;
5293 check if is aligned to 4 - byte. */
5294 output_asm_insn (AS2 (and%L1
,%3,%1), xops
);
5296 /* Is aligned to 4-byte address when zero */
5297 output_asm_insn (AS1 (je
,%l8
), xops
);
5300 xops
[13] = gen_rtx_MEM (QImode
, xops
[0]);
5302 /* Now compare the bytes; compare with the high part of a q-reg
5303 gives shorter code. */
5304 if (QI_REG_P (xops
[1]))
5306 /* Compare the first n unaligned byte on a byte per byte basis. */
5307 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
5309 /* When zero we reached the end. */
5310 output_asm_insn (AS1 (je
,%l12
), xops
);
5312 /* Increment the address. */
5313 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5315 /* Not needed with an alignment of 2 */
5316 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
5318 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5319 CODE_LABEL_NUMBER (xops
[7]));
5320 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
5321 output_asm_insn (AS1 (je
,%l12
), xops
);
5322 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5324 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5325 CODE_LABEL_NUMBER (xops
[6]));
5328 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
5332 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
5333 output_asm_insn (AS1 (je
,%l12
), xops
);
5334 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5336 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5337 CODE_LABEL_NUMBER (xops
[7]));
5338 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
5339 output_asm_insn (AS1 (je
,%l12
), xops
);
5340 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5342 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5343 CODE_LABEL_NUMBER (xops
[6]));
5344 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
5347 output_asm_insn (AS1 (je
,%l12
), xops
);
5348 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5351 /* Generate loop to check 4 bytes at a time. It is not a good idea to
5352 align this loop. It gives only huge programs, but does not help to
5354 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[8]));
5356 xops
[13] = gen_rtx_MEM (SImode
, xops
[0]);
5357 output_asm_insn (AS2 (mov
%L1
,%13,%1), xops
);
5359 if (QI_REG_P (xops
[1]))
5361 /* On i586 it is faster to combine the hi- and lo- part as
5362 a kind of lookahead. If anding both yields zero, then one
5363 of both *could* be zero, otherwise none of both is zero;
5364 this saves one instruction, on i486 this is slower
5365 tested with P-90, i486DX2-66, AMD486DX2-66 */
5368 output_asm_insn (AS2 (test
%B1
,%h1
,%b1
), xops
);
5369 output_asm_insn (AS1 (jne
,%l9
), xops
);
5372 /* Check first byte. */
5373 output_asm_insn (AS2 (test
%B1
,%b1
,%b1
), xops
);
5374 output_asm_insn (AS1 (je
,%l12
), xops
);
5376 /* Check second byte. */
5377 output_asm_insn (AS2 (test
%B1
,%h1
,%h1
), xops
);
5378 output_asm_insn (AS1 (je
,%l11
), xops
);
5381 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5382 CODE_LABEL_NUMBER (xops
[9]));
5387 /* Check first byte. */
5388 output_asm_insn (AS2 (test
%L1
,%14,%1), xops
);
5389 output_asm_insn (AS1 (je
,%l12
), xops
);
5391 /* Check second byte. */
5392 output_asm_insn (AS2 (test
%L1
,%15,%1), xops
);
5393 output_asm_insn (AS1 (je
,%l11
), xops
);
5396 /* Check third byte. */
5397 output_asm_insn (AS2 (test
%L1
,%16,%1), xops
);
5398 output_asm_insn (AS1 (je
,%l10
), xops
);
5400 /* Check fourth byte and increment address. */
5401 output_asm_insn (AS2 (add
%L0
,%5,%0), xops
);
5402 output_asm_insn (AS2 (test
%L1
,%17,%1), xops
);
5403 output_asm_insn (AS1 (jne
,%l8
), xops
);
5405 /* Now generate fixups when the compare stops within a 4-byte word. */
5406 output_asm_insn (AS2 (sub
%L0
,%4,%0), xops
);
5408 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[10]));
5409 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5411 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[11]));
5412 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5414 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[12]));
5420 output_fp_conditional_move (which_alternative
, operands
)
5421 int which_alternative
;
5424 switch (which_alternative
)
5427 /* r <- cond ? arg : r */
5428 output_asm_insn (AS2 (fcmov
%F1
,%2,%0), operands
);
5432 /* r <- cond ? r : arg */
5433 output_asm_insn (AS2 (fcmov
%f1
,%3,%0), operands
);
5437 /* r <- cond ? r : arg */
5438 output_asm_insn (AS2 (fcmov
%F1
,%2,%0), operands
);
5439 output_asm_insn (AS2 (fcmov
%f1
,%3,%0), operands
);
5450 output_int_conditional_move (which_alternative
, operands
)
5451 int which_alternative
;
5454 int code
= GET_CODE (operands
[1]);
5455 enum machine_mode mode
;
5458 /* This is very tricky. We have to do it right. For a code segement
5467 final_scan_insn () may delete the insn which sets CC. We have to
5468 tell final_scan_insn () if it should be reinserted. When CODE is
5469 GT or LE, we have to check the CC_NO_OVERFLOW bit and return
5470 NULL_PTR to tell final to reinsert the test insn because the
5471 conditional move cannot be handled properly without it. */
5472 if ((code
== GT
|| code
== LE
)
5473 && (cc_prev_status
.flags
& CC_NO_OVERFLOW
))
5476 mode
= GET_MODE (operands
[0]);
5479 xops
[0] = gen_rtx_SUBREG (SImode
, operands
[0], 1);
5480 xops
[1] = operands
[1];
5481 xops
[2] = gen_rtx_SUBREG (SImode
, operands
[2], 1);
5482 xops
[3] = gen_rtx_SUBREG (SImode
, operands
[3], 1);
5485 switch (which_alternative
)
5488 /* r <- cond ? arg : r */
5489 output_asm_insn (AS2 (cmov
%C1
,%2,%0), operands
);
5491 output_asm_insn (AS2 (cmov
%C1
,%2,%0), xops
);
5495 /* r <- cond ? r : arg */
5496 output_asm_insn (AS2 (cmov
%c1
,%3,%0), operands
);
5498 output_asm_insn (AS2 (cmov
%c1
,%3,%0), xops
);
5502 /* rm <- cond ? arg1 : arg2 */
5503 output_asm_insn (AS2 (cmov
%C1
,%2,%0), operands
);
5504 output_asm_insn (AS2 (cmov
%c1
,%3,%0), operands
);
5507 output_asm_insn (AS2 (cmov
%C1
,%2,%0), xops
);
5508 output_asm_insn (AS2 (cmov
%c1
,%3,%0), xops
);
5520 x86_adjust_cost (insn
, link
, dep_insn
, cost
)
5521 rtx insn
, link
, dep_insn
;
5526 if (GET_CODE (dep_insn
) == CALL_INSN
|| GET_CODE (insn
) == JUMP_INSN
)
5529 if (GET_CODE (dep_insn
) == INSN
5530 && GET_CODE (PATTERN (dep_insn
)) == SET
5531 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == REG
5532 && GET_CODE (insn
) == INSN
5533 && GET_CODE (PATTERN (insn
)) == SET
5534 && !reg_overlap_mentioned_p (SET_DEST (PATTERN (dep_insn
)),
5535 SET_SRC (PATTERN (insn
))))
5541 case PROCESSOR_PENTIUM
:
5542 if (cost
!= 0 && is_fp_insn (insn
) && is_fp_insn (dep_insn
)
5543 && !is_fp_dest (dep_insn
))
5546 if (agi_dependent (insn
, dep_insn
))
5549 if (GET_CODE (insn
) == INSN
5550 && GET_CODE (PATTERN (insn
)) == SET
5551 && SET_DEST (PATTERN (insn
)) == cc0_rtx
5552 && (next_inst
= next_nonnote_insn (insn
))
5553 && GET_CODE (next_inst
) == JUMP_INSN
)
5554 /* compare probably paired with jump */
5560 if (!is_fp_dest (dep_insn
))
5562 if(!agi_dependent (insn
, dep_insn
))
5568 if (is_fp_store (insn
) && is_fp_insn (dep_insn
)
5569 && NEXT_INSN (insn
) && NEXT_INSN (NEXT_INSN (insn
))
5570 && NEXT_INSN (NEXT_INSN (NEXT_INSN (insn
)))
5571 && (GET_CODE (NEXT_INSN (insn
)) == INSN
)
5572 && (GET_CODE (NEXT_INSN (NEXT_INSN (insn
))) == JUMP_INSN
)
5573 && (GET_CODE (NEXT_INSN (NEXT_INSN (NEXT_INSN (insn
)))) == NOTE
)
5574 && (NOTE_LINE_NUMBER (NEXT_INSN (NEXT_INSN (NEXT_INSN (insn
))))
5575 == NOTE_INSN_LOOP_END
))