1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 92, 94, 95, 96, 97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
54 #ifdef EXTRA_CONSTRAINT
55 /* If EXTRA_CONSTRAINT is defined, then the 'S'
56 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
57 asm statements that need 'S' for class SIREG will break. */
58 error EXTRA_CONSTRAINT conflicts with S constraint letter
59 /* The previous line used to be #error, but some compilers barf
60 even if the conditional was untrue. */
63 #ifndef CHECK_STACK_LIMIT
64 #define CHECK_STACK_LIMIT -1
67 /* Type of an operand for ix86_{binary,unary}_operator_ok */
75 /* Processor costs (relative to an add) */
76 struct processor_costs i386_cost
= { /* 386 specific costs */
77 1, /* cost of an add instruction */
78 1, /* cost of a lea instruction */
79 3, /* variable shift costs */
80 2, /* constant shift costs */
81 6, /* cost of starting a multiply */
82 1, /* cost of multiply per each bit set */
83 23 /* cost of a divide/mod */
86 struct processor_costs i486_cost
= { /* 486 specific costs */
87 1, /* cost of an add instruction */
88 1, /* cost of a lea instruction */
89 3, /* variable shift costs */
90 2, /* constant shift costs */
91 12, /* cost of starting a multiply */
92 1, /* cost of multiply per each bit set */
93 40 /* cost of a divide/mod */
96 struct processor_costs pentium_cost
= {
97 1, /* cost of an add instruction */
98 1, /* cost of a lea instruction */
99 4, /* variable shift costs */
100 1, /* constant shift costs */
101 11, /* cost of starting a multiply */
102 0, /* cost of multiply per each bit set */
103 25 /* cost of a divide/mod */
106 struct processor_costs pentiumpro_cost
= {
107 1, /* cost of an add instruction */
108 1, /* cost of a lea instruction */
109 3, /* variable shift costs */
110 1, /* constant shift costs */
111 4, /* cost of starting a multiply */
112 0, /* cost of multiply per each bit set */
113 17 /* cost of a divide/mod */
/* Pointer to the cost table selected for the scheduling CPU; later code
   in this file assigns processor_target_table[j].cost to it when -mcpu=
   is processed.  Defaults to the Pentium costs.
   NOTE(review): this extraction splits each original source line across
   several physical lines; tokens below are unchanged.  */
116 struct processor_costs
*ix86_cost
= &pentium_cost
;
/* Build a MEM rtx of the given MODE addressed through the frame pointer.  */
118 #define AT_BP(mode) (gen_rtx_MEM ((mode), frame_pointer_rtx))
120 extern FILE *asm_out_file
;
121 extern char *strcat ();
/* Forward declarations for the static prologue/epilogue emitters.  */
123 static void ix86_epilogue
PROTO((int));
124 static void ix86_prologue
PROTO((int));
/* Old-style (K&R) forward declarations for the move-output helpers
   defined later in this file.  */
126 char *singlemove_string ();
127 char *output_move_const_single ();
128 char *output_fp_cc0_set ();
/* Hard-register name tables, initialized from the i386.h macros:
   full-size names, QImode low-byte names, and QImode high-byte names.  */
130 char *hi_reg_name
[] = HI_REGISTER_NAMES
;
131 char *qi_reg_name
[] = QI_REGISTER_NAMES
;
132 char *qi_high_reg_name
[] = QI_HIGH_REGISTER_NAMES
;
134 /* Array of the smallest class containing reg number REGNO, indexed by
135 REGNO. Used by REGNO_REG_CLASS in i386.h. */
137 enum reg_class regclass_map
[FIRST_PSEUDO_REGISTER
] =
140 AREG
, DREG
, CREG
, BREG
,
142 SIREG
, DIREG
, INDEX_REGS
, GENERAL_REGS
,
144 FP_TOP_REG
, FP_SECOND_REG
, FLOAT_REGS
, FLOAT_REGS
,
145 FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
,
150 /* Test and compare insns in i386.md store the information needed to
151 generate branch and scc insns here. */
/* The two operands of the most recent compare/test; NULL until a
   compare has been expanded.  */
153 struct rtx_def
*i386_compare_op0
= NULL_RTX
;
154 struct rtx_def
*i386_compare_op1
= NULL_RTX
;
/* Generator functions used to emit the comparison (and its EQ-style
   variant).  NOTE(review): presumably set by the compare expanders in
   i386.md-driven code not visible in this fragment -- confirm.  */
155 struct rtx_def
*(*i386_compare_gen
)(), *(*i386_compare_gen_eq
)();
157 /* which cpu are we scheduling for */
158 enum processor_type ix86_cpu
;
160 /* which instruction set architecture to use. */
/* The raw -mcpu=/-march= strings below are parsed later in this file
   (see the atoi/strcmp validation code in override_options) into the
   corresponding int/enum variables.  */
163 /* Strings to hold which cpu and instruction set architecture to use. */
164 char *ix86_cpu_string
; /* for -mcpu=<xxx> */
165 char *ix86_arch_string
; /* for -march=<xxx> */
167 /* Register allocation order */
168 char *i386_reg_alloc_order
;
/* Tracks which hard registers were named in -mreg-alloc=; used both to
   reject duplicates and to append the unmentioned registers afterward.  */
169 static char regs_allocated
[FIRST_PSEUDO_REGISTER
];
171 /* # of registers to use to pass arguments. */
172 char *i386_regparm_string
;
174 /* i386_regparm_string as a number */
177 /* Alignment to use for loops and jumps: */
179 /* Power of two alignment for loops. */
180 char *i386_align_loops_string
;
182 /* Power of two alignment for non-loop jumps. */
183 char *i386_align_jumps_string
;
185 /* Values 1-5: see jump.c */
186 int i386_branch_cost
;
187 char *i386_branch_cost_string
;
189 /* Power of two alignment for functions. */
190 int i386_align_funcs
;
191 char *i386_align_funcs_string
;
193 /* Power of two alignment for loops. */
194 int i386_align_loops
;
196 /* Power of two alignment for non-loop jumps. */
197 int i386_align_jumps
;
199 /* Sometimes certain combinations of command options do not make
200 sense on a particular target machine. You can define a macro
201 `OVERRIDE_OPTIONS' to take account of this. This macro, if
202 defined, is executed once just after all the command options have
205 Don't use this macro to turn on various extra optimizations for
206 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
216 char *name
; /* Canonical processor name. */
217 enum processor_type processor
; /* Processor type enum value. */
218 struct processor_costs
*cost
; /* Processor costs */
219 int target_enable
; /* Target flags to enable. */
220 int target_disable
; /* Target flags to disable. */
221 } processor_target_table
[]
222 = {{PROCESSOR_I386_STRING
, PROCESSOR_I386
, &i386_cost
, 0, 0},
223 {PROCESSOR_I486_STRING
, PROCESSOR_I486
, &i486_cost
, 0, 0},
224 {PROCESSOR_I586_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
225 {PROCESSOR_PENTIUM_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
226 {PROCESSOR_I686_STRING
, PROCESSOR_PENTIUMPRO
, &pentiumpro_cost
,
228 {PROCESSOR_PENTIUMPRO_STRING
, PROCESSOR_PENTIUMPRO
,
229 &pentiumpro_cost
, 0, 0}};
/* Number of entries in processor_target_table; bounds the -march=/-mcpu=
   lookup loops below.  */
231 int ptt_size
= sizeof (processor_target_table
) / sizeof (struct ptt
);
233 #ifdef SUBTARGET_OVERRIDE_OPTIONS
234 SUBTARGET_OVERRIDE_OPTIONS
;
237 /* Validate registers in register allocation order. */
238 if (i386_reg_alloc_order
)
240 for (i
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
246 case 'a': regno
= 0; break;
247 case 'd': regno
= 1; break;
248 case 'c': regno
= 2; break;
249 case 'b': regno
= 3; break;
250 case 'S': regno
= 4; break;
251 case 'D': regno
= 5; break;
252 case 'B': regno
= 6; break;
254 default: fatal ("Register '%c' is unknown", ch
);
257 if (regs_allocated
[regno
])
258 fatal ("Register '%c' already specified in allocation order", ch
);
260 regs_allocated
[regno
] = 1;
264 if (ix86_arch_string
== 0)
266 ix86_arch_string
= PROCESSOR_PENTIUM_STRING
;
267 if (ix86_cpu_string
== 0)
268 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
271 for (i
= 0; i
< ptt_size
; i
++)
272 if (! strcmp (ix86_arch_string
, processor_target_table
[i
].name
))
274 ix86_arch
= processor_target_table
[i
].processor
;
275 if (ix86_cpu_string
== 0)
276 ix86_cpu_string
= processor_target_table
[i
].name
;
282 error ("bad value (%s) for -march= switch", ix86_arch_string
);
283 ix86_arch_string
= PROCESSOR_PENTIUM_STRING
;
284 ix86_arch
= PROCESSOR_DEFAULT
;
287 if (ix86_cpu_string
== 0)
288 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
290 for (j
= 0; j
< ptt_size
; j
++)
291 if (! strcmp (ix86_cpu_string
, processor_target_table
[j
].name
))
293 ix86_cpu
= processor_target_table
[j
].processor
;
294 ix86_cost
= processor_target_table
[j
].cost
;
295 if (i
> j
&& (int) ix86_arch
>= (int) PROCESSOR_PENTIUMPRO
)
296 error ("-mcpu=%s does not support -march=%s",
297 ix86_cpu_string
, ix86_arch_string
);
299 target_flags
|= processor_target_table
[j
].target_enable
;
300 target_flags
&= ~processor_target_table
[j
].target_disable
;
306 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string
);
307 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
308 ix86_cpu
= PROCESSOR_DEFAULT
;
311 /* Validate -mregparm= value. */
312 if (i386_regparm_string
)
314 i386_regparm
= atoi (i386_regparm_string
);
315 if (i386_regparm
< 0 || i386_regparm
> REGPARM_MAX
)
316 fatal ("-mregparm=%d is not between 0 and %d",
317 i386_regparm
, REGPARM_MAX
);
320 /* The 486 suffers more from non-aligned cache line fills, and the
321 larger code size results in a larger cache foot-print and more misses.
322 The 486 has a 16 byte cache line, pentium and pentiumpro have a 32 byte
324 def_align
= (TARGET_486
) ? 4 : 2;
326 /* Validate -malign-loops= value, or provide default. */
327 if (i386_align_loops_string
)
329 i386_align_loops
= atoi (i386_align_loops_string
);
330 if (i386_align_loops
< 0 || i386_align_loops
> MAX_CODE_ALIGN
)
331 fatal ("-malign-loops=%d is not between 0 and %d",
332 i386_align_loops
, MAX_CODE_ALIGN
);
335 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
336 i386_align_loops
= 4;
338 i386_align_loops
= 2;
341 /* Validate -malign-jumps= value, or provide default. */
342 if (i386_align_jumps_string
)
344 i386_align_jumps
= atoi (i386_align_jumps_string
);
345 if (i386_align_jumps
< 0 || i386_align_jumps
> MAX_CODE_ALIGN
)
346 fatal ("-malign-jumps=%d is not between 0 and %d",
347 i386_align_jumps
, MAX_CODE_ALIGN
);
350 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
351 i386_align_jumps
= 4;
353 i386_align_jumps
= def_align
;
356 /* Validate -malign-functions= value, or provide default. */
357 if (i386_align_funcs_string
)
359 i386_align_funcs
= atoi (i386_align_funcs_string
);
360 if (i386_align_funcs
< 0 || i386_align_funcs
> MAX_CODE_ALIGN
)
361 fatal ("-malign-functions=%d is not between 0 and %d",
362 i386_align_funcs
, MAX_CODE_ALIGN
);
365 i386_align_funcs
= def_align
;
367 /* Validate -mbranch-cost= value, or provide default. */
368 if (i386_branch_cost_string
)
370 i386_branch_cost
= atoi (i386_branch_cost_string
);
371 if (i386_branch_cost
< 0 || i386_branch_cost
> 5)
372 fatal ("-mbranch-cost=%d is not between 0 and 5",
376 i386_branch_cost
= 1;
378 /* Keep nonleaf frame pointers. */
379 if (TARGET_OMIT_LEAF_FRAME_POINTER
)
380 flag_omit_frame_pointer
= 1;
383 /* A C statement (sans semicolon) to choose the order in which to
384 allocate hard registers for pseudo-registers local to a basic
387 Store the desired register order in the array `reg_alloc_order'.
388 Element 0 should be the register to allocate first; element 1, the
389 next register; and so on.
391 The macro body should not assume anything about the contents of
392 `reg_alloc_order' before execution of the macro.
394 On most machines, it is not necessary to define this macro. */
397 order_regs_for_local_alloc ()
401 /* User specified the register allocation order. */
403 if (i386_reg_alloc_order
)
405 for (i
= order
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
411 case 'a': regno
= 0; break;
412 case 'd': regno
= 1; break;
413 case 'c': regno
= 2; break;
414 case 'b': regno
= 3; break;
415 case 'S': regno
= 4; break;
416 case 'D': regno
= 5; break;
417 case 'B': regno
= 6; break;
420 reg_alloc_order
[order
++] = regno
;
423 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
425 if (! regs_allocated
[i
])
426 reg_alloc_order
[order
++] = i
;
430 /* If user did not specify a register allocation order, use natural order. */
433 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
434 reg_alloc_order
[i
] = i
;
439 optimization_options (level
, size
)
443 /* For -O2 and beyond, turn off -fschedule-insns by default. It tends to
444 make the problem with not enough registers even worse. */
445 #ifdef INSN_SCHEDULING
447 flag_schedule_insns
= 0;
451 /* Sign-extend a 16-bit constant */
454 i386_sext16_if_const (op
)
457 if (GET_CODE (op
) == CONST_INT
)
459 HOST_WIDE_INT val
= INTVAL (op
);
460 HOST_WIDE_INT sext_val
;
462 sext_val
= val
| ~0xffff;
464 sext_val
= val
& 0xffff;
466 op
= GEN_INT (sext_val
);
471 /* Return nonzero if the rtx is aligned */
474 i386_aligned_reg_p (regno
)
477 return (regno
== STACK_POINTER_REGNUM
478 || (! flag_omit_frame_pointer
&& regno
== FRAME_POINTER_REGNUM
));
485 /* Registers and immediate operands are always "aligned". */
486 if (GET_CODE (op
) != MEM
)
489 /* Don't even try to do any aligned optimizations with volatiles. */
490 if (MEM_VOLATILE_P (op
))
493 /* Get address of memory operand. */
496 switch (GET_CODE (op
))
503 /* Match "reg + offset" */
505 if (GET_CODE (XEXP (op
, 1)) != CONST_INT
)
507 if (INTVAL (XEXP (op
, 1)) & 3)
511 if (GET_CODE (op
) != REG
)
514 /* ... fall through ... */
517 return i386_aligned_reg_p (REGNO (op
));
526 /* Return nonzero if INSN looks like it won't compute useful cc bits
527 as a side effect. This information is only a hint. */
530 i386_cc_probably_useless_p (insn
)
533 return ! next_cc0_user (insn
);
536 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
537 attribute for DECL. The attributes in ATTRIBUTES have previously been
541 i386_valid_decl_attribute_p (decl
, attributes
, identifier
, args
)
550 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
551 attribute for TYPE. The attributes in ATTRIBUTES have previously been
555 i386_valid_type_attribute_p (type
, attributes
, identifier
, args
)
561 if (TREE_CODE (type
) != FUNCTION_TYPE
562 && TREE_CODE (type
) != FIELD_DECL
563 && TREE_CODE (type
) != TYPE_DECL
)
566 /* Stdcall attribute says callee is responsible for popping arguments
567 if they are not variable. */
568 if (is_attribute_p ("stdcall", identifier
))
569 return (args
== NULL_TREE
);
571 /* Cdecl attribute says the callee is a normal C declaration. */
572 if (is_attribute_p ("cdecl", identifier
))
573 return (args
== NULL_TREE
);
575 /* Regparm attribute specifies how many integer arguments are to be
576 passed in registers. */
577 if (is_attribute_p ("regparm", identifier
))
581 if (! args
|| TREE_CODE (args
) != TREE_LIST
582 || TREE_CHAIN (args
) != NULL_TREE
583 || TREE_VALUE (args
) == NULL_TREE
)
586 cst
= TREE_VALUE (args
);
587 if (TREE_CODE (cst
) != INTEGER_CST
)
590 if (TREE_INT_CST_HIGH (cst
) != 0
591 || TREE_INT_CST_LOW (cst
) < 0
592 || TREE_INT_CST_LOW (cst
) > REGPARM_MAX
)
601 /* Return 0 if the attributes for two types are incompatible, 1 if they
602 are compatible, and 2 if they are nearly compatible (which causes a
603 warning to be generated). */
606 i386_comp_type_attributes (type1
, type2
)
614 /* Value is the number of bytes of arguments automatically
615 popped when returning from a subroutine call.
616 FUNDECL is the declaration node of the function (as a tree),
617 FUNTYPE is the data type of the function (as a tree),
618 or for a library call it is an identifier node for the subroutine name.
619 SIZE is the number of bytes of arguments passed on the stack.
621 On the 80386, the RTD insn may be used to pop them if the number
622 of args is fixed, but if the number is variable then the caller
623 must pop them all. RTD can't be used for library calls now
624 because the library is compiled with the Unix compiler.
625 Use of RTD is a selectable option, since it is incompatible with
626 standard Unix calling sequences. If the option is not selected,
627 the caller must always pop the args.
629 The attribute stdcall is equivalent to RTD on a per module basis. */
632 i386_return_pops_args (fundecl
, funtype
, size
)
637 int rtd
= TARGET_RTD
&& (!fundecl
|| TREE_CODE (fundecl
) != IDENTIFIER_NODE
);
639 /* Cdecl functions override -mrtd, and never pop the stack. */
640 if (! lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype
))) {
642 /* Stdcall functions will pop the stack if not variable args. */
643 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype
)))
647 && (TYPE_ARG_TYPES (funtype
) == NULL_TREE
648 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype
)))
653 /* Lose any fake structure return argument. */
654 if (aggregate_value_p (TREE_TYPE (funtype
)))
655 return GET_MODE_SIZE (Pmode
);
661 /* Argument support functions. */
663 /* Initialize a variable CUM of type CUMULATIVE_ARGS
664 for a call to a function whose data type is FNTYPE.
665 For a library call, FNTYPE is 0. */
668 init_cumulative_args (cum
, fntype
, libname
)
669 CUMULATIVE_ARGS
*cum
; /* Argument info to initialize */
670 tree fntype
; /* tree ptr for function decl */
671 rtx libname
; /* SYMBOL_REF of library name or 0 */
673 static CUMULATIVE_ARGS zero_cum
;
674 tree param
, next_param
;
676 if (TARGET_DEBUG_ARG
)
678 fprintf (stderr
, "\ninit_cumulative_args (");
680 fprintf (stderr
, "fntype code = %s, ret code = %s",
681 tree_code_name
[(int) TREE_CODE (fntype
)],
682 tree_code_name
[(int) TREE_CODE (TREE_TYPE (fntype
))]);
684 fprintf (stderr
, "no fntype");
687 fprintf (stderr
, ", libname = %s", XSTR (libname
, 0));
692 /* Set up the number of registers to use for passing arguments. */
693 cum
->nregs
= i386_regparm
;
696 tree attr
= lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype
));
699 cum
->nregs
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr
)));
702 /* Determine if this function has variable arguments. This is
703 indicated by the last argument being 'void_type_mode' if there
704 are no variable arguments. If there are variable arguments, then
705 we won't pass anything in registers */
709 for (param
= (fntype
) ? TYPE_ARG_TYPES (fntype
) : 0;
710 param
!= 0; param
= next_param
)
712 next_param
= TREE_CHAIN (param
);
713 if (next_param
== 0 && TREE_VALUE (param
) != void_type_node
)
718 if (TARGET_DEBUG_ARG
)
719 fprintf (stderr
, ", nregs=%d )\n", cum
->nregs
);
724 /* Update the data in CUM to advance over an argument
725 of mode MODE and data type TYPE.
726 (TYPE is null for libcalls where that information may not be available.) */
729 function_arg_advance (cum
, mode
, type
, named
)
730 CUMULATIVE_ARGS
*cum
; /* current arg information */
731 enum machine_mode mode
; /* current arg mode */
732 tree type
; /* type of the argument or 0 if lib support */
733 int named
; /* whether or not the argument was named */
736 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
737 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
739 if (TARGET_DEBUG_ARG
)
741 "function_adv (sz=%d, wds=%2d, nregs=%d, mode=%s, named=%d)\n\n",
742 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
757 /* Define where to put the arguments to a function.
758 Value is zero to push the argument on the stack,
759 or a hard register in which to store the argument.
761 MODE is the argument's machine mode.
762 TYPE is the data type of the argument (as a tree).
763 This is null for libcalls where that information may
765 CUM is a variable of type CUMULATIVE_ARGS which gives info about
766 the preceding args and about the function being called.
767 NAMED is nonzero if this argument is a named parameter
768 (otherwise it is an extra parameter matching an ellipsis). */
771 function_arg (cum
, mode
, type
, named
)
772 CUMULATIVE_ARGS
*cum
; /* current arg information */
773 enum machine_mode mode
; /* current arg mode */
774 tree type
; /* type of the argument or 0 if lib support */
775 int named
; /* != 0 for normal args, == 0 for ... args */
779 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
780 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
784 /* For now, pass fp/complex values on the stack. */
793 if (words
<= cum
->nregs
)
794 ret
= gen_rtx_REG (mode
, cum
->regno
);
798 if (TARGET_DEBUG_ARG
)
801 "function_arg (size=%d, wds=%2d, nregs=%d, mode=%4s, named=%d",
802 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
805 fprintf (stderr
, ", reg=%%e%s", reg_names
[ REGNO(ret
) ]);
807 fprintf (stderr
, ", stack");
809 fprintf (stderr
, " )\n");
815 /* For an arg passed partly in registers and partly in memory,
816 this is the number of registers used.
817 For args passed entirely in registers or entirely in memory, zero. */
820 function_arg_partial_nregs (cum
, mode
, type
, named
)
821 CUMULATIVE_ARGS
*cum
; /* current arg information */
822 enum machine_mode mode
; /* current arg mode */
823 tree type
; /* type of the argument or 0 if lib support */
824 int named
; /* != 0 for normal args, == 0 for ... args */
829 /* Output an insn whose source is a 386 integer register. SRC is the
830 rtx for the register, and TEMPLATE is the op-code template. SRC may
831 be either SImode or DImode.
833 The template will be output with operands[0] as SRC, and operands[1]
834 as a pointer to the top of the 386 stack. So a call from floatsidf2
835 would look like this:
837 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
839 where %z0 corresponds to the caller's operands[1], and is used to
840 emit the proper size suffix.
842 ??? Extend this to handle HImode - a 387 can load and store HImode
846 output_op_from_reg (src
, template)
851 int size
= GET_MODE_SIZE (GET_MODE (src
));
854 xops
[1] = AT_SP (Pmode
);
855 xops
[2] = GEN_INT (size
);
856 xops
[3] = stack_pointer_rtx
;
858 if (size
> UNITS_PER_WORD
)
862 if (size
> 2 * UNITS_PER_WORD
)
864 high
= gen_rtx_REG (SImode
, REGNO (src
) + 2);
865 output_asm_insn (AS1 (push
%L0
,%0), &high
);
868 high
= gen_rtx_REG (SImode
, REGNO (src
) + 1);
869 output_asm_insn (AS1 (push
%L0
,%0), &high
);
872 output_asm_insn (AS1 (push
%L0
,%0), &src
);
873 output_asm_insn (template, xops
);
874 output_asm_insn (AS2 (add
%L3
,%2,%3), xops
);
877 /* Output an insn to pop an value from the 387 top-of-stack to 386
878 register DEST. The 387 register stack is popped if DIES is true. If
879 the mode of DEST is an integer mode, a `fist' integer store is done,
880 otherwise a `fst' float store is done. */
883 output_to_reg (dest
, dies
, scratch_mem
)
889 int size
= GET_MODE_SIZE (GET_MODE (dest
));
892 xops
[0] = AT_SP (Pmode
);
894 xops
[0] = scratch_mem
;
896 xops
[1] = stack_pointer_rtx
;
897 xops
[2] = GEN_INT (size
);
901 output_asm_insn (AS2 (sub
%L1
,%2,%1), xops
);
903 if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_INT
)
906 output_asm_insn (AS1 (fistp
%z3
,%y0
), xops
);
908 output_asm_insn (AS1 (fist
%z3
,%y0
), xops
);
911 else if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_FLOAT
)
914 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
917 if (GET_MODE (dest
) == XFmode
)
919 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
920 output_asm_insn (AS1 (fld
%z3
,%y0
), xops
);
923 output_asm_insn (AS1 (fst
%z3
,%y0
), xops
);
931 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
933 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
936 if (size
> UNITS_PER_WORD
)
938 dest
= gen_rtx_REG (SImode
, REGNO (dest
) + 1);
940 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
943 xops
[0] = adj_offsettable_operand (xops
[0], 4);
945 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
948 if (size
> 2 * UNITS_PER_WORD
)
950 dest
= gen_rtx_REG (SImode
, REGNO (dest
) + 1);
952 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
955 xops
[0] = adj_offsettable_operand (xops
[0], 4);
956 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
963 singlemove_string (operands
)
967 if (GET_CODE (operands
[0]) == MEM
968 && GET_CODE (x
= XEXP (operands
[0], 0)) == PRE_DEC
)
970 if (XEXP (x
, 0) != stack_pointer_rtx
)
974 else if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
975 return output_move_const_single (operands
);
976 else if (GET_CODE (operands
[0]) == REG
|| GET_CODE (operands
[1]) == REG
)
977 return AS2 (mov
%L0
,%1,%0);
978 else if (CONSTANT_P (operands
[1]))
979 return AS2 (mov
%L0
,%1,%0);
982 output_asm_insn ("push%L1 %1", operands
);
987 /* Return a REG that occurs in ADDR with coefficient 1.
988 ADDR can be effectively incremented by incrementing REG. */
994 while (GET_CODE (addr
) == PLUS
)
996 if (GET_CODE (XEXP (addr
, 0)) == REG
)
997 addr
= XEXP (addr
, 0);
998 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
999 addr
= XEXP (addr
, 1);
1000 else if (CONSTANT_P (XEXP (addr
, 0)))
1001 addr
= XEXP (addr
, 1);
1002 else if (CONSTANT_P (XEXP (addr
, 1)))
1003 addr
= XEXP (addr
, 0);
1008 if (GET_CODE (addr
) == REG
)
1013 /* Output an insn to add the constant N to the register X. */
1024 output_asm_insn (AS1 (dec
%L0
,%0), xops
);
1026 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
1027 else if (n
< 0 || n
== 128)
1029 xops
[1] = GEN_INT (-n
);
1030 output_asm_insn (AS2 (sub
%L0
,%1,%0), xops
);
1034 xops
[1] = GEN_INT (n
);
1035 output_asm_insn (AS2 (add
%L0
,%1,%0), xops
);
1039 /* Output assembler code to perform a doubleword move insn
1040 with operands OPERANDS. */
1043 output_move_double (operands
)
1046 enum {REGOP
, OFFSOP
, MEMOP
, PUSHOP
, POPOP
, CNSTOP
, RNDOP
} optype0
, optype1
;
1050 rtx addreg0
= 0, addreg1
= 0;
1051 int dest_overlapped_low
= 0;
1052 int size
= GET_MODE_SIZE (GET_MODE (operands
[0]));
1057 /* First classify both operands. */
1059 if (REG_P (operands
[0]))
1061 else if (offsettable_memref_p (operands
[0]))
1063 else if (GET_CODE (XEXP (operands
[0], 0)) == POST_INC
)
1065 else if (GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
)
1067 else if (GET_CODE (operands
[0]) == MEM
)
1072 if (REG_P (operands
[1]))
1074 else if (CONSTANT_P (operands
[1]))
1076 else if (offsettable_memref_p (operands
[1]))
1078 else if (GET_CODE (XEXP (operands
[1], 0)) == POST_INC
)
1080 else if (GET_CODE (XEXP (operands
[1], 0)) == PRE_DEC
)
1082 else if (GET_CODE (operands
[1]) == MEM
)
1087 /* Check for the cases that the operand constraints are not
1088 supposed to allow to happen. Abort if we get one,
1089 because generating code for these cases is painful. */
1091 if (optype0
== RNDOP
|| optype1
== RNDOP
)
1094 /* If one operand is decrementing and one is incrementing
1095 decrement the former register explicitly
1096 and change that operand into ordinary indexing. */
1098 if (optype0
== PUSHOP
&& optype1
== POPOP
)
1100 /* ??? Can this ever happen on i386? */
1101 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
1102 asm_add (-size
, operands
[0]);
1103 if (GET_MODE (operands
[1]) == XFmode
)
1104 operands
[0] = gen_rtx_MEM (XFmode
, operands
[0]);
1105 else if (GET_MODE (operands
[0]) == DFmode
)
1106 operands
[0] = gen_rtx_MEM (DFmode
, operands
[0]);
1108 operands
[0] = gen_rtx_MEM (DImode
, operands
[0]);
1112 if (optype0
== POPOP
&& optype1
== PUSHOP
)
1114 /* ??? Can this ever happen on i386? */
1115 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
1116 asm_add (-size
, operands
[1]);
1117 if (GET_MODE (operands
[1]) == XFmode
)
1118 operands
[1] = gen_rtx_MEM (XFmode
, operands
[1]);
1119 else if (GET_MODE (operands
[1]) == DFmode
)
1120 operands
[1] = gen_rtx_MEM (DFmode
, operands
[1]);
1122 operands
[1] = gen_rtx_MEM (DImode
, operands
[1]);
1126 /* If an operand is an unoffsettable memory ref, find a register
1127 we can increment temporarily to make it refer to the second word. */
1129 if (optype0
== MEMOP
)
1130 addreg0
= find_addr_reg (XEXP (operands
[0], 0));
1132 if (optype1
== MEMOP
)
1133 addreg1
= find_addr_reg (XEXP (operands
[1], 0));
1135 /* Ok, we can do one word at a time.
1136 Normally we do the low-numbered word first,
1137 but if either operand is autodecrementing then we
1138 do the high-numbered word first.
1140 In either case, set up in LATEHALF the operands to use
1141 for the high-numbered word and in some cases alter the
1142 operands in OPERANDS to be suitable for the low-numbered word. */
1146 if (optype0
== REGOP
)
1148 middlehalf
[0] = gen_rtx_REG (SImode
, REGNO (operands
[0]) + 1);
1149 latehalf
[0] = gen_rtx_REG (SImode
, REGNO (operands
[0]) + 2);
1151 else if (optype0
== OFFSOP
)
1153 middlehalf
[0] = adj_offsettable_operand (operands
[0], 4);
1154 latehalf
[0] = adj_offsettable_operand (operands
[0], 8);
1158 middlehalf
[0] = operands
[0];
1159 latehalf
[0] = operands
[0];
1162 if (optype1
== REGOP
)
1164 middlehalf
[1] = gen_rtx_REG (SImode
, REGNO (operands
[1]) + 1);
1165 latehalf
[1] = gen_rtx_REG (SImode
, REGNO (operands
[1]) + 2);
1167 else if (optype1
== OFFSOP
)
1169 middlehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1170 latehalf
[1] = adj_offsettable_operand (operands
[1], 8);
1172 else if (optype1
== CNSTOP
)
1174 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1176 REAL_VALUE_TYPE r
; long l
[3];
1178 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1179 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r
, l
);
1180 operands
[1] = GEN_INT (l
[0]);
1181 middlehalf
[1] = GEN_INT (l
[1]);
1182 latehalf
[1] = GEN_INT (l
[2]);
1184 else if (CONSTANT_P (operands
[1]))
1185 /* No non-CONST_DOUBLE constant should ever appear here. */
1190 middlehalf
[1] = operands
[1];
1191 latehalf
[1] = operands
[1];
1197 /* Size is not 12. */
1199 if (optype0
== REGOP
)
1200 latehalf
[0] = gen_rtx_REG (SImode
, REGNO (operands
[0]) + 1);
1201 else if (optype0
== OFFSOP
)
1202 latehalf
[0] = adj_offsettable_operand (operands
[0], 4);
1204 latehalf
[0] = operands
[0];
1206 if (optype1
== REGOP
)
1207 latehalf
[1] = gen_rtx_REG (SImode
, REGNO (operands
[1]) + 1);
1208 else if (optype1
== OFFSOP
)
1209 latehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1210 else if (optype1
== CNSTOP
)
1211 split_double (operands
[1], &operands
[1], &latehalf
[1]);
1213 latehalf
[1] = operands
[1];
1216 /* If insn is effectively movd N (sp),-(sp) then we will do the
1217 high word first. We should use the adjusted operand 1
1218 (which is N+4 (sp) or N+8 (sp))
1219 for the low word and middle word as well,
1220 to compensate for the first decrement of sp. */
1221 if (optype0
== PUSHOP
1222 && REGNO (XEXP (XEXP (operands
[0], 0), 0)) == STACK_POINTER_REGNUM
1223 && reg_overlap_mentioned_p (stack_pointer_rtx
, operands
[1]))
1224 middlehalf
[1] = operands
[1] = latehalf
[1];
1226 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1227 if the upper part of reg N does not appear in the MEM, arrange to
1228 emit the move late-half first. Otherwise, compute the MEM address
1229 into the upper part of N and use that as a pointer to the memory
1231 if (optype0
== REGOP
1232 && (optype1
== OFFSOP
|| optype1
== MEMOP
))
1234 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1235 && reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1237 /* If both halves of dest are used in the src memory address,
1238 compute the address into latehalf of dest. */
1240 xops
[0] = latehalf
[0];
1241 xops
[1] = XEXP (operands
[1], 0);
1242 output_asm_insn (AS2 (lea
%L0
,%a1
,%0), xops
);
1243 if (GET_MODE (operands
[1]) == XFmode
)
1245 operands
[1] = gen_rtx_MEM (XFmode
, latehalf
[0]);
1246 middlehalf
[1] = adj_offsettable_operand (operands
[1], size
-8);
1247 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1251 operands
[1] = gen_rtx_MEM (DImode
, latehalf
[0]);
1252 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1257 && reg_mentioned_p (middlehalf
[0], XEXP (operands
[1], 0)))
1259 /* Check for two regs used by both source and dest. */
1260 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1261 || reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1264 /* JRV says this can't happen: */
1265 if (addreg0
|| addreg1
)
1268 /* Only the middle reg conflicts; simply put it last. */
1269 output_asm_insn (singlemove_string (operands
), operands
);
1270 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1271 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1275 else if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0)))
1276 /* If the low half of dest is mentioned in the source memory
1277 address, the arrange to emit the move late half first. */
1278 dest_overlapped_low
= 1;
1281 /* If one or both operands autodecrementing,
1282 do the two words, high-numbered first. */
1284 /* Likewise, the first move would clobber the source of the second one,
1285 do them in the other order. This happens only for registers;
1286 such overlap can't happen in memory unless the user explicitly
1287 sets it up, and that is an undefined circumstance. */
1290 if (optype0
== PUSHOP
|| optype1
== PUSHOP
1291 || (optype0
== REGOP
&& optype1
== REGOP
1292 && REGNO (operands
[0]) == REGNO (latehalf
[1]))
1293 || dest_overlapped_low
)
1296 if (optype0
== PUSHOP
|| optype1
== PUSHOP
1297 || (optype0
== REGOP
&& optype1
== REGOP
1298 && ((middlehalf
[1] && REGNO (operands
[0]) == REGNO (middlehalf
[1]))
1299 || REGNO (operands
[0]) == REGNO (latehalf
[1])))
1300 || dest_overlapped_low
)
1302 /* Make any unoffsettable addresses point at high-numbered word. */
1304 asm_add (size
-4, addreg0
);
1306 asm_add (size
-4, addreg1
);
1309 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1311 /* Undo the adds we just did. */
1313 asm_add (-4, addreg0
);
1315 asm_add (-4, addreg1
);
1319 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1321 asm_add (-4, addreg0
);
1323 asm_add (-4, addreg1
);
1326 /* Do low-numbered word. */
1327 return singlemove_string (operands
);
1330 /* Normal case: do the two words, low-numbered first. */
1332 output_asm_insn (singlemove_string (operands
), operands
);
1334 /* Do the middle one of the three words for long double */
1338 asm_add (4, addreg0
);
1340 asm_add (4, addreg1
);
1342 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1345 /* Make any unoffsettable addresses point at high-numbered word. */
1347 asm_add (4, addreg0
);
1349 asm_add (4, addreg1
);
1352 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1354 /* Undo the adds we just did. */
1356 asm_add (4-size
, addreg0
);
1358 asm_add (4-size
, addreg1
);
1363 #define MAX_TMPS 2 /* max temporary registers used */
1365 /* Output the appropriate code to move push memory on the stack */
1368 output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
)
1380 } tmp_info
[MAX_TMPS
];
1382 rtx src
= operands
[1];
1385 int stack_p
= reg_overlap_mentioned_p (stack_pointer_rtx
, src
);
1386 int stack_offset
= 0;
1390 if (! offsettable_memref_p (src
))
1391 fatal_insn ("Source is not offsettable", insn
);
1393 if ((length
& 3) != 0)
1394 fatal_insn ("Pushing non-word aligned size", insn
);
1396 /* Figure out which temporary registers we have available */
1397 for (i
= tmp_start
; i
< n_operands
; i
++)
1399 if (GET_CODE (operands
[i
]) == REG
)
1401 if (reg_overlap_mentioned_p (operands
[i
], src
))
1404 tmp_info
[ max_tmps
++ ].xops
[1] = operands
[i
];
1405 if (max_tmps
== MAX_TMPS
)
1411 for (offset
= length
- 4; offset
>= 0; offset
-= 4)
1413 xops
[0] = adj_offsettable_operand (src
, offset
+ stack_offset
);
1414 output_asm_insn (AS1(push
%L0
,%0), xops
);
1420 for (offset
= length
- 4; offset
>= 0; )
1422 for (num_tmps
= 0; num_tmps
< max_tmps
&& offset
>= 0; num_tmps
++)
1424 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%0,%1);
1425 tmp_info
[num_tmps
].push
= AS1(push
%L0
,%1);
1426 tmp_info
[num_tmps
].xops
[0]
1427 = adj_offsettable_operand (src
, offset
+ stack_offset
);
1431 for (i
= 0; i
< num_tmps
; i
++)
1432 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1434 for (i
= 0; i
< num_tmps
; i
++)
1435 output_asm_insn (tmp_info
[i
].push
, tmp_info
[i
].xops
);
1438 stack_offset
+= 4*num_tmps
;
1444 /* Output the appropriate code to move data between two memory locations */
1447 output_move_memory (operands
, insn
, length
, tmp_start
, n_operands
)
1459 } tmp_info
[MAX_TMPS
];
1461 rtx dest
= operands
[0];
1462 rtx src
= operands
[1];
1463 rtx qi_tmp
= NULL_RTX
;
1469 if (GET_CODE (dest
) == MEM
1470 && GET_CODE (XEXP (dest
, 0)) == PRE_INC
1471 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
)
1472 return output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
);
1474 if (! offsettable_memref_p (src
))
1475 fatal_insn ("Source is not offsettable", insn
);
1477 if (! offsettable_memref_p (dest
))
1478 fatal_insn ("Destination is not offsettable", insn
);
1480 /* Figure out which temporary registers we have available */
1481 for (i
= tmp_start
; i
< n_operands
; i
++)
1483 if (GET_CODE (operands
[i
]) == REG
)
1485 if ((length
& 1) != 0 && qi_tmp
== 0 && QI_REG_P (operands
[i
]))
1486 qi_tmp
= operands
[i
];
1488 if (reg_overlap_mentioned_p (operands
[i
], dest
))
1489 fatal_insn ("Temporary register overlaps the destination", insn
);
1491 if (reg_overlap_mentioned_p (operands
[i
], src
))
1492 fatal_insn ("Temporary register overlaps the source", insn
);
1494 tmp_info
[max_tmps
++].xops
[2] = operands
[i
];
1495 if (max_tmps
== MAX_TMPS
)
1501 fatal_insn ("No scratch registers were found to do memory->memory moves",
1504 if ((length
& 1) != 0)
1507 fatal_insn ("No byte register found when moving odd # of bytes.",
1513 for (num_tmps
= 0; num_tmps
< max_tmps
; num_tmps
++)
1517 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%1,%2);
1518 tmp_info
[num_tmps
].store
= AS2(mov
%L0
,%2,%0);
1519 tmp_info
[num_tmps
].xops
[0]
1520 = adj_offsettable_operand (dest
, offset
);
1521 tmp_info
[num_tmps
].xops
[1]
1522 = adj_offsettable_operand (src
, offset
);
1528 else if (length
>= 2)
1530 tmp_info
[num_tmps
].load
= AS2(mov
%W0
,%1,%2);
1531 tmp_info
[num_tmps
].store
= AS2(mov
%W0
,%2,%0);
1532 tmp_info
[num_tmps
].xops
[0]
1533 = adj_offsettable_operand (dest
, offset
);
1534 tmp_info
[num_tmps
].xops
[1]
1535 = adj_offsettable_operand (src
, offset
);
1544 for (i
= 0; i
< num_tmps
; i
++)
1545 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1547 for (i
= 0; i
< num_tmps
; i
++)
1548 output_asm_insn (tmp_info
[i
].store
, tmp_info
[i
].xops
);
1553 xops
[0] = adj_offsettable_operand (dest
, offset
);
1554 xops
[1] = adj_offsettable_operand (src
, offset
);
1556 output_asm_insn (AS2(mov
%B0
,%1,%2), xops
);
1557 output_asm_insn (AS2(mov
%B0
,%2,%0), xops
);
1564 standard_80387_constant_p (x
)
1567 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1572 if (setjmp (handler
))
1575 set_float_handler (handler
);
1576 REAL_VALUE_FROM_CONST_DOUBLE (d
, x
);
1577 is0
= REAL_VALUES_EQUAL (d
, dconst0
) && !REAL_VALUE_MINUS_ZERO (d
);
1578 is1
= REAL_VALUES_EQUAL (d
, dconst1
);
1579 set_float_handler (NULL_PTR
);
1587 /* Note that on the 80387, other constants, such as pi,
1588 are much slower to load as standard constants
1589 than to load from doubles in memory! */
1596 output_move_const_single (operands
)
1599 if (FP_REG_P (operands
[0]))
1601 int conval
= standard_80387_constant_p (operands
[1]);
1610 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1612 REAL_VALUE_TYPE r
; long l
;
1614 if (GET_MODE (operands
[1]) == XFmode
)
1617 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1618 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
1619 operands
[1] = GEN_INT (l
);
1622 return singlemove_string (operands
);
1625 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1626 reference and a constant. */
1629 symbolic_operand (op
, mode
)
1631 enum machine_mode mode
;
1633 switch (GET_CODE (op
))
1641 return ((GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
1642 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
1643 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
1650 /* Test for a valid operand for a call instruction.
1651 Don't allow the arg pointer register or virtual regs
1652 since they may change into reg + const, which the patterns
1653 can't handle yet. */
1656 call_insn_operand (op
, mode
)
1658 enum machine_mode mode
;
1660 if (GET_CODE (op
) == MEM
1661 && ((CONSTANT_ADDRESS_P (XEXP (op
, 0))
1662 /* This makes a difference for PIC. */
1663 && general_operand (XEXP (op
, 0), Pmode
))
1664 || (GET_CODE (XEXP (op
, 0)) == REG
1665 && XEXP (op
, 0) != arg_pointer_rtx
1666 && ! (REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1667 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
1673 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1677 expander_call_insn_operand (op
, mode
)
1679 enum machine_mode mode
;
1681 if (GET_CODE (op
) == MEM
1682 && (CONSTANT_ADDRESS_P (XEXP (op
, 0))
1683 || (GET_CODE (XEXP (op
, 0)) == REG
1684 && XEXP (op
, 0) != arg_pointer_rtx
1685 && ! (REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1686 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
1692 /* Return 1 if OP is a comparison operator that can use the condition code
1693 generated by an arithmetic operation. */
1696 arithmetic_comparison_operator (op
, mode
)
1698 enum machine_mode mode
;
1702 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
1705 code
= GET_CODE (op
);
1706 if (GET_RTX_CLASS (code
) != '<')
1709 return (code
!= GT
&& code
!= LE
);
1713 ix86_logical_operator (op
, mode
)
1715 enum machine_mode mode
;
1717 return GET_CODE (op
) == AND
|| GET_CODE (op
) == IOR
|| GET_CODE (op
) == XOR
;
1721 /* Returns 1 if OP contains a symbol reference */
1724 symbolic_reference_mentioned_p (op
)
1730 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1733 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1734 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1740 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1741 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1745 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1752 /* Attempt to expand a binary operator. Make the expansion closer to the
1753 actual machine, then just general_operand, which will allow 3 separate
1754 memory references (one output, two input) in a single insn. Return
1755 whether the insn fails, or succeeds. */
1758 ix86_expand_binary_operator (code
, mode
, operands
)
1760 enum machine_mode mode
;
1765 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1766 if (GET_RTX_CLASS (code
) == 'c'
1767 && (rtx_equal_p (operands
[0], operands
[2])
1768 || immediate_operand (operands
[1], mode
)))
1770 rtx temp
= operands
[1];
1771 operands
[1] = operands
[2];
1775 /* If optimizing, copy to regs to improve CSE */
1776 if (TARGET_PSEUDO
&& optimize
1777 && ((reload_in_progress
| reload_completed
) == 0))
1779 if (GET_CODE (operands
[1]) == MEM
1780 && ! rtx_equal_p (operands
[0], operands
[1]))
1781 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1783 if (GET_CODE (operands
[2]) == MEM
)
1784 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1786 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1788 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1790 emit_move_insn (temp
, operands
[1]);
1796 if (!ix86_binary_operator_ok (code
, mode
, operands
))
1798 /* If not optimizing, try to make a valid insn (optimize code
1799 previously did this above to improve chances of CSE) */
1801 if ((! TARGET_PSEUDO
|| !optimize
)
1802 && ((reload_in_progress
| reload_completed
) == 0)
1803 && (GET_CODE (operands
[1]) == MEM
|| GET_CODE (operands
[2]) == MEM
))
1806 if (GET_CODE (operands
[1]) == MEM
1807 && ! rtx_equal_p (operands
[0], operands
[1]))
1809 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1813 if (GET_CODE (operands
[2]) == MEM
)
1815 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1819 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1821 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1823 emit_move_insn (temp
, operands
[1]);
1828 if (modified
&& ! ix86_binary_operator_ok (code
, mode
, operands
))
1838 /* Return TRUE or FALSE depending on whether the binary operator meets the
1839 appropriate constraints. */
1842 ix86_binary_operator_ok (code
, mode
, operands
)
1844 enum machine_mode mode
;
1847 return (GET_CODE (operands
[1]) != MEM
|| GET_CODE (operands
[2]) != MEM
)
1848 && (GET_CODE (operands
[1]) != CONST_INT
|| GET_RTX_CLASS (code
) == 'c');
1851 /* Attempt to expand a unary operator. Make the expansion closer to the
1852 actual machine, then just general_operand, which will allow 2 separate
1853 memory references (one output, one input) in a single insn. Return
1854 whether the insn fails, or succeeds. */
1857 ix86_expand_unary_operator (code
, mode
, operands
)
1859 enum machine_mode mode
;
1862 /* If optimizing, copy to regs to improve CSE */
1865 && ((reload_in_progress
| reload_completed
) == 0)
1866 && GET_CODE (operands
[1]) == MEM
)
1867 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1869 if (! ix86_unary_operator_ok (code
, mode
, operands
))
1871 if ((! TARGET_PSEUDO
|| optimize
== 0)
1872 && ((reload_in_progress
| reload_completed
) == 0)
1873 && GET_CODE (operands
[1]) == MEM
)
1875 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1876 if (! ix86_unary_operator_ok (code
, mode
, operands
))
1886 /* Return TRUE or FALSE depending on whether the unary operator meets the
1887 appropriate constraints. */
1890 ix86_unary_operator_ok (code
, mode
, operands
)
1892 enum machine_mode mode
;
1898 static rtx pic_label_rtx
;
1899 static char pic_label_name
[256];
1900 static int pic_label_no
= 0;
1902 /* This function generates code for -fpic that loads %ebx with
1903 the return address of the caller and then returns. */
1906 asm_output_function_prefix (file
, name
)
1911 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1912 || current_function_uses_const_pool
);
1913 xops
[0] = pic_offset_table_rtx
;
1914 xops
[1] = stack_pointer_rtx
;
1916 /* Deep branch prediction favors having a return for every call. */
1917 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
1921 if (pic_label_rtx
== 0)
1923 pic_label_rtx
= gen_label_rtx ();
1924 ASM_GENERATE_INTERNAL_LABEL (pic_label_name
, "LPR", pic_label_no
++);
1925 LABEL_NAME (pic_label_rtx
) = pic_label_name
;
1928 prologue_node
= make_node (FUNCTION_DECL
);
1929 DECL_RESULT (prologue_node
) = 0;
1930 #ifdef ASM_DECLARE_FUNCTION_NAME
1931 ASM_DECLARE_FUNCTION_NAME (file
, pic_label_name
, prologue_node
);
1933 output_asm_insn ("movl (%1),%0", xops
);
1934 output_asm_insn ("ret", xops
);
1938 /* Generate the assembly code for function entry.
1939 FILE is an stdio stream to output the code to.
1940 SIZE is an int: how many units of temporary storage to allocate. */
1943 function_prologue (file
, size
)
1947 if (TARGET_SCHEDULE_PROLOGUE
)
1956 /* Expand the prologue into a bunch of separate insns. */
1959 ix86_expand_prologue ()
1961 if (! TARGET_SCHEDULE_PROLOGUE
)
1968 load_pic_register (do_rtl
)
1973 if (TARGET_DEEP_BRANCH_PREDICTION
)
1975 xops
[0] = pic_offset_table_rtx
;
1976 if (pic_label_rtx
== 0)
1978 pic_label_rtx
= gen_label_rtx ();
1979 ASM_GENERATE_INTERNAL_LABEL (pic_label_name
, "LPR", pic_label_no
++);
1980 LABEL_NAME (pic_label_rtx
) = pic_label_name
;
1983 xops
[1] = gen_rtx_MEM (QImode
,
1984 gen_rtx (SYMBOL_REF
, Pmode
,
1985 LABEL_NAME (pic_label_rtx
)));
1989 emit_insn (gen_prologue_get_pc (xops
[0], xops
[1]));
1990 emit_insn (gen_prologue_set_got (xops
[0],
1991 gen_rtx (SYMBOL_REF
, Pmode
,
1992 "$_GLOBAL_OFFSET_TABLE_"),
1997 output_asm_insn (AS1 (call
,%X1
), xops
);
1998 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops
);
2005 xops
[0] = pic_offset_table_rtx
;
2006 xops
[1] = gen_label_rtx ();
2010 /* We can't put a raw CODE_LABEL into the RTL, and we can't emit
2011 a new CODE_LABEL after reload, so we need a single pattern to
2012 emit the 3 necessary instructions. */
2013 emit_insn (gen_prologue_get_pc_and_set_got (xops
[0]));
2017 output_asm_insn (AS1 (call
,%P1
), xops
);
2018 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
2019 CODE_LABEL_NUMBER (xops
[1]));
2020 output_asm_insn (AS1 (pop
%L0
,%0), xops
);
2021 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops
);
2025 /* When -fpic, we must emit a scheduling barrier, so that the instruction
2026 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
2027 moved before any instruction which implicitly uses the got. */
2030 emit_insn (gen_blockage ());
2034 ix86_prologue (do_rtl
)
2040 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2041 || current_function_uses_const_pool
);
2042 long tsize
= get_frame_size ();
2044 int cfa_offset
= INCOMING_FRAME_SP_OFFSET
, cfa_store_offset
= cfa_offset
;
2046 xops
[0] = stack_pointer_rtx
;
2047 xops
[1] = frame_pointer_rtx
;
2048 xops
[2] = GEN_INT (tsize
);
2050 if (frame_pointer_needed
)
2054 insn
= emit_insn (gen_rtx (SET
, VOIDmode
,
2055 gen_rtx_MEM (SImode
,
2056 gen_rtx (PRE_DEC
, SImode
,
2057 stack_pointer_rtx
)),
2058 frame_pointer_rtx
));
2060 RTX_FRAME_RELATED_P (insn
) = 1;
2061 insn
= emit_move_insn (xops
[1], xops
[0]);
2062 RTX_FRAME_RELATED_P (insn
) = 1;
2067 output_asm_insn ("push%L1 %1", xops
);
2068 #ifdef INCOMING_RETURN_ADDR_RTX
2069 if (dwarf2out_do_frame ())
2071 char *l
= dwarf2out_cfi_label ();
2073 cfa_store_offset
+= 4;
2074 cfa_offset
= cfa_store_offset
;
2075 dwarf2out_def_cfa (l
, STACK_POINTER_REGNUM
, cfa_offset
);
2076 dwarf2out_reg_save (l
, FRAME_POINTER_REGNUM
, - cfa_store_offset
);
2080 output_asm_insn (AS2 (mov
%L0
,%0,%1), xops
);
2081 #ifdef INCOMING_RETURN_ADDR_RTX
2082 if (dwarf2out_do_frame ())
2083 dwarf2out_def_cfa ("", FRAME_POINTER_REGNUM
, cfa_offset
);
2090 else if (! TARGET_STACK_PROBE
|| tsize
< CHECK_STACK_LIMIT
)
2094 insn
= emit_insn (gen_prologue_set_stack_ptr (xops
[2]));
2095 RTX_FRAME_RELATED_P (insn
) = 1;
2099 output_asm_insn (AS2 (sub
%L0
,%2,%0), xops
);
2100 #ifdef INCOMING_RETURN_ADDR_RTX
2101 if (dwarf2out_do_frame ())
2103 cfa_store_offset
+= tsize
;
2104 if (! frame_pointer_needed
)
2106 cfa_offset
= cfa_store_offset
;
2107 dwarf2out_def_cfa ("", STACK_POINTER_REGNUM
, cfa_offset
);
2115 xops
[3] = gen_rtx_REG (SImode
, 0);
2117 emit_move_insn (xops
[3], xops
[2]);
2119 output_asm_insn (AS2 (mov
%L0
,%2,%3), xops
);
2121 xops
[3] = gen_rtx_MEM (FUNCTION_MODE
,
2122 gen_rtx (SYMBOL_REF
, Pmode
, "_alloca"));
2125 emit_call_insn (gen_rtx (CALL
, VOIDmode
, xops
[3], const0_rtx
));
2127 output_asm_insn (AS1 (call
,%P3
), xops
);
2130 /* Note If use enter it is NOT reversed args.
2131 This one is not reversed from intel!!
2132 I think enter is slower. Also sdb doesn't like it.
2133 But if you want it the code is:
2135 xops[3] = const0_rtx;
2136 output_asm_insn ("enter %2,%3", xops);
2140 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
2141 for (regno
= limit
- 1; regno
>= 0; regno
--)
2142 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2143 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2145 xops
[0] = gen_rtx_REG (SImode
, regno
);
2148 insn
= emit_insn (gen_rtx (SET
, VOIDmode
,
2149 gen_rtx_MEM (SImode
,
2150 gen_rtx (PRE_DEC
, SImode
,
2151 stack_pointer_rtx
)),
2154 RTX_FRAME_RELATED_P (insn
) = 1;
2158 output_asm_insn ("push%L0 %0", xops
);
2159 #ifdef INCOMING_RETURN_ADDR_RTX
2160 if (dwarf2out_do_frame ())
2162 char *l
= dwarf2out_cfi_label ();
2164 cfa_store_offset
+= 4;
2165 if (! frame_pointer_needed
)
2167 cfa_offset
= cfa_store_offset
;
2168 dwarf2out_def_cfa (l
, STACK_POINTER_REGNUM
, cfa_offset
);
2171 dwarf2out_reg_save (l
, regno
, - cfa_store_offset
);
2178 load_pic_register (do_rtl
);
2180 /* If we are profiling, make sure no instructions are scheduled before
2181 the call to mcount. However, if -fpic, the above call will have
2183 if ((profile_flag
|| profile_block_flag
)
2184 && ! pic_reg_used
&& do_rtl
)
2185 emit_insn (gen_blockage ());
2188 /* Return 1 if it is appropriate to emit `ret' instructions in the
2189 body of a function. Do this only if the epilogue is simple, needing a
2190 couple of insns. Prior to reloading, we can't tell how many registers
2191 must be saved, so return 0 then. Return 0 if there is no frame
2192 marker to de-allocate.
2194 If NON_SAVING_SETJMP is defined and true, then it is not possible
2195 for the epilogue to be simple, so return 0. This is a special case
2196 since NON_SAVING_SETJMP will not cause regs_ever_live to change
2197 until final, but jump_optimize may need to know sooner if a
2201 ix86_can_use_return_insn_p ()
2205 int reglimit
= (frame_pointer_needed
2206 ? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
2207 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2208 || current_function_uses_const_pool
);
2210 #ifdef NON_SAVING_SETJMP
2211 if (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
2215 if (! reload_completed
)
2218 for (regno
= reglimit
- 1; regno
>= 0; regno
--)
2219 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2220 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2223 return nregs
== 0 || ! frame_pointer_needed
;
2226 /* This function generates the assembly code for function exit.
2227 FILE is an stdio stream to output the code to.
2228 SIZE is an int: how many units of temporary storage to deallocate. */
2231 function_epilogue (file
, size
)
2238 /* Restore function stack, frame, and registers. */
2241 ix86_expand_epilogue ()
2247 ix86_epilogue (do_rtl
)
2251 register int nregs
, limit
;
2254 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2255 || current_function_uses_const_pool
);
2256 long tsize
= get_frame_size ();
2258 /* Compute the number of registers to pop */
2260 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
2264 for (regno
= limit
- 1; regno
>= 0; regno
--)
2265 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2266 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2269 /* sp is often unreliable so we must go off the frame pointer.
2271 In reality, we may not care if sp is unreliable, because we can restore
2272 the register relative to the frame pointer. In theory, since each move
2273 is the same speed as a pop, and we don't need the leal, this is faster.
2274 For now restore multiple registers the old way. */
2276 offset
= - tsize
- (nregs
* UNITS_PER_WORD
);
2278 xops
[2] = stack_pointer_rtx
;
2280 /* When -fpic, we must emit a scheduling barrier, so that the instruction
2281 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
2282 moved before any instruction which implicitly uses the got. This
2283 includes any instruction which uses a SYMBOL_REF or a LABEL_REF.
2285 Alternatively, this could be fixed by making the dependence on the
2286 PIC_OFFSET_TABLE_REGNUM explicit in the RTL. */
2288 if (flag_pic
|| profile_flag
|| profile_block_flag
)
2289 emit_insn (gen_blockage ());
2291 if (nregs
> 1 || ! frame_pointer_needed
)
2293 if (frame_pointer_needed
)
2295 xops
[0] = adj_offsettable_operand (AT_BP (QImode
), offset
);
2297 emit_insn (gen_movsi_lea (xops
[2], XEXP (xops
[0], 0)));
2299 output_asm_insn (AS2 (lea
%L2
,%0,%2), xops
);
2302 for (regno
= 0; regno
< limit
; regno
++)
2303 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2304 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2306 xops
[0] = gen_rtx_REG (SImode
, regno
);
2309 emit_insn (gen_pop (xops
[0]));
2311 output_asm_insn ("pop%L0 %0", xops
);
2316 for (regno
= 0; regno
< limit
; regno
++)
2317 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2318 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2320 xops
[0] = gen_rtx_REG (SImode
, regno
);
2321 xops
[1] = adj_offsettable_operand (AT_BP (Pmode
), offset
);
2324 emit_move_insn (xops
[0], xops
[1]);
2326 output_asm_insn (AS2 (mov
%L0
,%1,%0), xops
);
2331 if (frame_pointer_needed
)
2333 /* If not an i386, mov & pop is faster than "leave". */
2335 if (TARGET_USE_LEAVE
)
2338 emit_insn (gen_leave());
2340 output_asm_insn ("leave", xops
);
2344 xops
[0] = frame_pointer_rtx
;
2345 xops
[1] = stack_pointer_rtx
;
2349 emit_insn (gen_epilogue_set_stack_ptr());
2350 emit_insn (gen_pop (xops
[0]));
2354 output_asm_insn (AS2 (mov
%L2
,%0,%2), xops
);
2355 output_asm_insn ("pop%L0 %0", xops
);
2362 /* If there is no frame pointer, we must still release the frame. */
2363 xops
[0] = GEN_INT (tsize
);
2366 emit_insn (gen_rtx (SET
, VOIDmode
, xops
[2],
2367 gen_rtx (PLUS
, SImode
, xops
[2], xops
[0])));
2369 output_asm_insn (AS2 (add
%L2
,%0,%2), xops
);
2372 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2373 if (profile_block_flag
== 2)
2375 FUNCTION_BLOCK_PROFILER_EXIT(file
);
2379 if (current_function_pops_args
&& current_function_args_size
)
2381 xops
[1] = GEN_INT (current_function_pops_args
);
2383 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2384 asked to pop more, pop return address, do explicit add, and jump
2385 indirectly to the caller. */
2387 if (current_function_pops_args
>= 32768)
2389 /* ??? Which register to use here? */
2390 xops
[0] = gen_rtx_REG (SImode
, 2);
2394 emit_insn (gen_pop (xops
[0]));
2395 emit_insn (gen_rtx (SET
, VOIDmode
, xops
[2],
2396 gen_rtx (PLUS
, SImode
, xops
[1], xops
[2])));
2397 emit_jump_insn (xops
[0]);
2401 output_asm_insn ("pop%L0 %0", xops
);
2402 output_asm_insn (AS2 (add
%L2
,%1,%2), xops
);
2403 output_asm_insn ("jmp %*%0", xops
);
2409 emit_jump_insn (gen_return_pop_internal (xops
[1]));
2411 output_asm_insn ("ret %1", xops
);
2417 emit_jump_insn (gen_return_internal ());
2419 output_asm_insn ("ret", xops
);
2423 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2424 that is a valid memory address for an instruction.
2425 The MODE argument is the machine mode for the MEM expression
2426 that wants to use this address.
2428 On x86, legitimate addresses are:
2429 base movl (base),reg
2430 displacement movl disp,reg
2431 base + displacement movl disp(base),reg
2432 index + base movl (base,index),reg
2433 (index + base) + displacement movl disp(base,index),reg
2434 index*scale movl (,index,scale),reg
2435 index*scale + disp movl disp(,index,scale),reg
2436 index*scale + base movl (base,index,scale),reg
2437 (index*scale + base) + disp movl disp(base,index,scale),reg
2439 In each case, scale can be 1, 2, 4, 8. */
2441 /* This is exactly the same as print_operand_addr, except that
2442 it recognizes addresses instead of printing them.
2444 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2445 convert common non-canonical forms to canonical form so that they will
2448 #define ADDR_INVALID(msg,insn) \
2450 if (TARGET_DEBUG_ADDR) \
2452 fprintf (stderr, msg); \
2458 legitimate_address_p (mode
, addr
, strict
)
2459 enum machine_mode mode
;
2463 rtx base
= NULL_RTX
;
2464 rtx indx
= NULL_RTX
;
2465 rtx scale
= NULL_RTX
;
2466 rtx disp
= NULL_RTX
;
2468 if (TARGET_DEBUG_ADDR
)
2471 "\n======\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2472 GET_MODE_NAME (mode
), strict
);
2477 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == SUBREG
)
2480 else if (GET_CODE (addr
) == PLUS
)
2482 rtx op0
= XEXP (addr
, 0);
2483 rtx op1
= XEXP (addr
, 1);
2484 enum rtx_code code0
= GET_CODE (op0
);
2485 enum rtx_code code1
= GET_CODE (op1
);
2487 if (code0
== REG
|| code0
== SUBREG
)
2489 if (code1
== REG
|| code1
== SUBREG
)
2491 indx
= op0
; /* index + base */
2497 base
= op0
; /* base + displacement */
2502 else if (code0
== MULT
)
2504 indx
= XEXP (op0
, 0);
2505 scale
= XEXP (op0
, 1);
2507 if (code1
== REG
|| code1
== SUBREG
)
2508 base
= op1
; /* index*scale + base */
2511 disp
= op1
; /* index*scale + disp */
2514 else if (code0
== PLUS
&& GET_CODE (XEXP (op0
, 0)) == MULT
)
2516 indx
= XEXP (XEXP (op0
, 0), 0); /* index*scale + base + disp */
2517 scale
= XEXP (XEXP (op0
, 0), 1);
2518 base
= XEXP (op0
, 1);
2522 else if (code0
== PLUS
)
2524 indx
= XEXP (op0
, 0); /* index + base + disp */
2525 base
= XEXP (op0
, 1);
2531 ADDR_INVALID ("PLUS subcode is not valid.\n", op0
);
2536 else if (GET_CODE (addr
) == MULT
)
2538 indx
= XEXP (addr
, 0); /* index*scale */
2539 scale
= XEXP (addr
, 1);
2543 disp
= addr
; /* displacement */
2545 /* Allow arg pointer and stack pointer as index if there is not scaling */
2546 if (base
&& indx
&& !scale
2547 && (indx
== arg_pointer_rtx
|| indx
== stack_pointer_rtx
))
2554 /* Validate base register:
2556 Don't allow SUBREG's here, it can lead to spill failures when the base
2557 is one word out of a two word structure, which is represented internally
2562 if (GET_CODE (base
) != REG
)
2564 ADDR_INVALID ("Base is not a register.\n", base
);
2568 if ((strict
&& ! REG_OK_FOR_BASE_STRICT_P (base
))
2569 || (! strict
&& ! REG_OK_FOR_BASE_NONSTRICT_P (base
)))
2571 ADDR_INVALID ("Base is not valid.\n", base
);
2576 /* Validate index register:
2578 Don't allow SUBREG's here, it can lead to spill failures when the index
2579 is one word out of a two word structure, which is represented internally
2583 if (GET_CODE (indx
) != REG
)
2585 ADDR_INVALID ("Index is not a register.\n", indx
);
2589 if ((strict
&& ! REG_OK_FOR_INDEX_STRICT_P (indx
))
2590 || (! strict
&& ! REG_OK_FOR_INDEX_NONSTRICT_P (indx
)))
2592 ADDR_INVALID ("Index is not valid.\n", indx
);
2597 abort (); /* scale w/o index invalid */
2599 /* Validate scale factor: */
2602 HOST_WIDE_INT value
;
2604 if (GET_CODE (scale
) != CONST_INT
)
2606 ADDR_INVALID ("Scale is not valid.\n", scale
);
2610 value
= INTVAL (scale
);
2611 if (value
!= 1 && value
!= 2 && value
!= 4 && value
!= 8)
2613 ADDR_INVALID ("Scale is not a good multiplier.\n", scale
);
2618 /* Validate displacement
2619 Constant pool addresses must be handled special. They are
2620 considered legitimate addresses, but only if not used with regs.
2621 When printed, the output routines know to print the reference with the
2622 PIC reg, even though the PIC reg doesn't appear in the RTL. */
2625 if (GET_CODE (disp
) == SYMBOL_REF
2626 && CONSTANT_POOL_ADDRESS_P (disp
)
2631 else if (!CONSTANT_ADDRESS_P (disp
))
2633 ADDR_INVALID ("Displacement is not valid.\n", disp
);
2637 else if (GET_CODE (disp
) == CONST_DOUBLE
)
2639 ADDR_INVALID ("Displacement is a const_double.\n", disp
);
2643 else if (flag_pic
&& SYMBOLIC_CONST (disp
)
2644 && base
!= pic_offset_table_rtx
2645 && (indx
!= pic_offset_table_rtx
|| scale
!= NULL_RTX
))
2647 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp
);
2651 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp
)
2652 && (base
!= NULL_RTX
|| indx
!= NULL_RTX
))
2654 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n",
2660 if (TARGET_DEBUG_ADDR
)
2661 fprintf (stderr
, "Address is valid.\n");
2663 /* Everything looks valid, return true */
2667 /* Return a legitimate reference for ORIG (an address) using the
2668 register REG. If REG is 0, a new pseudo is generated.
2670 There are three types of references that must be handled:
2672 1. Global data references must load the address from the GOT, via
2673 the PIC reg. An insn is emitted to do this load, and the reg is
2676 2. Static data references must compute the address as an offset
2677 from the GOT, whose base is in the PIC reg. An insn is emitted to
2678 compute the address into a reg, and the reg is returned. Static
2679 data objects have SYMBOL_REF_FLAG set to differentiate them from
2680 global data objects.
2682 3. Constant pool addresses must be handled special. They are
2683 considered legitimate addresses, but only if not used with regs.
2684 When printed, the output routines know to print the reference with the
2685 PIC reg, even though the PIC reg doesn't appear in the RTL.
2687 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2688 reg also appears in the address (except for constant pool references,
2691 "switch" statements also require special handling when generating
2692 PIC code. See comments by the `casesi' insn in i386.md for details. */
2695 legitimize_pic_address (orig
, reg
)
2702 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
2704 if (GET_CODE (addr
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (addr
))
2709 reg
= gen_reg_rtx (Pmode
);
2711 if ((GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_FLAG (addr
))
2712 || GET_CODE (addr
) == LABEL_REF
)
2713 new = gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
, orig
);
2715 new = gen_rtx_MEM (Pmode
,
2716 gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
, orig
));
2718 emit_move_insn (reg
, new);
2720 current_function_uses_pic_offset_table
= 1;
2724 else if (GET_CODE (addr
) == CONST
|| GET_CODE (addr
) == PLUS
)
2728 if (GET_CODE (addr
) == CONST
)
2730 addr
= XEXP (addr
, 0);
2731 if (GET_CODE (addr
) != PLUS
)
2735 if (XEXP (addr
, 0) == pic_offset_table_rtx
)
2739 reg
= gen_reg_rtx (Pmode
);
2741 base
= legitimize_pic_address (XEXP (addr
, 0), reg
);
2742 addr
= legitimize_pic_address (XEXP (addr
, 1),
2743 base
== reg
? NULL_RTX
: reg
);
2745 if (GET_CODE (addr
) == CONST_INT
)
2746 return plus_constant (base
, INTVAL (addr
));
2748 if (GET_CODE (addr
) == PLUS
&& CONSTANT_P (XEXP (addr
, 1)))
2750 base
= gen_rtx (PLUS
, Pmode
, base
, XEXP (addr
, 0));
2751 addr
= XEXP (addr
, 1);
2754 return gen_rtx (PLUS
, Pmode
, base
, addr
);
2759 /* Emit insns to move operands[1] into operands[0]. */
2762 emit_pic_move (operands
, mode
)
2764 enum machine_mode mode
;
2766 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
2768 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
2769 operands
[1] = force_reg (SImode
, operands
[1]);
2771 operands
[1] = legitimize_pic_address (operands
[1], temp
);
2774 /* Try machine-dependent ways of modifying an illegitimate address
2775 to be legitimate. If we find one, return the new, valid address.
2776 This macro is used in only one place: `memory_address' in explow.c.
2778 OLDX is the address as it was before break_out_memory_refs was called.
2779 In some cases it is useful to look at this to decide what needs to be done.
2781 MODE and WIN are passed so that this macro can use
2782 GO_IF_LEGITIMATE_ADDRESS.
2784 It is always safe for this macro to do nothing. It exists to recognize
2785 opportunities to optimize the output.
2787 For the 80386, we handle X+REG by loading X into a register R and
2788 using R+REG. R will go in a general reg and indexing will be used.
2789 However, if REG is a broken-out memory address or multiplication,
2790 nothing needs to be done because REG can certainly go in a general reg.
2792 When -fpic is used, special handling is needed for symbolic references.
2793 See comments by legitimize_pic_address in i386.c for details. */
2796 legitimize_address (x
, oldx
, mode
)
2799 enum machine_mode mode
;
2804 if (TARGET_DEBUG_ADDR
)
2806 fprintf (stderr
, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n",
2807 GET_MODE_NAME (mode
));
2811 if (flag_pic
&& SYMBOLIC_CONST (x
))
2812 return legitimize_pic_address (x
, 0);
2814 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2815 if (GET_CODE (x
) == ASHIFT
2816 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2817 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (x
, 1)))) < 4)
2820 x
= gen_rtx (MULT
, Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2821 GEN_INT (1 << log
));
2824 if (GET_CODE (x
) == PLUS
)
2826 /* Canonicalize shifts by 0, 1, 2, 3 into multiply. */
2828 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
2829 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2830 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 0), 1)))) < 4)
2833 XEXP (x
, 0) = gen_rtx (MULT
, Pmode
,
2834 force_reg (Pmode
, XEXP (XEXP (x
, 0), 0)),
2835 GEN_INT (1 << log
));
2838 if (GET_CODE (XEXP (x
, 1)) == ASHIFT
2839 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
2840 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 1), 1)))) < 4)
2843 XEXP (x
, 1) = gen_rtx (MULT
, Pmode
,
2844 force_reg (Pmode
, XEXP (XEXP (x
, 1), 0)),
2845 GEN_INT (1 << log
));
2848 /* Put multiply first if it isn't already. */
2849 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2851 rtx tmp
= XEXP (x
, 0);
2852 XEXP (x
, 0) = XEXP (x
, 1);
2857 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2858 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2859 created by virtual register instantiation, register elimination, and
2860 similar optimizations. */
2861 if (GET_CODE (XEXP (x
, 0)) == MULT
&& GET_CODE (XEXP (x
, 1)) == PLUS
)
2864 x
= gen_rtx (PLUS
, Pmode
,
2865 gen_rtx (PLUS
, Pmode
, XEXP (x
, 0),
2866 XEXP (XEXP (x
, 1), 0)),
2867 XEXP (XEXP (x
, 1), 1));
2871 (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2872 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2873 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 0)) == PLUS
2874 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == MULT
2875 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == PLUS
2876 && CONSTANT_P (XEXP (x
, 1)))
2879 rtx other
= NULL_RTX
;
2881 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2883 constant
= XEXP (x
, 1);
2884 other
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
2886 else if (GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 1)) == CONST_INT
)
2888 constant
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
2889 other
= XEXP (x
, 1);
2897 x
= gen_rtx (PLUS
, Pmode
,
2898 gen_rtx (PLUS
, Pmode
, XEXP (XEXP (x
, 0), 0),
2899 XEXP (XEXP (XEXP (x
, 0), 1), 0)),
2900 plus_constant (other
, INTVAL (constant
)));
2904 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
2907 if (GET_CODE (XEXP (x
, 0)) == MULT
)
2910 XEXP (x
, 0) = force_operand (XEXP (x
, 0), 0);
2913 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2916 XEXP (x
, 1) = force_operand (XEXP (x
, 1), 0);
2920 && GET_CODE (XEXP (x
, 1)) == REG
2921 && GET_CODE (XEXP (x
, 0)) == REG
)
2924 if (flag_pic
&& SYMBOLIC_CONST (XEXP (x
, 1)))
2927 x
= legitimize_pic_address (x
, 0);
2930 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
2933 if (GET_CODE (XEXP (x
, 0)) == REG
)
2935 register rtx temp
= gen_reg_rtx (Pmode
);
2936 register rtx val
= force_operand (XEXP (x
, 1), temp
);
2938 emit_move_insn (temp
, val
);
2944 else if (GET_CODE (XEXP (x
, 1)) == REG
)
2946 register rtx temp
= gen_reg_rtx (Pmode
);
2947 register rtx val
= force_operand (XEXP (x
, 0), temp
);
2949 emit_move_insn (temp
, val
);
2959 /* Print an integer constant expression in assembler syntax. Addition
2960 and subtraction are the only arithmetic that may appear in these
2961 expressions. FILE is the stdio stream to write to, X is the rtx, and
2962 CODE is the operand print code from the output string. */
2965 output_pic_addr_const (file
, x
, code
)
2972 switch (GET_CODE (x
))
2983 if (GET_CODE (x
) == SYMBOL_REF
)
2984 assemble_name (file
, XSTR (x
, 0));
2987 ASM_GENERATE_INTERNAL_LABEL (buf
, "L",
2988 CODE_LABEL_NUMBER (XEXP (x
, 0)));
2989 assemble_name (asm_out_file
, buf
);
2993 ; /* No suffix, dammit. */
2994 else if (GET_CODE (x
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x
))
2995 fprintf (file
, "@GOTOFF(%%ebx)");
2996 else if (code
== 'P')
2997 fprintf (file
, "@PLT");
2998 else if (GET_CODE (x
) == LABEL_REF
)
2999 fprintf (file
, "@GOTOFF");
3000 else if (! SYMBOL_REF_FLAG (x
))
3001 fprintf (file
, "@GOT");
3003 fprintf (file
, "@GOTOFF");
3008 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (x
));
3009 assemble_name (asm_out_file
, buf
);
3013 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
));
3017 /* This used to output parentheses around the expression,
3018 but that does not work on the 386 (either ATT or BSD assembler). */
3019 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3023 if (GET_MODE (x
) == VOIDmode
)
3025 /* We can use %d if the number is <32 bits and positive. */
3026 if (CONST_DOUBLE_HIGH (x
) || CONST_DOUBLE_LOW (x
) < 0)
3027 fprintf (file
, "0x%lx%08lx",
3028 (unsigned long) CONST_DOUBLE_HIGH (x
),
3029 (unsigned long) CONST_DOUBLE_LOW (x
));
3031 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_DOUBLE_LOW (x
));
3034 /* We can't handle floating point constants;
3035 PRINT_OPERAND must handle them. */
3036 output_operand_lossage ("floating constant misused");
3040 /* Some assemblers need integer constants to appear first. */
3041 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
3043 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3044 if (INTVAL (XEXP (x
, 1)) >= 0)
3045 fprintf (file
, "+");
3046 output_pic_addr_const (file
, XEXP (x
, 1), code
);
3050 output_pic_addr_const (file
, XEXP (x
, 1), code
);
3051 if (INTVAL (XEXP (x
, 0)) >= 0)
3052 fprintf (file
, "+");
3053 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3058 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3059 fprintf (file
, "-");
3060 output_pic_addr_const (file
, XEXP (x
, 1), code
);
3064 output_operand_lossage ("invalid expression as operand");
3068 /* Append the correct conditional move suffix which corresponds to CODE. */
3071 put_condition_code (code
, reverse_cc
, mode
, file
)
3074 enum mode_class mode
;
3077 int ieee
= (TARGET_IEEE_FP
&& (cc_prev_status
.flags
& CC_IN_80387
)
3078 && ! (cc_prev_status
.flags
& CC_FCOMI
));
3079 if (reverse_cc
&& ! ieee
)
3080 code
= reverse_condition (code
);
3082 if (mode
== MODE_INT
)
3086 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
3093 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
3100 if (cc_prev_status
.flags
& CC_NO_OVERFLOW
)
3115 if (cc_prev_status
.flags
& CC_NO_OVERFLOW
)
3138 output_operand_lossage ("Invalid %%C operand");
3141 else if (mode
== MODE_FLOAT
)
3145 fputs (ieee
? (reverse_cc
? "ne" : "e") : "ne", file
);
3148 fputs (ieee
? (reverse_cc
? "ne" : "e") : "e", file
);
3151 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nb", file
);
3154 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nbe", file
);
3157 fputs (ieee
? (reverse_cc
? "nb" : "b") : "be", file
);
3160 fputs (ieee
? (reverse_cc
? "ne" : "e") : "b", file
);
3163 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nb", file
);
3166 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nbe", file
);
3169 fputs (ieee
? (reverse_cc
? "nb" : "b") : "be", file
);
3172 fputs (ieee
? (reverse_cc
? "ne" : "e") : "b", file
);
3175 output_operand_lossage ("Invalid %%C operand");
3180 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
3181 C -- print opcode suffix for set/cmov insn.
3182 c -- like C, but print reversed condition
3183 F -- print opcode suffix for fcmov insn.
3184 f -- like C, but print reversed condition
3185 R -- print the prefix for register names.
3186 z -- print the opcode suffix for the size of the current operand.
3187 * -- print a star (in certain assembler syntax)
3188 w -- print the operand as if it's a "word" (HImode) even if it isn't.
3189 c -- don't print special prefixes before constant operands.
3190 J -- print the appropriate jump operand.
3191 s -- print a shift double count, followed by the assemblers argument
3193 b -- print the QImode name of the register for the indicated operand.
3194 %b0 would print %al if operands[0] is reg 0.
3195 w -- likewise, print the HImode name of the register.
3196 k -- likewise, print the SImode name of the register.
3197 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
3198 y -- print "st(0)" instead of "st" as a register.
3199 P -- print as a PIC constant */
3202 print_operand (file
, x
, code
)
3217 PUT_OP_SIZE (code
, 'l', file
);
3221 PUT_OP_SIZE (code
, 'w', file
);
3225 PUT_OP_SIZE (code
, 'b', file
);
3229 PUT_OP_SIZE (code
, 'l', file
);
3233 PUT_OP_SIZE (code
, 's', file
);
3237 PUT_OP_SIZE (code
, 't', file
);
3241 /* 387 opcodes don't get size suffixes if the operands are
3244 if (STACK_REG_P (x
))
3247 /* this is the size of op from size of operand */
3248 switch (GET_MODE_SIZE (GET_MODE (x
)))
3251 PUT_OP_SIZE ('B', 'b', file
);
3255 PUT_OP_SIZE ('W', 'w', file
);
3259 if (GET_MODE (x
) == SFmode
)
3261 PUT_OP_SIZE ('S', 's', file
);
3265 PUT_OP_SIZE ('L', 'l', file
);
3269 PUT_OP_SIZE ('T', 't', file
);
3273 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
)
3275 #ifdef GAS_MNEMONICS
3276 PUT_OP_SIZE ('Q', 'q', file
);
3279 PUT_OP_SIZE ('Q', 'l', file
); /* Fall through */
3283 PUT_OP_SIZE ('Q', 'l', file
);
3297 switch (GET_CODE (x
))
3299 /* These conditions are appropriate for testing the result
3300 of an arithmetic operation, not for a compare operation.
3301 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
3302 CC_Z_IN_NOT_C false and not floating point. */
3303 case NE
: fputs ("jne", file
); return;
3304 case EQ
: fputs ("je", file
); return;
3305 case GE
: fputs ("jns", file
); return;
3306 case LT
: fputs ("js", file
); return;
3307 case GEU
: fputs ("jmp", file
); return;
3308 case GTU
: fputs ("jne", file
); return;
3309 case LEU
: fputs ("je", file
); return;
3310 case LTU
: fputs ("#branch never", file
); return;
3312 /* no matching branches for GT nor LE */
3319 if (GET_CODE (x
) == CONST_INT
|| ! SHIFT_DOUBLE_OMITS_COUNT
)
3321 PRINT_OPERAND (file
, x
, 0);
3322 fputs (AS2C (,) + 1, file
);
3327 /* This is used by the conditional move instructions. */
3329 put_condition_code (GET_CODE (x
), 0, MODE_INT
, file
);
3332 /* Like above, but reverse condition */
3334 put_condition_code (GET_CODE (x
), 1, MODE_INT
, file
); return;
3337 put_condition_code (GET_CODE (x
), 0, MODE_FLOAT
, file
);
3340 /* Like above, but reverse condition */
3342 put_condition_code (GET_CODE (x
), 1, MODE_FLOAT
, file
);
3349 sprintf (str
, "invalid operand code `%c'", code
);
3350 output_operand_lossage (str
);
3355 if (GET_CODE (x
) == REG
)
3357 PRINT_REG (x
, code
, file
);
3360 else if (GET_CODE (x
) == MEM
)
3362 PRINT_PTR (x
, file
);
3363 if (CONSTANT_ADDRESS_P (XEXP (x
, 0)))
3366 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3368 output_addr_const (file
, XEXP (x
, 0));
3371 output_address (XEXP (x
, 0));
3374 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
3379 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3380 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
3381 PRINT_IMMED_PREFIX (file
);
3382 fprintf (file
, "0x%lx", l
);
3385 /* These float cases don't actually occur as immediate operands. */
3386 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
3391 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3392 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
3393 fprintf (file
, "%s", dstr
);
3396 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == XFmode
)
3401 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3402 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
3403 fprintf (file
, "%s", dstr
);
3409 if (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
)
3410 PRINT_IMMED_PREFIX (file
);
3411 else if (GET_CODE (x
) == CONST
|| GET_CODE (x
) == SYMBOL_REF
3412 || GET_CODE (x
) == LABEL_REF
)
3413 PRINT_OFFSET_PREFIX (file
);
3416 output_pic_addr_const (file
, x
, code
);
3418 output_addr_const (file
, x
);
3422 /* Print a memory operand whose address is ADDR. */
3425 print_operand_address (file
, addr
)
3429 register rtx reg1
, reg2
, breg
, ireg
;
3432 switch (GET_CODE (addr
))
3436 fprintf (file
, "%se", RP
);
3437 fputs (hi_reg_name
[REGNO (addr
)], file
);
3447 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0)))
3449 offset
= XEXP (addr
, 0);
3450 addr
= XEXP (addr
, 1);
3452 else if (CONSTANT_ADDRESS_P (XEXP (addr
, 1)))
3454 offset
= XEXP (addr
, 1);
3455 addr
= XEXP (addr
, 0);
3458 if (GET_CODE (addr
) != PLUS
)
3460 else if (GET_CODE (XEXP (addr
, 0)) == MULT
)
3461 reg1
= XEXP (addr
, 0), addr
= XEXP (addr
, 1);
3462 else if (GET_CODE (XEXP (addr
, 1)) == MULT
)
3463 reg1
= XEXP (addr
, 1), addr
= XEXP (addr
, 0);
3464 else if (GET_CODE (XEXP (addr
, 0)) == REG
)
3465 reg1
= XEXP (addr
, 0), addr
= XEXP (addr
, 1);
3466 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
3467 reg1
= XEXP (addr
, 1), addr
= XEXP (addr
, 0);
3469 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == MULT
)
3486 if ((reg1
&& GET_CODE (reg1
) == MULT
)
3487 || (reg2
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2
))))
3492 else if (reg1
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1
)))
3498 if (ireg
!= 0 || breg
!= 0)
3505 output_pic_addr_const (file
, addr
, 0);
3506 else if (GET_CODE (addr
) == LABEL_REF
)
3507 output_asm_label (addr
);
3509 output_addr_const (file
, addr
);
3512 if (ireg
!= 0 && GET_CODE (ireg
) == MULT
)
3514 scale
= INTVAL (XEXP (ireg
, 1));
3515 ireg
= XEXP (ireg
, 0);
3518 /* The stack pointer can only appear as a base register,
3519 never an index register, so exchange the regs if it is wrong. */
3521 if (scale
== 1 && ireg
&& REGNO (ireg
) == STACK_POINTER_REGNUM
)
3530 /* output breg+ireg*scale */
3531 PRINT_B_I_S (breg
, ireg
, scale
, file
);
3539 if (GET_CODE (XEXP (addr
, 0)) == CONST_INT
)
3541 scale
= INTVAL (XEXP (addr
, 0));
3542 ireg
= XEXP (addr
, 1);
3546 scale
= INTVAL (XEXP (addr
, 1));
3547 ireg
= XEXP (addr
, 0);
3550 output_addr_const (file
, const0_rtx
);
3551 PRINT_B_I_S (NULL_RTX
, ireg
, scale
, file
);
3556 if (GET_CODE (addr
) == CONST_INT
3557 && INTVAL (addr
) < 0x8000
3558 && INTVAL (addr
) >= -0x8000)
3559 fprintf (file
, "%d", (int) INTVAL (addr
));
3563 output_pic_addr_const (file
, addr
, 0);
3565 output_addr_const (file
, addr
);
3570 /* Set the cc_status for the results of an insn whose pattern is EXP.
3571 On the 80386, we assume that only test and compare insns, as well
3572 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, BSF, ASHIFT,
3573 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3574 Also, we assume that jumps, moves and sCOND don't affect the condition
3575 codes. All else clobbers the condition codes, by assumption.
3577 We assume that ALL integer add, minus, etc. instructions effect the
3578 condition codes. This MUST be consistent with i386.md.
3580 We don't record any float test or compare - the redundant test &
3581 compare check in final.c does not handle stack-like regs correctly. */
3584 notice_update_cc (exp
)
3587 if (GET_CODE (exp
) == SET
)
3589 /* Jumps do not alter the cc's. */
3590 if (SET_DEST (exp
) == pc_rtx
)
3593 /* Moving register or memory into a register:
3594 it doesn't alter the cc's, but it might invalidate
3595 the RTX's which we remember the cc's came from.
3596 (Note that moving a constant 0 or 1 MAY set the cc's). */
3597 if (REG_P (SET_DEST (exp
))
3598 && (REG_P (SET_SRC (exp
)) || GET_CODE (SET_SRC (exp
)) == MEM
3599 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3601 if (cc_status
.value1
3602 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3603 cc_status
.value1
= 0;
3605 if (cc_status
.value2
3606 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3607 cc_status
.value2
= 0;
3612 /* Moving register into memory doesn't alter the cc's.
3613 It may invalidate the RTX's which we remember the cc's came from. */
3614 if (GET_CODE (SET_DEST (exp
)) == MEM
3615 && (REG_P (SET_SRC (exp
))
3616 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3618 if (cc_status
.value1
3619 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3620 cc_status
.value1
= 0;
3621 if (cc_status
.value2
3622 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3623 cc_status
.value2
= 0;
3628 /* Function calls clobber the cc's. */
3629 else if (GET_CODE (SET_SRC (exp
)) == CALL
)
3635 /* Tests and compares set the cc's in predictable ways. */
3636 else if (SET_DEST (exp
) == cc0_rtx
)
3639 cc_status
.value1
= SET_SRC (exp
);
3643 /* Certain instructions effect the condition codes. */
3644 else if (GET_MODE (SET_SRC (exp
)) == SImode
3645 || GET_MODE (SET_SRC (exp
)) == HImode
3646 || GET_MODE (SET_SRC (exp
)) == QImode
)
3647 switch (GET_CODE (SET_SRC (exp
)))
3649 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
3650 /* Shifts on the 386 don't set the condition codes if the
3651 shift count is zero. */
3652 if (GET_CODE (XEXP (SET_SRC (exp
), 1)) != CONST_INT
)
3658 /* We assume that the CONST_INT is non-zero (this rtx would
3659 have been deleted if it were zero. */
3661 case PLUS
: case MINUS
: case NEG
:
3662 case AND
: case IOR
: case XOR
:
3663 cc_status
.flags
= CC_NO_OVERFLOW
;
3664 cc_status
.value1
= SET_SRC (exp
);
3665 cc_status
.value2
= SET_DEST (exp
);
3668 /* This is the bsf pattern used by ffs. */
3670 if (XINT (SET_SRC (exp
), 1) == 5)
3672 /* Only the Z flag is defined after bsf. */
3674 = CC_NOT_POSITIVE
| CC_NOT_NEGATIVE
| CC_NO_OVERFLOW
;
3675 cc_status
.value1
= XVECEXP (SET_SRC (exp
), 0, 0);
3688 else if (GET_CODE (exp
) == PARALLEL
3689 && GET_CODE (XVECEXP (exp
, 0, 0)) == SET
)
3691 if (SET_DEST (XVECEXP (exp
, 0, 0)) == pc_rtx
)
3693 if (SET_DEST (XVECEXP (exp
, 0, 0)) == cc0_rtx
)
3697 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp
, 0, 0))))
3699 cc_status
.flags
|= CC_IN_80387
;
3700 if (0 && TARGET_CMOVE
&& stack_regs_mentioned_p
3701 (XEXP (SET_SRC (XVECEXP (exp
, 0, 0)), 1)))
3702 cc_status
.flags
|= CC_FCOMI
;
3705 cc_status
.value1
= SET_SRC (XVECEXP (exp
, 0, 0));
3717 /* Split one or more DImode RTL references into pairs of SImode
3718 references. The RTL can be REG, offsettable MEM, integer constant, or
3719 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3720 split and "num" is its length. lo_half and hi_half are output arrays
3721 that parallel "operands". */
3724 split_di (operands
, num
, lo_half
, hi_half
)
3727 rtx lo_half
[], hi_half
[];
3731 rtx op
= operands
[num
];
3732 if (GET_CODE (op
) == REG
)
3734 lo_half
[num
] = gen_rtx_REG (SImode
, REGNO (op
));
3735 hi_half
[num
] = gen_rtx_REG (SImode
, REGNO (op
) + 1);
3737 else if (CONSTANT_P (op
))
3738 split_double (op
, &lo_half
[num
], &hi_half
[num
]);
3739 else if (offsettable_memref_p (op
))
3741 rtx lo_addr
= XEXP (op
, 0);
3742 rtx hi_addr
= XEXP (adj_offsettable_operand (op
, 4), 0);
3743 lo_half
[num
] = change_address (op
, SImode
, lo_addr
);
3744 hi_half
[num
] = change_address (op
, SImode
, hi_addr
);
3751 /* Return 1 if this is a valid binary operation on a 387.
3752 OP is the expression matched, and MODE is its mode. */
3755 binary_387_op (op
, mode
)
3757 enum machine_mode mode
;
3759 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3762 switch (GET_CODE (op
))
3768 return GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
;
3775 /* Return 1 if this is a valid shift or rotate operation on a 386.
3776 OP is the expression matched, and MODE is its mode. */
3781 enum machine_mode mode
;
3783 rtx operand
= XEXP (op
, 0);
3785 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3788 if (GET_MODE (operand
) != GET_MODE (op
)
3789 || GET_MODE_CLASS (GET_MODE (op
)) != MODE_INT
)
3792 return (GET_CODE (op
) == ASHIFT
3793 || GET_CODE (op
) == ASHIFTRT
3794 || GET_CODE (op
) == LSHIFTRT
3795 || GET_CODE (op
) == ROTATE
3796 || GET_CODE (op
) == ROTATERT
);
3799 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3800 MODE is not used. */
3803 VOIDmode_compare_op (op
, mode
)
3805 enum machine_mode mode
;
3807 return GET_CODE (op
) == COMPARE
&& GET_MODE (op
) == VOIDmode
;
3810 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3811 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3812 is the expression of the binary operation. The output may either be
3813 emitted here, or returned to the caller, like all output_* functions.
3815 There is no guarantee that the operands are the same mode, as they
3816 might be within FLOAT or FLOAT_EXTEND expressions. */
3819 output_387_binary_op (insn
, operands
)
3825 static char buf
[100];
3827 switch (GET_CODE (operands
[3]))
3830 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3831 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3838 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3839 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3846 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3847 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3854 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3855 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3865 strcpy (buf
, base_op
);
3867 switch (GET_CODE (operands
[3]))
3871 if (REG_P (operands
[2]) && REGNO (operands
[0]) == REGNO (operands
[2]))
3874 operands
[2] = operands
[1];
3878 if (GET_CODE (operands
[2]) == MEM
)
3879 return strcat (buf
, AS1 (%z2
,%2));
3881 if (NON_STACK_REG_P (operands
[1]))
3883 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
3887 else if (NON_STACK_REG_P (operands
[2]))
3889 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3893 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3895 if (STACK_TOP_P (operands
[0]))
3896 return strcat (buf
, AS2 (p
,%0,%2));
3898 return strcat (buf
, AS2 (p
,%2,%0));
3901 if (STACK_TOP_P (operands
[0]))
3902 return strcat (buf
, AS2C (%y2
,%0));
3904 return strcat (buf
, AS2C (%2,%0));
3908 if (GET_CODE (operands
[1]) == MEM
)
3909 return strcat (buf
, AS1 (r
%z1
,%1));
3911 if (GET_CODE (operands
[2]) == MEM
)
3912 return strcat (buf
, AS1 (%z2
,%2));
3914 if (NON_STACK_REG_P (operands
[1]))
3916 output_op_from_reg (operands
[1], strcat (buf
, AS1 (r
%z0
,%1)));
3920 else if (NON_STACK_REG_P (operands
[2]))
3922 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3926 if (! STACK_REG_P (operands
[1]) || ! STACK_REG_P (operands
[2]))
3929 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3931 if (STACK_TOP_P (operands
[0]))
3932 return strcat (buf
, AS2 (p
,%0,%2));
3934 return strcat (buf
, AS2 (rp
,%2,%0));
3937 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1])))
3939 if (STACK_TOP_P (operands
[0]))
3940 return strcat (buf
, AS2 (rp
,%0,%1));
3942 return strcat (buf
, AS2 (p
,%1,%0));
3945 if (STACK_TOP_P (operands
[0]))
3947 if (STACK_TOP_P (operands
[1]))
3948 return strcat (buf
, AS2C (%y2
,%0));
3950 return strcat (buf
, AS2 (r
,%y1
,%0));
3952 else if (STACK_TOP_P (operands
[1]))
3953 return strcat (buf
, AS2C (%1,%0));
3955 return strcat (buf
, AS2 (r
,%2,%0));
3962 /* Output code for INSN to convert a float to a signed int. OPERANDS
3963 are the insn operands. The output may be SFmode or DFmode and the
3964 input operand may be SImode or DImode. As a special case, make sure
3965 that the 387 stack top dies if the output mode is DImode, because the
3966 hardware requires this. */
3969 output_fix_trunc (insn
, operands
)
3973 int stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
3976 if (! STACK_TOP_P (operands
[1]))
3979 xops
[0] = GEN_INT (12);
3980 xops
[1] = operands
[4];
3982 output_asm_insn (AS1 (fnstc
%W2
,%2), operands
);
3983 output_asm_insn (AS2 (mov
%L2
,%2,%4), operands
);
3984 output_asm_insn (AS2 (mov
%B1
,%0,%h1
), xops
);
3985 output_asm_insn (AS2 (mov
%L4
,%4,%3), operands
);
3986 output_asm_insn (AS1 (fldc
%W3
,%3), operands
);
3988 if (NON_STACK_REG_P (operands
[0]))
3989 output_to_reg (operands
[0], stack_top_dies
, operands
[3]);
3991 else if (GET_CODE (operands
[0]) == MEM
)
3994 output_asm_insn (AS1 (fistp
%z0
,%0), operands
);
3995 else if (GET_MODE (operands
[0]) == DImode
&& ! stack_top_dies
)
3997 /* There is no DImode version of this without a stack pop, so
3998 we must emulate it. It doesn't matter much what the second
3999 instruction is, because the value being pushed on the FP stack
4000 is not used except for the following stack popping store.
4001 This case can only happen without optimization, so it doesn't
4002 matter that it is inefficient. */
4003 output_asm_insn (AS1 (fistp
%z0
,%0), operands
);
4004 output_asm_insn (AS1 (fild
%z0
,%0), operands
);
4007 output_asm_insn (AS1 (fist
%z0
,%0), operands
);
4012 return AS1 (fldc
%W2
,%2);
4015 /* Output code for INSN to compare OPERANDS. The two operands might
4016 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
4017 expression. If the compare is in mode CCFPEQmode, use an opcode that
4018 will not fault if a qNaN is present. */
4021 output_float_compare (insn
, operands
)
4026 rtx body
= XVECEXP (PATTERN (insn
), 0, 0);
4027 int unordered_compare
= GET_MODE (SET_SRC (body
)) == CCFPEQmode
;
4030 if (0 && TARGET_CMOVE
&& STACK_REG_P (operands
[1]))
4032 cc_status
.flags
|= CC_FCOMI
;
4033 cc_prev_status
.flags
&= ~CC_TEST_AX
;
4036 if (! STACK_TOP_P (operands
[0]))
4039 operands
[0] = operands
[1];
4041 cc_status
.flags
|= CC_REVERSED
;
4044 if (! STACK_TOP_P (operands
[0]))
4047 stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
4049 if (STACK_REG_P (operands
[1])
4051 && find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1]))
4052 && REGNO (operands
[1]) != FIRST_STACK_REG
)
4054 /* If both the top of the 387 stack dies, and the other operand
4055 is also a stack register that dies, then this must be a
4056 `fcompp' float compare */
4058 if (unordered_compare
)
4060 if (cc_status
.flags
& CC_FCOMI
)
4062 output_asm_insn (AS2 (fucomip
,%y1
,%0), operands
);
4063 output_asm_insn (AS1 (fstp
, %y0
), operands
);
4067 output_asm_insn ("fucompp", operands
);
4071 if (cc_status
.flags
& CC_FCOMI
)
4073 output_asm_insn (AS2 (fcomip
, %y1
,%0), operands
);
4074 output_asm_insn (AS1 (fstp
, %y0
), operands
);
4078 output_asm_insn ("fcompp", operands
);
4083 static char buf
[100];
4085 /* Decide if this is the integer or float compare opcode, or the
4086 unordered float compare. */
4088 if (unordered_compare
)
4089 strcpy (buf
, (cc_status
.flags
& CC_FCOMI
) ? "fucomi" : "fucom");
4090 else if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_FLOAT
)
4091 strcpy (buf
, (cc_status
.flags
& CC_FCOMI
) ? "fcomi" : "fcom");
4093 strcpy (buf
, "ficom");
4095 /* Modify the opcode if the 387 stack is to be popped. */
4100 if (NON_STACK_REG_P (operands
[1]))
4101 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
4102 else if (cc_status
.flags
& CC_FCOMI
)
4104 output_asm_insn (strcat (buf
, AS2 (%z1
,%y1
,%0)), operands
);
4108 output_asm_insn (strcat (buf
, AS1 (%z1
,%y1
)), operands
);
4111 /* Now retrieve the condition code. */
4113 return output_fp_cc0_set (insn
);
4116 /* Output opcodes to transfer the results of FP compare or test INSN
4117 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
4118 result of the compare or test is unordered, no comparison operator
4119 succeeds except NE. Return an output template, if any. */
4122 output_fp_cc0_set (insn
)
4129 xops
[0] = gen_rtx_REG (HImode
, 0);
4130 output_asm_insn (AS1 (fnsts
%W0
,%0), xops
);
4132 if (! TARGET_IEEE_FP
)
4134 if (!(cc_status
.flags
& CC_REVERSED
))
4136 next
= next_cc0_user (insn
);
4138 if (GET_CODE (next
) == JUMP_INSN
4139 && GET_CODE (PATTERN (next
)) == SET
4140 && SET_DEST (PATTERN (next
)) == pc_rtx
4141 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
4142 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
4143 else if (GET_CODE (PATTERN (next
)) == SET
)
4144 code
= GET_CODE (SET_SRC (PATTERN (next
)));
4148 if (code
== GT
|| code
== LT
|| code
== EQ
|| code
== NE
4149 || code
== LE
|| code
== GE
)
4151 /* We will test eax directly. */
4152 cc_status
.flags
|= CC_TEST_AX
;
4160 next
= next_cc0_user (insn
);
4161 if (next
== NULL_RTX
)
4164 if (GET_CODE (next
) == JUMP_INSN
4165 && GET_CODE (PATTERN (next
)) == SET
4166 && SET_DEST (PATTERN (next
)) == pc_rtx
4167 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
4168 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
4169 else if (GET_CODE (PATTERN (next
)) == SET
)
4171 if (GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
4172 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
4174 code
= GET_CODE (SET_SRC (PATTERN (next
)));
4177 else if (GET_CODE (PATTERN (next
)) == PARALLEL
4178 && GET_CODE (XVECEXP (PATTERN (next
), 0, 0)) == SET
)
4180 if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next
), 0, 0))) == IF_THEN_ELSE
)
4181 code
= GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next
), 0, 0)), 0));
4183 code
= GET_CODE (SET_SRC (XVECEXP (PATTERN (next
), 0, 0)));
4188 xops
[0] = gen_rtx_REG (QImode
, 0);
4193 xops
[1] = GEN_INT (0x45);
4194 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4199 xops
[1] = GEN_INT (0x45);
4200 xops
[2] = GEN_INT (0x01);
4201 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4202 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
4207 xops
[1] = GEN_INT (0x05);
4208 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4213 xops
[1] = GEN_INT (0x45);
4214 xops
[2] = GEN_INT (0x40);
4215 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4216 output_asm_insn (AS1 (dec
%B0
,%h0
), xops
);
4217 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
4222 xops
[1] = GEN_INT (0x45);
4223 xops
[2] = GEN_INT (0x40);
4224 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4225 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
4230 xops
[1] = GEN_INT (0x44);
4231 xops
[2] = GEN_INT (0x40);
4232 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4233 output_asm_insn (AS2 (xor%B0
,%2,%h0
), xops
);
4248 #define MAX_386_STACK_LOCALS 2
4250 static rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
4252 /* Define the structure for the machine field in struct function. */
4253 struct machine_function
4255 rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
4257 char pic_label_name
[256];
4260 /* Functions to save and restore i386_stack_locals.
4261 These will be called, via pointer variables,
4262 from push_function_context and pop_function_context. */
4265 save_386_machine_status (p
)
4269 = (struct machine_function
*) xmalloc (sizeof (struct machine_function
));
4270 bcopy ((char *) i386_stack_locals
, (char *) p
->machine
->i386_stack_locals
,
4271 sizeof i386_stack_locals
);
4272 p
->machine
->pic_label_rtx
= pic_label_rtx
;
4273 bcopy (pic_label_name
, p
->machine
->pic_label_name
, 256);
4277 restore_386_machine_status (p
)
4280 bcopy ((char *) p
->machine
->i386_stack_locals
, (char *) i386_stack_locals
,
4281 sizeof i386_stack_locals
);
4282 pic_label_rtx
= p
->machine
->pic_label_rtx
;
4283 bcopy (p
->machine
->pic_label_name
, pic_label_name
, 256);
4288 /* Clear stack slot assignments remembered from previous functions.
4289 This is called from INIT_EXPANDERS once before RTL is emitted for each
4293 clear_386_stack_locals ()
4295 enum machine_mode mode
;
4298 for (mode
= VOIDmode
; (int) mode
< (int) MAX_MACHINE_MODE
;
4299 mode
= (enum machine_mode
) ((int) mode
+ 1))
4300 for (n
= 0; n
< MAX_386_STACK_LOCALS
; n
++)
4301 i386_stack_locals
[(int) mode
][n
] = NULL_RTX
;
4303 pic_label_rtx
= NULL_RTX
;
4304 bzero (pic_label_name
, 256);
4305 /* Arrange to save and restore i386_stack_locals around nested functions. */
4306 save_machine_status
= save_386_machine_status
;
4307 restore_machine_status
= restore_386_machine_status
;
4310 /* Return a MEM corresponding to a stack slot with mode MODE.
4311 Allocate a new slot if necessary.
4313 The RTL for a function can have several slots available: N is
4314 which slot to use. */
4317 assign_386_stack_local (mode
, n
)
4318 enum machine_mode mode
;
4321 if (n
< 0 || n
>= MAX_386_STACK_LOCALS
)
4324 if (i386_stack_locals
[(int) mode
][n
] == NULL_RTX
)
4325 i386_stack_locals
[(int) mode
][n
]
4326 = assign_stack_local (mode
, GET_MODE_SIZE (mode
), 0);
4328 return i386_stack_locals
[(int) mode
][n
];
4333 enum machine_mode mode
;
4335 return (GET_CODE (op
) == MULT
);
4340 enum machine_mode mode
;
4342 return (GET_CODE (op
) == DIV
);
4346 /* Create a new copy of an rtx.
4347 Recursively copies the operands of the rtx,
4348 except for those few rtx codes that are sharable.
4349 Doesn't share CONST */
4357 register RTX_CODE code
;
4358 register char *format_ptr
;
4360 code
= GET_CODE (orig
);
4373 /* SCRATCH must be shared because they represent distinct values. */
4378 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4379 a LABEL_REF, it isn't sharable. */
4380 if (GET_CODE (XEXP (orig
, 0)) == PLUS
4381 && GET_CODE (XEXP (XEXP (orig
, 0), 0)) == SYMBOL_REF
4382 && GET_CODE (XEXP (XEXP (orig
, 0), 1)) == CONST_INT
)
4386 /* A MEM with a constant address is not sharable. The problem is that
4387 the constant address may need to be reloaded. If the mem is shared,
4388 then reloading one copy of this mem will cause all copies to appear
4389 to have been reloaded. */
4392 copy
= rtx_alloc (code
);
4393 PUT_MODE (copy
, GET_MODE (orig
));
4394 copy
->in_struct
= orig
->in_struct
;
4395 copy
->volatil
= orig
->volatil
;
4396 copy
->unchanging
= orig
->unchanging
;
4397 copy
->integrated
= orig
->integrated
;
4399 copy
->is_spill_rtx
= orig
->is_spill_rtx
;
4401 format_ptr
= GET_RTX_FORMAT (GET_CODE (copy
));
4403 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (copy
)); i
++)
4405 switch (*format_ptr
++)
4408 XEXP (copy
, i
) = XEXP (orig
, i
);
4409 if (XEXP (orig
, i
) != NULL
)
4410 XEXP (copy
, i
) = copy_rtx (XEXP (orig
, i
));
4415 XEXP (copy
, i
) = XEXP (orig
, i
);
4420 XVEC (copy
, i
) = XVEC (orig
, i
);
4421 if (XVEC (orig
, i
) != NULL
)
4423 XVEC (copy
, i
) = rtvec_alloc (XVECLEN (orig
, i
));
4424 for (j
= 0; j
< XVECLEN (copy
, i
); j
++)
4425 XVECEXP (copy
, i
, j
) = copy_rtx (XVECEXP (orig
, i
, j
));
4430 XWINT (copy
, i
) = XWINT (orig
, i
);
4434 XINT (copy
, i
) = XINT (orig
, i
);
4439 XSTR (copy
, i
) = XSTR (orig
, i
);
4450 /* Try to rewrite a memory address to make it valid */
4453 rewrite_address (mem_rtx
)
4456 rtx index_rtx
, base_rtx
, offset_rtx
, scale_rtx
, ret_rtx
;
4458 int offset_adjust
= 0;
4459 int was_only_offset
= 0;
4460 rtx mem_addr
= XEXP (mem_rtx
, 0);
4461 char *storage
= oballoc (0);
4463 int is_spill_rtx
= 0;
4465 in_struct
= MEM_IN_STRUCT_P (mem_rtx
);
4466 is_spill_rtx
= RTX_IS_SPILL_P (mem_rtx
);
4468 if (GET_CODE (mem_addr
) == PLUS
4469 && GET_CODE (XEXP (mem_addr
, 1)) == PLUS
4470 && GET_CODE (XEXP (XEXP (mem_addr
, 1), 0)) == REG
)
4472 /* This part is utilized by the combiner. */
4474 = gen_rtx (PLUS
, GET_MODE (mem_addr
),
4475 gen_rtx (PLUS
, GET_MODE (XEXP (mem_addr
, 1)),
4476 XEXP (mem_addr
, 0), XEXP (XEXP (mem_addr
, 1), 0)),
4477 XEXP (XEXP (mem_addr
, 1), 1));
4479 if (memory_address_p (GET_MODE (mem_rtx
), ret_rtx
))
4481 XEXP (mem_rtx
, 0) = ret_rtx
;
4482 RTX_IS_SPILL_P (ret_rtx
) = is_spill_rtx
;
4489 /* This part is utilized by loop.c.
4490 If the address contains PLUS (reg,const) and this pattern is invalid
4491 in this case - try to rewrite the address to make it valid. */
4492 storage
= oballoc (0);
4493 index_rtx
= base_rtx
= offset_rtx
= NULL
;
4495 /* Find the base index and offset elements of the memory address. */
4496 if (GET_CODE (mem_addr
) == PLUS
)
4498 if (GET_CODE (XEXP (mem_addr
, 0)) == REG
)
4500 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4501 base_rtx
= XEXP (mem_addr
, 1), index_rtx
= XEXP (mem_addr
, 0);
4503 base_rtx
= XEXP (mem_addr
, 0), offset_rtx
= XEXP (mem_addr
, 1);
4506 else if (GET_CODE (XEXP (mem_addr
, 0)) == MULT
)
4508 index_rtx
= XEXP (mem_addr
, 0);
4509 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4510 base_rtx
= XEXP (mem_addr
, 1);
4512 offset_rtx
= XEXP (mem_addr
, 1);
4515 else if (GET_CODE (XEXP (mem_addr
, 0)) == PLUS
)
4517 if (GET_CODE (XEXP (XEXP (mem_addr
, 0), 0)) == PLUS
4518 && GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0)) == MULT
4519 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 0))
4521 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 1))
4523 && (GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1))
4525 && GET_CODE (XEXP (XEXP (mem_addr
, 0), 1)) == REG
4526 && GET_CODE (XEXP (mem_addr
, 1)) == SYMBOL_REF
)
4528 index_rtx
= XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0);
4529 offset_rtx
= XEXP (mem_addr
, 1);
4530 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4531 offset_adjust
= INTVAL (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1));
4535 offset_rtx
= XEXP (mem_addr
, 1);
4536 index_rtx
= XEXP (XEXP (mem_addr
, 0), 0);
4537 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4541 else if (GET_CODE (XEXP (mem_addr
, 0)) == CONST_INT
)
4543 was_only_offset
= 1;
4546 offset_rtx
= XEXP (mem_addr
, 1);
4547 offset_adjust
= INTVAL (XEXP (mem_addr
, 0));
4548 if (offset_adjust
== 0)
4550 XEXP (mem_rtx
, 0) = offset_rtx
;
4551 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4561 else if (GET_CODE (mem_addr
) == MULT
)
4562 index_rtx
= mem_addr
;
4569 if (index_rtx
!= 0 && GET_CODE (index_rtx
) == MULT
)
4571 if (GET_CODE (XEXP (index_rtx
, 1)) != CONST_INT
)
4577 scale_rtx
= XEXP (index_rtx
, 1);
4578 scale
= INTVAL (scale_rtx
);
4579 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4582 /* Now find which of the elements are invalid and try to fix them. */
4583 if (index_rtx
&& GET_CODE (index_rtx
) == CONST_INT
&& base_rtx
== NULL
)
4585 offset_adjust
= INTVAL (index_rtx
) * scale
;
4587 if (offset_rtx
!= 0 && CONSTANT_P (offset_rtx
))
4588 offset_rtx
= plus_constant (offset_rtx
, offset_adjust
);
4589 else if (offset_rtx
== 0)
4590 offset_rtx
= const0_rtx
;
4592 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4593 XEXP (mem_rtx
, 0) = offset_rtx
;
4597 if (base_rtx
&& GET_CODE (base_rtx
) == PLUS
4598 && GET_CODE (XEXP (base_rtx
, 0)) == REG
4599 && GET_CODE (XEXP (base_rtx
, 1)) == CONST_INT
)
4601 offset_adjust
+= INTVAL (XEXP (base_rtx
, 1));
4602 base_rtx
= copy_all_rtx (XEXP (base_rtx
, 0));
4605 else if (base_rtx
&& GET_CODE (base_rtx
) == CONST_INT
)
4607 offset_adjust
+= INTVAL (base_rtx
);
4611 if (index_rtx
&& GET_CODE (index_rtx
) == PLUS
4612 && GET_CODE (XEXP (index_rtx
, 0)) == REG
4613 && GET_CODE (XEXP (index_rtx
, 1)) == CONST_INT
)
4615 offset_adjust
+= INTVAL (XEXP (index_rtx
, 1)) * scale
;
4616 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4621 if (! LEGITIMATE_INDEX_P (index_rtx
)
4622 && ! (index_rtx
== stack_pointer_rtx
&& scale
== 1
4623 && base_rtx
== NULL
))
4632 if (! LEGITIMATE_INDEX_P (base_rtx
) && GET_CODE (base_rtx
) != REG
)
4639 if (offset_adjust
!= 0)
4641 if (offset_rtx
!= 0 && CONSTANT_P (offset_rtx
))
4642 offset_rtx
= plus_constant (offset_rtx
, offset_adjust
);
4644 offset_rtx
= const0_rtx
;
4652 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
),
4653 gen_rtx (MULT
, GET_MODE (index_rtx
),
4654 index_rtx
, scale_rtx
),
4657 if (GET_CODE (offset_rtx
) != CONST_INT
4658 || INTVAL (offset_rtx
) != 0)
4659 ret_rtx
= gen_rtx (PLUS
, GET_MODE (ret_rtx
),
4660 ret_rtx
, offset_rtx
);
4664 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
),
4665 index_rtx
, base_rtx
);
4667 if (GET_CODE (offset_rtx
) != CONST_INT
4668 || INTVAL (offset_rtx
) != 0)
4669 ret_rtx
= gen_rtx (PLUS
, GET_MODE (ret_rtx
),
4670 ret_rtx
, offset_rtx
);
4677 ret_rtx
= gen_rtx (MULT
, GET_MODE (index_rtx
),
4678 index_rtx
, scale_rtx
);
4680 if (GET_CODE (offset_rtx
) != CONST_INT
4681 || INTVAL (offset_rtx
) != 0)
4682 ret_rtx
= gen_rtx (PLUS
, GET_MODE (ret_rtx
),
4683 ret_rtx
, offset_rtx
);
4687 if (GET_CODE (offset_rtx
) == CONST_INT
4688 && INTVAL (offset_rtx
) == 0)
4689 ret_rtx
= index_rtx
;
4691 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
),
4692 index_rtx
, offset_rtx
);
4700 if (GET_CODE (offset_rtx
) == CONST_INT
4701 && INTVAL (offset_rtx
) == 0)
4704 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
), base_rtx
,
4707 else if (was_only_offset
)
4708 ret_rtx
= offset_rtx
;
4716 XEXP (mem_rtx
, 0) = ret_rtx
;
4717 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4728 /* Return 1 if the first insn to set cc before INSN also sets the register
4729 REG_RTX; otherwise return 0. */
4731 last_to_set_cc (reg_rtx
, insn
)
4734 rtx prev_insn
= PREV_INSN (insn
);
4738 if (GET_CODE (prev_insn
) == NOTE
)
4741 else if (GET_CODE (prev_insn
) == INSN
)
4743 if (GET_CODE (PATTERN (prev_insn
)) != SET
)
4746 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn
)), reg_rtx
))
4748 if (sets_condition_code (SET_SRC (PATTERN (prev_insn
))))
4754 else if (! doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn
))))
4761 prev_insn
= PREV_INSN (prev_insn
);
4768 doesnt_set_condition_code (pat
)
4771 switch (GET_CODE (pat
))
4784 sets_condition_code (pat
)
4787 switch (GET_CODE (pat
))
4809 str_immediate_operand (op
, mode
)
4811 enum machine_mode mode
;
4813 if (GET_CODE (op
) == CONST_INT
&& INTVAL (op
) <= 32 && INTVAL (op
) >= 0)
4823 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4824 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4825 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4826 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
))
4832 /* Return 1 if the mode of the SET_DEST of insn is floating point
4833 and it is not an fld or a move from memory to memory.
4834 Otherwise return 0 */
4840 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4841 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4842 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4843 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4844 && GET_CODE (SET_DEST (PATTERN (insn
))) == REG
4845 && REGNO (SET_DEST (PATTERN (insn
))) >= FIRST_FLOAT_REG
4846 && GET_CODE (SET_SRC (insn
)) != MEM
)
4852 /* Return 1 if the mode of the SET_DEST of INSN is floating point and is
4853 memory and the source is a register. */
4859 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4860 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4861 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4862 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4863 && GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
4864 && GET_CODE (SET_SRC (PATTERN (insn
))) == REG
)
4870 /* Return 1 if DEP_INSN sets a register which INSN uses as a base
4871 or index to reference memory.
4872 otherwise return 0 */
4875 agi_dependent (insn
, dep_insn
)
4878 if (GET_CODE (dep_insn
) == INSN
4879 && GET_CODE (PATTERN (dep_insn
)) == SET
4880 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == REG
)
4881 return reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn
)), insn
);
4883 if (GET_CODE (dep_insn
) == INSN
&& GET_CODE (PATTERN (dep_insn
)) == SET
4884 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == MEM
4885 && push_operand (SET_DEST (PATTERN (dep_insn
)),
4886 GET_MODE (SET_DEST (PATTERN (dep_insn
)))))
4887 return reg_mentioned_in_mem (stack_pointer_rtx
, insn
);
4892 /* Return 1 if reg is used in rtl as a base or index for a memory ref
4893 otherwise return 0. */
4896 reg_mentioned_in_mem (reg
, rtl
)
4901 register enum rtx_code code
;
4906 code
= GET_CODE (rtl
);
4924 if (code
== MEM
&& reg_mentioned_p (reg
, rtl
))
4927 fmt
= GET_RTX_FORMAT (code
);
4928 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4932 for (j
= XVECLEN (rtl
, i
) - 1; j
>= 0; j
--)
4933 if (reg_mentioned_in_mem (reg
, XVECEXP (rtl
, i
, j
)))
4937 else if (fmt
[i
] == 'e' && reg_mentioned_in_mem (reg
, XEXP (rtl
, i
)))
4944 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4946 operands[0] = result, initialized with the startaddress
4947 operands[1] = alignment of the address.
4948 operands[2] = scratch register, initialized with the startaddress when
4949 not aligned, otherwise undefined
4951 This is just the body. It needs the initialisations mentioned above and
4952 some address computing at the end. These things are done in i386.md. */
4955 output_strlen_unroll (operands
)
4960 xops
[0] = operands
[0]; /* Result */
4961 /* operands[1]; * Alignment */
4962 xops
[1] = operands
[2]; /* Scratch */
4963 xops
[2] = GEN_INT (0);
4964 xops
[3] = GEN_INT (2);
4965 xops
[4] = GEN_INT (3);
4966 xops
[5] = GEN_INT (4);
4967 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4968 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4969 xops
[8] = gen_label_rtx (); /* label of main loop */
4971 if (TARGET_USE_Q_REG
&& QI_REG_P (xops
[1]))
4972 xops
[9] = gen_label_rtx (); /* pentium optimisation */
4974 xops
[10] = gen_label_rtx (); /* end label 2 */
4975 xops
[11] = gen_label_rtx (); /* end label 1 */
4976 xops
[12] = gen_label_rtx (); /* end label */
4977 /* xops[13] * Temporary used */
4978 xops
[14] = GEN_INT (0xff);
4979 xops
[15] = GEN_INT (0xff00);
4980 xops
[16] = GEN_INT (0xff0000);
4981 xops
[17] = GEN_INT (0xff000000);
4983 /* Loop to check 1..3 bytes for null to get an aligned pointer. */
4985 /* Is there a known alignment and is it less than 4? */
4986 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) < 4)
4988 /* Is there a known alignment and is it not 2? */
4989 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
4991 xops
[6] = gen_label_rtx (); /* Label when aligned to 3-byte */
4992 xops
[7] = gen_label_rtx (); /* Label when aligned to 2-byte */
4994 /* Leave just the 3 lower bits.
4995 If this is a q-register, then the high part is used later
4996 therefore use andl rather than andb. */
4997 output_asm_insn (AS2 (and%L1
,%4,%1), xops
);
4999 /* Is aligned to 4-byte address when zero */
5000 output_asm_insn (AS1 (je
,%l8
), xops
);
5002 /* Side-effect even Parity when %eax == 3 */
5003 output_asm_insn (AS1 (jp
,%6), xops
);
5005 /* Is it aligned to 2 bytes ? */
5006 if (QI_REG_P (xops
[1]))
5007 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
5009 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
5011 output_asm_insn (AS1 (je
,%7), xops
);
5015 /* Since the alignment is 2, we have to check 2 or 0 bytes;
5016 check if is aligned to 4 - byte. */
5017 output_asm_insn (AS2 (and%L1
,%3,%1), xops
);
5019 /* Is aligned to 4-byte address when zero */
5020 output_asm_insn (AS1 (je
,%l8
), xops
);
5023 xops
[13] = gen_rtx_MEM (QImode
, xops
[0]);
5025 /* Now compare the bytes; compare with the high part of a q-reg
5026 gives shorter code. */
5027 if (QI_REG_P (xops
[1]))
5029 /* Compare the first n unaligned byte on a byte per byte basis. */
5030 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
5032 /* When zero we reached the end. */
5033 output_asm_insn (AS1 (je
,%l12
), xops
);
5035 /* Increment the address. */
5036 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5038 /* Not needed with an alignment of 2 */
5039 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
5041 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5042 CODE_LABEL_NUMBER (xops
[7]));
5043 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
5044 output_asm_insn (AS1 (je
,%l12
), xops
);
5045 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5047 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5048 CODE_LABEL_NUMBER (xops
[6]));
5051 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
5055 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
5056 output_asm_insn (AS1 (je
,%l12
), xops
);
5057 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5059 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5060 CODE_LABEL_NUMBER (xops
[7]));
5061 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
5062 output_asm_insn (AS1 (je
,%l12
), xops
);
5063 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5065 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5066 CODE_LABEL_NUMBER (xops
[6]));
5067 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
5070 output_asm_insn (AS1 (je
,%l12
), xops
);
5071 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5074 /* Generate loop to check 4 bytes at a time. It is not a good idea to
5075 align this loop. It gives only huge programs, but does not help to
5077 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[8]));
5079 xops
[13] = gen_rtx_MEM (SImode
, xops
[0]);
5080 output_asm_insn (AS2 (mov
%L1
,%13,%1), xops
);
5082 if (QI_REG_P (xops
[1]))
5084 /* On i586 it is faster to combine the hi- and lo- part as
5085 a kind of lookahead. If anding both yields zero, then one
5086 of both *could* be zero, otherwise none of both is zero;
5087 this saves one instruction, on i486 this is slower
5088 tested with P-90, i486DX2-66, AMD486DX2-66 */
5091 output_asm_insn (AS2 (test
%B1
,%h1
,%b1
), xops
);
5092 output_asm_insn (AS1 (jne
,%l9
), xops
);
5095 /* Check first byte. */
5096 output_asm_insn (AS2 (test
%B1
,%b1
,%b1
), xops
);
5097 output_asm_insn (AS1 (je
,%l12
), xops
);
5099 /* Check second byte. */
5100 output_asm_insn (AS2 (test
%B1
,%h1
,%h1
), xops
);
5101 output_asm_insn (AS1 (je
,%l11
), xops
);
5104 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5105 CODE_LABEL_NUMBER (xops
[9]));
5110 /* Check first byte. */
5111 output_asm_insn (AS2 (test
%L1
,%14,%1), xops
);
5112 output_asm_insn (AS1 (je
,%l12
), xops
);
5114 /* Check second byte. */
5115 output_asm_insn (AS2 (test
%L1
,%15,%1), xops
);
5116 output_asm_insn (AS1 (je
,%l11
), xops
);
5119 /* Check third byte. */
5120 output_asm_insn (AS2 (test
%L1
,%16,%1), xops
);
5121 output_asm_insn (AS1 (je
,%l10
), xops
);
5123 /* Check fourth byte and increment address. */
5124 output_asm_insn (AS2 (add
%L0
,%5,%0), xops
);
5125 output_asm_insn (AS2 (test
%L1
,%17,%1), xops
);
5126 output_asm_insn (AS1 (jne
,%l8
), xops
);
5128 /* Now generate fixups when the compare stops within a 4-byte word. */
5129 output_asm_insn (AS2 (sub
%L0
,%4,%0), xops
);
5131 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[10]));
5132 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5134 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[11]));
5135 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5137 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[12]));
5143 output_fp_conditional_move (which_alternative
, operands
)
5144 int which_alternative
;
5147 int code
= GET_CODE (operands
[1]);
5149 /* This is very tricky. We have to do it right. For a code segement
5159 final_scan_insn () may delete the insn which sets CC. We have to
5160 tell final_scan_insn () if it should be reinserted. When CODE is
5161 GT or LE, we have to check the CC_NO_OVERFLOW bit and return
5162 NULL_PTR to tell final to reinsert the test insn because the
5163 conditional move cannot be handled properly without it. */
5164 if ((code
== GT
|| code
== LE
)
5165 && (cc_prev_status
.flags
& CC_NO_OVERFLOW
))
5168 switch (which_alternative
)
5171 /* r <- cond ? arg : r */
5172 output_asm_insn (AS2 (fcmov
%F1
,%2,%0), operands
);
5176 /* r <- cond ? r : arg */
5177 output_asm_insn (AS2 (fcmov
%f1
,%3,%0), operands
);
5181 /* r <- cond ? r : arg */
5182 output_asm_insn (AS2 (fcmov
%F1
,%2,%0), operands
);
5183 output_asm_insn (AS2 (fcmov
%f1
,%3,%0), operands
);
5194 output_int_conditional_move (which_alternative
, operands
)
5195 int which_alternative
;
5198 int code
= GET_CODE (operands
[1]);
5199 enum machine_mode mode
;
5202 /* This is very tricky. We have to do it right. For a code segement
5211 final_scan_insn () may delete the insn which sets CC. We have to
5212 tell final_scan_insn () if it should be reinserted. When CODE is
5213 GT or LE, we have to check the CC_NO_OVERFLOW bit and return
5214 NULL_PTR to tell final to reinsert the test insn because the
5215 conditional move cannot be handled properly without it. */
5216 if ((code
== GT
|| code
== LE
)
5217 && (cc_prev_status
.flags
& CC_NO_OVERFLOW
))
5220 mode
= GET_MODE (operands
[0]);
5223 xops
[0] = gen_rtx_SUBREG (SImode
, operands
[0], 1);
5224 xops
[1] = operands
[1];
5225 xops
[2] = gen_rtx_SUBREG (SImode
, operands
[2], 1);
5226 xops
[3] = gen_rtx_SUBREG (SImode
, operands
[3], 1);
5229 switch (which_alternative
)
5232 /* r <- cond ? arg : r */
5233 output_asm_insn (AS2 (cmov
%C1
,%2,%0), operands
);
5235 output_asm_insn (AS2 (cmov
%C1
,%2,%0), xops
);
5239 /* r <- cond ? r : arg */
5240 output_asm_insn (AS2 (cmov
%c1
,%3,%0), operands
);
5242 output_asm_insn (AS2 (cmov
%c1
,%3,%0), xops
);
5246 /* rm <- cond ? arg1 : arg2 */
5247 output_asm_insn (AS2 (cmov
%C1
,%2,%0), operands
);
5248 output_asm_insn (AS2 (cmov
%c1
,%3,%0), operands
);
5251 output_asm_insn (AS2 (cmov
%C1
,%2,%0), xops
);
5252 output_asm_insn (AS2 (cmov
%c1
,%3,%0), xops
);