1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 92, 94-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
53 #ifdef EXTRA_CONSTRAINT
54 /* If EXTRA_CONSTRAINT is defined, then the 'S'
55 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
56 asm statements that need 'S' for class SIREG will break. */
57 error EXTRA_CONSTRAINT conflicts with S constraint letter
58 /* The previous line used to be #error, but some compilers barf
59 even if the conditional was untrue. */
62 #ifndef CHECK_STACK_LIMIT
/* NOTE(review): -1 appears to mean "no stack-limit checking"; confirm
   against the uses of CHECK_STACK_LIMIT elsewhere in the back end.  */
63 #define CHECK_STACK_LIMIT -1
66 /* Type of an operand for ix86_{binary,unary}_operator_ok */
74 /* Processor costs (relative to an add) */
75 struct processor_costs i386_cost
= { /* 386 specific costs */
76 1, /* cost of an add instruction */
77 1, /* cost of a lea instruction */
78 3, /* variable shift costs */
79 2, /* constant shift costs */
80 6, /* cost of starting a multiply */
81 1, /* cost of multiply per each bit set */
82 23 /* cost of a divide/mod */
85 struct processor_costs i486_cost
= { /* 486 specific costs */
86 1, /* cost of an add instruction */
87 1, /* cost of a lea instruction */
88 3, /* variable shift costs */
89 2, /* constant shift costs */
90 12, /* cost of starting a multiply */
91 1, /* cost of multiply per each bit set */
92 40 /* cost of a divide/mod */
95 struct processor_costs pentium_cost
= {
96 1, /* cost of an add instruction */
97 1, /* cost of a lea instruction */
98 4, /* variable shift costs */
99 1, /* constant shift costs */
100 11, /* cost of starting a multiply */
101 0, /* cost of multiply per each bit set */
102 25 /* cost of a divide/mod */
105 struct processor_costs pentiumpro_cost
= {
106 1, /* cost of an add instruction */
107 1, /* cost of a lea instruction */
108 3, /* variable shift costs */
109 1, /* constant shift costs */
110 4, /* cost of starting a multiply */
111 0, /* cost of multiply per each bit set */
112 17 /* cost of a divide/mod */
/* Cost table for the processor we are scheduling/tuning for.  Defaults
   to the Pentium costs; override_options resets it from
   processor_target_table[].cost according to the -mcpu= selection.  */
115 struct processor_costs
*ix86_cost
= &pentium_cost
;
/* A MEM rtx of the given MODE addressed through the frame pointer.  */
117 #define AT_BP(mode) (gen_rtx_MEM ((mode), frame_pointer_rtx))
/* Stream for the assembler output (defined elsewhere in the compiler).  */
119 extern FILE *asm_out_file
;
120 extern char *strcat ();
/* Prologue/epilogue emitters defined later in this file
   (old-style PROTO() declarations for pre-ANSI compilers).  */
122 static void ix86_epilogue
PROTO((int));
123 static void ix86_prologue
PROTO((int));
/* Forward declarations for the output helpers defined later in this
   file (K&R style, so no parameter lists here).  */
125 char *singlemove_string ();
126 char *output_move_const_single ();
127 char *output_fp_cc0_set ();
/* Printable names for the hard registers, indexed by register number.
   The *_REGISTER_NAMES initializer macros are supplied by the target
   header: full-size names, 8-bit low-half names, and 8-bit high-half
   names respectively.  */
129 char *hi_reg_name
[] = HI_REGISTER_NAMES
;
130 char *qi_reg_name
[] = QI_REGISTER_NAMES
;
131 char *qi_high_reg_name
[] = QI_HIGH_REGISTER_NAMES
;
133 /* Array of the smallest class containing reg number REGNO, indexed by
134 REGNO. Used by REGNO_REG_CLASS in i386.h. */
136 enum reg_class regclass_map
[FIRST_PSEUDO_REGISTER
] =
139 AREG
, DREG
, CREG
, BREG
,
141 SIREG
, DIREG
, INDEX_REGS
, GENERAL_REGS
,
143 FP_TOP_REG
, FP_SECOND_REG
, FLOAT_REGS
, FLOAT_REGS
,
144 FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
,
149 /* Test and compare insns in i386.md store the information needed to
150 generate branch and scc insns here. */
/* The two operands of the pending compare; NULL_RTX when none.  */
152 struct rtx_def
*i386_compare_op0
= NULL_RTX
;
153 struct rtx_def
*i386_compare_op1
= NULL_RTX
;
/* Generator functions for the insn that will consume the compare
   above, plus the variant used for the EQ/NE case.  */
154 struct rtx_def
*(*i386_compare_gen
)(), *(*i386_compare_gen_eq
)();
156 /* which cpu are we scheduling for */
157 enum processor_type ix86_cpu
;
159 /* which instruction set architecture to use. */
162 /* Strings to hold which cpu and instruction set architecture to use. */
163 char *ix86_cpu_string
; /* for -mcpu=<xxx> */
164 char *ix86_arch_string
; /* for -march=<xxx> */
166 /* Register allocation order */
167 char *i386_reg_alloc_order
; /* register order string, parsed in override_options below */
168 static char regs_allocated
[FIRST_PSEUDO_REGISTER
]; /* marks registers already consumed while parsing the order string */
170 /* # of registers to use to pass arguments. */
171 char *i386_regparm_string
; /* for -mregparm=<n>; validated against REGPARM_MAX below */
173 /* i386_regparm_string as a number */
176 /* Alignment to use for loops and jumps: */
178 /* Power of two alignment for loops. */
179 char *i386_align_loops_string
; /* for -malign-loops=<n> */
181 /* Power of two alignment for non-loop jumps. */
182 char *i386_align_jumps_string
; /* for -malign-jumps=<n> */
184 /* Values 1-5: see jump.c */
185 int i386_branch_cost
;
186 char *i386_branch_cost_string
; /* for -mbranch-cost=<n> */
188 /* Power of two alignment for functions. */
189 int i386_align_funcs
;
190 char *i386_align_funcs_string
; /* for -malign-functions=<n> */
192 /* Power of two alignment for loops. */
193 int i386_align_loops
;
195 /* Power of two alignment for non-loop jumps. */
196 int i386_align_jumps
;
198 /* Sometimes certain combinations of command options do not make
199 sense on a particular target machine. You can define a macro
200 `OVERRIDE_OPTIONS' to take account of this. This macro, if
201 defined, is executed once just after all the command options have
204 Don't use this macro to turn on various extra optimizations for
205 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
215 char *name
; /* Canonical processor name. */
216 enum processor_type processor
; /* Processor type enum value. */
217 struct processor_costs
*cost
; /* Processor costs */
218 int target_enable
; /* Target flags to enable. */
219 int target_disable
; /* Target flags to disable. */
220 } processor_target_table
[]
221 = {{PROCESSOR_I386_STRING
, PROCESSOR_I386
, &i386_cost
, 0, 0},
222 {PROCESSOR_I486_STRING
, PROCESSOR_I486
, &i486_cost
, 0, 0},
223 {PROCESSOR_I586_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
224 {PROCESSOR_PENTIUM_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
225 {PROCESSOR_I686_STRING
, PROCESSOR_PENTIUMPRO
, &pentiumpro_cost
,
227 {PROCESSOR_PENTIUMPRO_STRING
, PROCESSOR_PENTIUMPRO
,
228 &pentiumpro_cost
, 0, 0}};
/* Number of entries in processor_target_table above.  */
230 int ptt_size
= sizeof (processor_target_table
) / sizeof (struct ptt
);
232 #ifdef SUBTARGET_OVERRIDE_OPTIONS
233 SUBTARGET_OVERRIDE_OPTIONS
;
236 /* Validate registers in register allocation order. */
237 if (i386_reg_alloc_order
)
239 for (i
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
245 case 'a': regno
= 0; break;
246 case 'd': regno
= 1; break;
247 case 'c': regno
= 2; break;
248 case 'b': regno
= 3; break;
249 case 'S': regno
= 4; break;
250 case 'D': regno
= 5; break;
251 case 'B': regno
= 6; break;
253 default: fatal ("Register '%c' is unknown", ch
);
256 if (regs_allocated
[regno
])
257 fatal ("Register '%c' already specified in allocation order", ch
);
259 regs_allocated
[regno
] = 1;
263 if (ix86_arch_string
== 0)
265 ix86_arch_string
= PROCESSOR_PENTIUM_STRING
;
266 if (ix86_cpu_string
== 0)
267 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
270 for (i
= 0; i
< ptt_size
; i
++)
271 if (! strcmp (ix86_arch_string
, processor_target_table
[i
].name
))
273 ix86_arch
= processor_target_table
[i
].processor
;
274 if (ix86_cpu_string
== 0)
275 ix86_cpu_string
= processor_target_table
[i
].name
;
281 error ("bad value (%s) for -march= switch", ix86_arch_string
);
282 ix86_arch_string
= PROCESSOR_PENTIUM_STRING
;
283 ix86_arch
= PROCESSOR_DEFAULT
;
286 if (ix86_cpu_string
== 0)
287 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
289 for (j
= 0; j
< ptt_size
; j
++)
290 if (! strcmp (ix86_cpu_string
, processor_target_table
[j
].name
))
292 ix86_cpu
= processor_target_table
[j
].processor
;
293 ix86_cost
= processor_target_table
[j
].cost
;
294 if (i
> j
&& (int) ix86_arch
>= (int) PROCESSOR_PENTIUMPRO
)
295 error ("-mcpu=%s does not support -march=%s",
296 ix86_cpu_string
, ix86_arch_string
);
298 target_flags
|= processor_target_table
[j
].target_enable
;
299 target_flags
&= ~processor_target_table
[j
].target_disable
;
305 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string
);
306 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
307 ix86_cpu
= PROCESSOR_DEFAULT
;
310 /* Validate -mregparm= value. */
311 if (i386_regparm_string
)
313 i386_regparm
= atoi (i386_regparm_string
);
314 if (i386_regparm
< 0 || i386_regparm
> REGPARM_MAX
)
315 fatal ("-mregparm=%d is not between 0 and %d",
316 i386_regparm
, REGPARM_MAX
);
319 /* The 486 suffers more from non-aligned cache line fills, and the
320 larger code size results in a larger cache foot-print and more misses.
321 The 486 has a 16 byte cache line, pentium and pentiumpro have a 32 byte
323 def_align
= (TARGET_486
) ? 4 : 2;
325 /* Validate -malign-loops= value, or provide default. */
326 if (i386_align_loops_string
)
328 i386_align_loops
= atoi (i386_align_loops_string
);
329 if (i386_align_loops
< 0 || i386_align_loops
> MAX_CODE_ALIGN
)
330 fatal ("-malign-loops=%d is not between 0 and %d",
331 i386_align_loops
, MAX_CODE_ALIGN
);
334 i386_align_loops
= 2;
336 /* Validate -malign-jumps= value, or provide default. */
337 if (i386_align_jumps_string
)
339 i386_align_jumps
= atoi (i386_align_jumps_string
);
340 if (i386_align_jumps
< 0 || i386_align_jumps
> MAX_CODE_ALIGN
)
341 fatal ("-malign-jumps=%d is not between 0 and %d",
342 i386_align_jumps
, MAX_CODE_ALIGN
);
345 i386_align_jumps
= def_align
;
347 /* Validate -malign-functions= value, or provide default. */
348 if (i386_align_funcs_string
)
350 i386_align_funcs
= atoi (i386_align_funcs_string
);
351 if (i386_align_funcs
< 0 || i386_align_funcs
> MAX_CODE_ALIGN
)
352 fatal ("-malign-functions=%d is not between 0 and %d",
353 i386_align_funcs
, MAX_CODE_ALIGN
);
356 i386_align_funcs
= def_align
;
358 /* Validate -mbranch-cost= value, or provide default. */
359 if (i386_branch_cost_string
)
361 i386_branch_cost
= atoi (i386_branch_cost_string
);
362 if (i386_branch_cost
< 0 || i386_branch_cost
> 5)
363 fatal ("-mbranch-cost=%d is not between 0 and 5",
367 i386_branch_cost
= 1;
369 /* Keep nonleaf frame pointers. */
370 if (TARGET_OMIT_LEAF_FRAME_POINTER
)
371 flag_omit_frame_pointer
= 1;
374 /* A C statement (sans semicolon) to choose the order in which to
375 allocate hard registers for pseudo-registers local to a basic
378 Store the desired register order in the array `reg_alloc_order'.
379 Element 0 should be the register to allocate first; element 1, the
380 next register; and so on.
382 The macro body should not assume anything about the contents of
383 `reg_alloc_order' before execution of the macro.
385 On most machines, it is not necessary to define this macro. */
388 order_regs_for_local_alloc ()
392 /* User specified the register allocation order. */
394 if (i386_reg_alloc_order
)
396 for (i
= order
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
402 case 'a': regno
= 0; break;
403 case 'd': regno
= 1; break;
404 case 'c': regno
= 2; break;
405 case 'b': regno
= 3; break;
406 case 'S': regno
= 4; break;
407 case 'D': regno
= 5; break;
408 case 'B': regno
= 6; break;
411 reg_alloc_order
[order
++] = regno
;
414 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
416 if (! regs_allocated
[i
])
417 reg_alloc_order
[order
++] = i
;
421 /* If user did not specify a register allocation order, use natural order. */
424 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
425 reg_alloc_order
[i
] = i
;
430 optimization_options (level
, size
)
434 /* For -O2 and beyond, turn off -fschedule-insns by default. It tends to
435 make the problem with not enough registers even worse. */
436 #ifdef INSN_SCHEDULING
438 flag_schedule_insns
= 0;
442 /* Sign-extend a 16-bit constant */
445 i386_sext16_if_const (op
)
448 if (GET_CODE (op
) == CONST_INT
)
450 HOST_WIDE_INT val
= INTVAL (op
);
451 HOST_WIDE_INT sext_val
;
453 sext_val
= val
| ~0xffff;
455 sext_val
= val
& 0xffff;
457 op
= GEN_INT (sext_val
);
462 /* Return nonzero if the rtx is aligned */
465 i386_aligned_reg_p (regno
)
468 return (regno
== STACK_POINTER_REGNUM
469 || (! flag_omit_frame_pointer
&& regno
== FRAME_POINTER_REGNUM
));
476 /* Registers and immediate operands are always "aligned". */
477 if (GET_CODE (op
) != MEM
)
480 /* Don't even try to do any aligned optimizations with volatiles. */
481 if (MEM_VOLATILE_P (op
))
484 /* Get address of memory operand. */
487 switch (GET_CODE (op
))
494 /* Match "reg + offset" */
496 if (GET_CODE (XEXP (op
, 1)) != CONST_INT
)
498 if (INTVAL (XEXP (op
, 1)) & 3)
502 if (GET_CODE (op
) != REG
)
505 /* ... fall through ... */
508 return i386_aligned_reg_p (REGNO (op
));
517 /* Return nonzero if INSN looks like it won't compute useful cc bits
518 as a side effect. This information is only a hint. */
521 i386_cc_probably_useless_p (insn
)
524 return ! next_cc0_user (insn
);
527 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
528 attribute for DECL. The attributes in ATTRIBUTES have previously been
532 i386_valid_decl_attribute_p (decl
, attributes
, identifier
, args
)
541 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
542 attribute for TYPE. The attributes in ATTRIBUTES have previously been
546 i386_valid_type_attribute_p (type
, attributes
, identifier
, args
)
552 if (TREE_CODE (type
) != FUNCTION_TYPE
553 && TREE_CODE (type
) != FIELD_DECL
554 && TREE_CODE (type
) != TYPE_DECL
)
557 /* Stdcall attribute says callee is responsible for popping arguments
558 if they are not variable. */
559 if (is_attribute_p ("stdcall", identifier
))
560 return (args
== NULL_TREE
);
562 /* Cdecl attribute says the callee is a normal C declaration. */
563 if (is_attribute_p ("cdecl", identifier
))
564 return (args
== NULL_TREE
);
566 /* Regparm attribute specifies how many integer arguments are to be
567 passed in registers. */
568 if (is_attribute_p ("regparm", identifier
))
572 if (! args
|| TREE_CODE (args
) != TREE_LIST
573 || TREE_CHAIN (args
) != NULL_TREE
574 || TREE_VALUE (args
) == NULL_TREE
)
577 cst
= TREE_VALUE (args
);
578 if (TREE_CODE (cst
) != INTEGER_CST
)
581 if (TREE_INT_CST_HIGH (cst
) != 0
582 || TREE_INT_CST_LOW (cst
) < 0
583 || TREE_INT_CST_LOW (cst
) > REGPARM_MAX
)
592 /* Return 0 if the attributes for two types are incompatible, 1 if they
593 are compatible, and 2 if they are nearly compatible (which causes a
594 warning to be generated). */
597 i386_comp_type_attributes (type1
, type2
)
605 /* Value is the number of bytes of arguments automatically
606 popped when returning from a subroutine call.
607 FUNDECL is the declaration node of the function (as a tree),
608 FUNTYPE is the data type of the function (as a tree),
609 or for a library call it is an identifier node for the subroutine name.
610 SIZE is the number of bytes of arguments passed on the stack.
612 On the 80386, the RTD insn may be used to pop them if the number
613 of args is fixed, but if the number is variable then the caller
614 must pop them all. RTD can't be used for library calls now
615 because the library is compiled with the Unix compiler.
616 Use of RTD is a selectable option, since it is incompatible with
617 standard Unix calling sequences. If the option is not selected,
618 the caller must always pop the args.
620 The attribute stdcall is equivalent to RTD on a per module basis. */
623 i386_return_pops_args (fundecl
, funtype
, size
)
628 int rtd
= TARGET_RTD
&& (!fundecl
|| TREE_CODE (fundecl
) != IDENTIFIER_NODE
);
630 /* Cdecl functions override -mrtd, and never pop the stack. */
631 if (! lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype
))) {
633 /* Stdcall functions will pop the stack if not variable args. */
634 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype
)))
638 && (TYPE_ARG_TYPES (funtype
) == NULL_TREE
639 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype
)))
644 /* Lose any fake structure return argument. */
645 if (aggregate_value_p (TREE_TYPE (funtype
)))
646 return GET_MODE_SIZE (Pmode
);
652 /* Argument support functions. */
654 /* Initialize a variable CUM of type CUMULATIVE_ARGS
655 for a call to a function whose data type is FNTYPE.
656 For a library call, FNTYPE is 0. */
659 init_cumulative_args (cum
, fntype
, libname
)
660 CUMULATIVE_ARGS
*cum
; /* Argument info to initialize */
661 tree fntype
; /* tree ptr for function decl */
662 rtx libname
; /* SYMBOL_REF of library name or 0 */
664 static CUMULATIVE_ARGS zero_cum
;
665 tree param
, next_param
;
667 if (TARGET_DEBUG_ARG
)
669 fprintf (stderr
, "\ninit_cumulative_args (");
671 fprintf (stderr
, "fntype code = %s, ret code = %s",
672 tree_code_name
[(int) TREE_CODE (fntype
)],
673 tree_code_name
[(int) TREE_CODE (TREE_TYPE (fntype
))]);
675 fprintf (stderr
, "no fntype");
678 fprintf (stderr
, ", libname = %s", XSTR (libname
, 0));
683 /* Set up the number of registers to use for passing arguments. */
684 cum
->nregs
= i386_regparm
;
687 tree attr
= lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype
));
690 cum
->nregs
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr
)));
693 /* Determine if this function has variable arguments. This is
694 indicated by the last argument being 'void_type_mode' if there
695 are no variable arguments. If there are variable arguments, then
696 we won't pass anything in registers */
700 for (param
= (fntype
) ? TYPE_ARG_TYPES (fntype
) : 0;
701 param
!= 0; param
= next_param
)
703 next_param
= TREE_CHAIN (param
);
704 if (next_param
== 0 && TREE_VALUE (param
) != void_type_node
)
709 if (TARGET_DEBUG_ARG
)
710 fprintf (stderr
, ", nregs=%d )\n", cum
->nregs
);
715 /* Update the data in CUM to advance over an argument
716 of mode MODE and data type TYPE.
717 (TYPE is null for libcalls where that information may not be available.) */
720 function_arg_advance (cum
, mode
, type
, named
)
721 CUMULATIVE_ARGS
*cum
; /* current arg information */
722 enum machine_mode mode
; /* current arg mode */
723 tree type
; /* type of the argument or 0 if lib support */
724 int named
; /* whether or not the argument was named */
727 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
728 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
730 if (TARGET_DEBUG_ARG
)
732 "function_adv (sz=%d, wds=%2d, nregs=%d, mode=%s, named=%d)\n\n",
733 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
748 /* Define where to put the arguments to a function.
749 Value is zero to push the argument on the stack,
750 or a hard register in which to store the argument.
752 MODE is the argument's machine mode.
753 TYPE is the data type of the argument (as a tree).
754 This is null for libcalls where that information may
756 CUM is a variable of type CUMULATIVE_ARGS which gives info about
757 the preceding args and about the function being called.
758 NAMED is nonzero if this argument is a named parameter
759 (otherwise it is an extra parameter matching an ellipsis). */
762 function_arg (cum
, mode
, type
, named
)
763 CUMULATIVE_ARGS
*cum
; /* current arg information */
764 enum machine_mode mode
; /* current arg mode */
765 tree type
; /* type of the argument or 0 if lib support */
766 int named
; /* != 0 for normal args, == 0 for ... args */
770 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
771 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
775 /* For now, pass fp/complex values on the stack. */
784 if (words
<= cum
->nregs
)
785 ret
= gen_rtx_REG (mode
, cum
->regno
);
789 if (TARGET_DEBUG_ARG
)
792 "function_arg (size=%d, wds=%2d, nregs=%d, mode=%4s, named=%d",
793 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
796 fprintf (stderr
, ", reg=%%e%s", reg_names
[ REGNO(ret
) ]);
798 fprintf (stderr
, ", stack");
800 fprintf (stderr
, " )\n");
806 /* For an arg passed partly in registers and partly in memory,
807 this is the number of registers used.
808 For args passed entirely in registers or entirely in memory, zero. */
811 function_arg_partial_nregs (cum
, mode
, type
, named
)
812 CUMULATIVE_ARGS
*cum
; /* current arg information */
813 enum machine_mode mode
; /* current arg mode */
814 tree type
; /* type of the argument or 0 if lib support */
815 int named
; /* != 0 for normal args, == 0 for ... args */
820 /* Output an insn whose source is a 386 integer register. SRC is the
821 rtx for the register, and TEMPLATE is the op-code template. SRC may
822 be either SImode or DImode.
824 The template will be output with operands[0] as SRC, and operands[1]
825 as a pointer to the top of the 386 stack. So a call from floatsidf2
826 would look like this:
828 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
830 where %z0 corresponds to the caller's operands[1], and is used to
831 emit the proper size suffix.
833 ??? Extend this to handle HImode - a 387 can load and store HImode
837 output_op_from_reg (src
, template)
842 int size
= GET_MODE_SIZE (GET_MODE (src
));
845 xops
[1] = AT_SP (Pmode
);
846 xops
[2] = GEN_INT (size
);
847 xops
[3] = stack_pointer_rtx
;
849 if (size
> UNITS_PER_WORD
)
853 if (size
> 2 * UNITS_PER_WORD
)
855 high
= gen_rtx_REG (SImode
, REGNO (src
) + 2);
856 output_asm_insn (AS1 (push
%L0
,%0), &high
);
859 high
= gen_rtx_REG (SImode
, REGNO (src
) + 1);
860 output_asm_insn (AS1 (push
%L0
,%0), &high
);
863 output_asm_insn (AS1 (push
%L0
,%0), &src
);
864 output_asm_insn (template, xops
);
865 output_asm_insn (AS2 (add
%L3
,%2,%3), xops
);
868 /* Output an insn to pop an value from the 387 top-of-stack to 386
869 register DEST. The 387 register stack is popped if DIES is true. If
870 the mode of DEST is an integer mode, a `fist' integer store is done,
871 otherwise a `fst' float store is done. */
874 output_to_reg (dest
, dies
, scratch_mem
)
880 int size
= GET_MODE_SIZE (GET_MODE (dest
));
883 xops
[0] = AT_SP (Pmode
);
885 xops
[0] = scratch_mem
;
887 xops
[1] = stack_pointer_rtx
;
888 xops
[2] = GEN_INT (size
);
892 output_asm_insn (AS2 (sub
%L1
,%2,%1), xops
);
894 if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_INT
)
897 output_asm_insn (AS1 (fistp
%z3
,%y0
), xops
);
899 output_asm_insn (AS1 (fist
%z3
,%y0
), xops
);
902 else if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_FLOAT
)
905 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
908 if (GET_MODE (dest
) == XFmode
)
910 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
911 output_asm_insn (AS1 (fld
%z3
,%y0
), xops
);
914 output_asm_insn (AS1 (fst
%z3
,%y0
), xops
);
922 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
924 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
927 if (size
> UNITS_PER_WORD
)
929 dest
= gen_rtx_REG (SImode
, REGNO (dest
) + 1);
931 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
934 xops
[0] = adj_offsettable_operand (xops
[0], 4);
936 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
939 if (size
> 2 * UNITS_PER_WORD
)
941 dest
= gen_rtx_REG (SImode
, REGNO (dest
) + 1);
943 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
946 xops
[0] = adj_offsettable_operand (xops
[0], 4);
947 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
954 singlemove_string (operands
)
958 if (GET_CODE (operands
[0]) == MEM
959 && GET_CODE (x
= XEXP (operands
[0], 0)) == PRE_DEC
)
961 if (XEXP (x
, 0) != stack_pointer_rtx
)
965 else if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
966 return output_move_const_single (operands
);
967 else if (GET_CODE (operands
[0]) == REG
|| GET_CODE (operands
[1]) == REG
)
968 return AS2 (mov
%L0
,%1,%0);
969 else if (CONSTANT_P (operands
[1]))
970 return AS2 (mov
%L0
,%1,%0);
973 output_asm_insn ("push%L1 %1", operands
);
978 /* Return a REG that occurs in ADDR with coefficient 1.
979 ADDR can be effectively incremented by incrementing REG. */
985 while (GET_CODE (addr
) == PLUS
)
987 if (GET_CODE (XEXP (addr
, 0)) == REG
)
988 addr
= XEXP (addr
, 0);
989 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
990 addr
= XEXP (addr
, 1);
991 else if (CONSTANT_P (XEXP (addr
, 0)))
992 addr
= XEXP (addr
, 1);
993 else if (CONSTANT_P (XEXP (addr
, 1)))
994 addr
= XEXP (addr
, 0);
999 if (GET_CODE (addr
) == REG
)
1004 /* Output an insn to add the constant N to the register X. */
1015 output_asm_insn (AS1 (dec
%L0
,%0), xops
);
1017 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
1018 else if (n
< 0 || n
== 128)
1020 xops
[1] = GEN_INT (-n
);
1021 output_asm_insn (AS2 (sub
%L0
,%1,%0), xops
);
1025 xops
[1] = GEN_INT (n
);
1026 output_asm_insn (AS2 (add
%L0
,%1,%0), xops
);
1030 /* Output assembler code to perform a doubleword move insn
1031 with operands OPERANDS. */
1034 output_move_double (operands
)
1037 enum {REGOP
, OFFSOP
, MEMOP
, PUSHOP
, POPOP
, CNSTOP
, RNDOP
} optype0
, optype1
;
1041 rtx addreg0
= 0, addreg1
= 0;
1042 int dest_overlapped_low
= 0;
1043 int size
= GET_MODE_SIZE (GET_MODE (operands
[0]));
1048 /* First classify both operands. */
1050 if (REG_P (operands
[0]))
1052 else if (offsettable_memref_p (operands
[0]))
1054 else if (GET_CODE (XEXP (operands
[0], 0)) == POST_INC
)
1056 else if (GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
)
1058 else if (GET_CODE (operands
[0]) == MEM
)
1063 if (REG_P (operands
[1]))
1065 else if (CONSTANT_P (operands
[1]))
1067 else if (offsettable_memref_p (operands
[1]))
1069 else if (GET_CODE (XEXP (operands
[1], 0)) == POST_INC
)
1071 else if (GET_CODE (XEXP (operands
[1], 0)) == PRE_DEC
)
1073 else if (GET_CODE (operands
[1]) == MEM
)
1078 /* Check for the cases that the operand constraints are not
1079 supposed to allow to happen. Abort if we get one,
1080 because generating code for these cases is painful. */
1082 if (optype0
== RNDOP
|| optype1
== RNDOP
)
1085 /* If one operand is decrementing and one is incrementing
1086 decrement the former register explicitly
1087 and change that operand into ordinary indexing. */
1089 if (optype0
== PUSHOP
&& optype1
== POPOP
)
1091 /* ??? Can this ever happen on i386? */
1092 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
1093 asm_add (-size
, operands
[0]);
1094 if (GET_MODE (operands
[1]) == XFmode
)
1095 operands
[0] = gen_rtx_MEM (XFmode
, operands
[0]);
1096 else if (GET_MODE (operands
[0]) == DFmode
)
1097 operands
[0] = gen_rtx_MEM (DFmode
, operands
[0]);
1099 operands
[0] = gen_rtx_MEM (DImode
, operands
[0]);
1103 if (optype0
== POPOP
&& optype1
== PUSHOP
)
1105 /* ??? Can this ever happen on i386? */
1106 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
1107 asm_add (-size
, operands
[1]);
1108 if (GET_MODE (operands
[1]) == XFmode
)
1109 operands
[1] = gen_rtx_MEM (XFmode
, operands
[1]);
1110 else if (GET_MODE (operands
[1]) == DFmode
)
1111 operands
[1] = gen_rtx_MEM (DFmode
, operands
[1]);
1113 operands
[1] = gen_rtx_MEM (DImode
, operands
[1]);
1117 /* If an operand is an unoffsettable memory ref, find a register
1118 we can increment temporarily to make it refer to the second word. */
1120 if (optype0
== MEMOP
)
1121 addreg0
= find_addr_reg (XEXP (operands
[0], 0));
1123 if (optype1
== MEMOP
)
1124 addreg1
= find_addr_reg (XEXP (operands
[1], 0));
1126 /* Ok, we can do one word at a time.
1127 Normally we do the low-numbered word first,
1128 but if either operand is autodecrementing then we
1129 do the high-numbered word first.
1131 In either case, set up in LATEHALF the operands to use
1132 for the high-numbered word and in some cases alter the
1133 operands in OPERANDS to be suitable for the low-numbered word. */
1137 if (optype0
== REGOP
)
1139 middlehalf
[0] = gen_rtx_REG (SImode
, REGNO (operands
[0]) + 1);
1140 latehalf
[0] = gen_rtx_REG (SImode
, REGNO (operands
[0]) + 2);
1142 else if (optype0
== OFFSOP
)
1144 middlehalf
[0] = adj_offsettable_operand (operands
[0], 4);
1145 latehalf
[0] = adj_offsettable_operand (operands
[0], 8);
1149 middlehalf
[0] = operands
[0];
1150 latehalf
[0] = operands
[0];
1153 if (optype1
== REGOP
)
1155 middlehalf
[1] = gen_rtx_REG (SImode
, REGNO (operands
[1]) + 1);
1156 latehalf
[1] = gen_rtx_REG (SImode
, REGNO (operands
[1]) + 2);
1158 else if (optype1
== OFFSOP
)
1160 middlehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1161 latehalf
[1] = adj_offsettable_operand (operands
[1], 8);
1163 else if (optype1
== CNSTOP
)
1165 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1167 REAL_VALUE_TYPE r
; long l
[3];
1169 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1170 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r
, l
);
1171 operands
[1] = GEN_INT (l
[0]);
1172 middlehalf
[1] = GEN_INT (l
[1]);
1173 latehalf
[1] = GEN_INT (l
[2]);
1175 else if (CONSTANT_P (operands
[1]))
1176 /* No non-CONST_DOUBLE constant should ever appear here. */
1181 middlehalf
[1] = operands
[1];
1182 latehalf
[1] = operands
[1];
1188 /* Size is not 12. */
1190 if (optype0
== REGOP
)
1191 latehalf
[0] = gen_rtx_REG (SImode
, REGNO (operands
[0]) + 1);
1192 else if (optype0
== OFFSOP
)
1193 latehalf
[0] = adj_offsettable_operand (operands
[0], 4);
1195 latehalf
[0] = operands
[0];
1197 if (optype1
== REGOP
)
1198 latehalf
[1] = gen_rtx_REG (SImode
, REGNO (operands
[1]) + 1);
1199 else if (optype1
== OFFSOP
)
1200 latehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1201 else if (optype1
== CNSTOP
)
1202 split_double (operands
[1], &operands
[1], &latehalf
[1]);
1204 latehalf
[1] = operands
[1];
1207 /* If insn is effectively movd N (sp),-(sp) then we will do the
1208 high word first. We should use the adjusted operand 1
1209 (which is N+4 (sp) or N+8 (sp))
1210 for the low word and middle word as well,
1211 to compensate for the first decrement of sp. */
1212 if (optype0
== PUSHOP
1213 && REGNO (XEXP (XEXP (operands
[0], 0), 0)) == STACK_POINTER_REGNUM
1214 && reg_overlap_mentioned_p (stack_pointer_rtx
, operands
[1]))
1215 middlehalf
[1] = operands
[1] = latehalf
[1];
1217 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1218 if the upper part of reg N does not appear in the MEM, arrange to
1219 emit the move late-half first. Otherwise, compute the MEM address
1220 into the upper part of N and use that as a pointer to the memory
1222 if (optype0
== REGOP
1223 && (optype1
== OFFSOP
|| optype1
== MEMOP
))
1225 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1226 && reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1228 /* If both halves of dest are used in the src memory address,
1229 compute the address into latehalf of dest. */
1231 xops
[0] = latehalf
[0];
1232 xops
[1] = XEXP (operands
[1], 0);
1233 output_asm_insn (AS2 (lea
%L0
,%a1
,%0), xops
);
1234 if (GET_MODE (operands
[1]) == XFmode
)
1236 operands
[1] = gen_rtx_MEM (XFmode
, latehalf
[0]);
1237 middlehalf
[1] = adj_offsettable_operand (operands
[1], size
-8);
1238 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1242 operands
[1] = gen_rtx_MEM (DImode
, latehalf
[0]);
1243 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1248 && reg_mentioned_p (middlehalf
[0], XEXP (operands
[1], 0)))
1250 /* Check for two regs used by both source and dest. */
1251 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1252 || reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1255 /* JRV says this can't happen: */
1256 if (addreg0
|| addreg1
)
1259 /* Only the middle reg conflicts; simply put it last. */
1260 output_asm_insn (singlemove_string (operands
), operands
);
1261 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1262 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1266 else if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0)))
1267 /* If the low half of dest is mentioned in the source memory
1268 address, the arrange to emit the move late half first. */
1269 dest_overlapped_low
= 1;
1272 /* If one or both operands autodecrementing,
1273 do the two words, high-numbered first. */
1275 /* Likewise, the first move would clobber the source of the second one,
1276 do them in the other order. This happens only for registers;
1277 such overlap can't happen in memory unless the user explicitly
1278 sets it up, and that is an undefined circumstance. */
1281 if (optype0
== PUSHOP
|| optype1
== PUSHOP
1282 || (optype0
== REGOP
&& optype1
== REGOP
1283 && REGNO (operands
[0]) == REGNO (latehalf
[1]))
1284 || dest_overlapped_low
)
1287 if (optype0
== PUSHOP
|| optype1
== PUSHOP
1288 || (optype0
== REGOP
&& optype1
== REGOP
1289 && ((middlehalf
[1] && REGNO (operands
[0]) == REGNO (middlehalf
[1]))
1290 || REGNO (operands
[0]) == REGNO (latehalf
[1])))
1291 || dest_overlapped_low
)
1293 /* Make any unoffsettable addresses point at high-numbered word. */
1295 asm_add (size
-4, addreg0
);
1297 asm_add (size
-4, addreg1
);
1300 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1302 /* Undo the adds we just did. */
1304 asm_add (-4, addreg0
);
1306 asm_add (-4, addreg1
);
1310 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1312 asm_add (-4, addreg0
);
1314 asm_add (-4, addreg1
);
1317 /* Do low-numbered word. */
1318 return singlemove_string (operands
);
1321 /* Normal case: do the two words, low-numbered first. */
1323 output_asm_insn (singlemove_string (operands
), operands
);
1325 /* Do the middle one of the three words for long double */
1329 asm_add (4, addreg0
);
1331 asm_add (4, addreg1
);
1333 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1336 /* Make any unoffsettable addresses point at high-numbered word. */
1338 asm_add (4, addreg0
);
1340 asm_add (4, addreg1
);
1343 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1345 /* Undo the adds we just did. */
1347 asm_add (4-size
, addreg0
);
1349 asm_add (4-size
, addreg1
);
1354 #define MAX_TMPS 2 /* max temporary registers used */
1356 /* Output the appropriate code to move push memory on the stack */
1359 output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
)
1371 } tmp_info
[MAX_TMPS
];
1373 rtx src
= operands
[1];
1376 int stack_p
= reg_overlap_mentioned_p (stack_pointer_rtx
, src
);
1377 int stack_offset
= 0;
1381 if (! offsettable_memref_p (src
))
1382 fatal_insn ("Source is not offsettable", insn
);
1384 if ((length
& 3) != 0)
1385 fatal_insn ("Pushing non-word aligned size", insn
);
1387 /* Figure out which temporary registers we have available */
1388 for (i
= tmp_start
; i
< n_operands
; i
++)
1390 if (GET_CODE (operands
[i
]) == REG
)
1392 if (reg_overlap_mentioned_p (operands
[i
], src
))
1395 tmp_info
[ max_tmps
++ ].xops
[1] = operands
[i
];
1396 if (max_tmps
== MAX_TMPS
)
1402 for (offset
= length
- 4; offset
>= 0; offset
-= 4)
1404 xops
[0] = adj_offsettable_operand (src
, offset
+ stack_offset
);
1405 output_asm_insn (AS1(push
%L0
,%0), xops
);
1411 for (offset
= length
- 4; offset
>= 0; )
1413 for (num_tmps
= 0; num_tmps
< max_tmps
&& offset
>= 0; num_tmps
++)
1415 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%0,%1);
1416 tmp_info
[num_tmps
].push
= AS1(push
%L0
,%1);
1417 tmp_info
[num_tmps
].xops
[0]
1418 = adj_offsettable_operand (src
, offset
+ stack_offset
);
1422 for (i
= 0; i
< num_tmps
; i
++)
1423 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1425 for (i
= 0; i
< num_tmps
; i
++)
1426 output_asm_insn (tmp_info
[i
].push
, tmp_info
[i
].xops
);
1429 stack_offset
+= 4*num_tmps
;
1435 /* Output the appropriate code to move data between two memory locations */
1438 output_move_memory (operands
, insn
, length
, tmp_start
, n_operands
)
1450 } tmp_info
[MAX_TMPS
];
1452 rtx dest
= operands
[0];
1453 rtx src
= operands
[1];
1454 rtx qi_tmp
= NULL_RTX
;
1460 if (GET_CODE (dest
) == MEM
1461 && GET_CODE (XEXP (dest
, 0)) == PRE_INC
1462 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
)
1463 return output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
);
1465 if (! offsettable_memref_p (src
))
1466 fatal_insn ("Source is not offsettable", insn
);
1468 if (! offsettable_memref_p (dest
))
1469 fatal_insn ("Destination is not offsettable", insn
);
1471 /* Figure out which temporary registers we have available */
1472 for (i
= tmp_start
; i
< n_operands
; i
++)
1474 if (GET_CODE (operands
[i
]) == REG
)
1476 if ((length
& 1) != 0 && qi_tmp
== 0 && QI_REG_P (operands
[i
]))
1477 qi_tmp
= operands
[i
];
1479 if (reg_overlap_mentioned_p (operands
[i
], dest
))
1480 fatal_insn ("Temporary register overlaps the destination", insn
);
1482 if (reg_overlap_mentioned_p (operands
[i
], src
))
1483 fatal_insn ("Temporary register overlaps the source", insn
);
1485 tmp_info
[max_tmps
++].xops
[2] = operands
[i
];
1486 if (max_tmps
== MAX_TMPS
)
1492 fatal_insn ("No scratch registers were found to do memory->memory moves",
1495 if ((length
& 1) != 0)
1498 fatal_insn ("No byte register found when moving odd # of bytes.",
1504 for (num_tmps
= 0; num_tmps
< max_tmps
; num_tmps
++)
1508 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%1,%2);
1509 tmp_info
[num_tmps
].store
= AS2(mov
%L0
,%2,%0);
1510 tmp_info
[num_tmps
].xops
[0]
1511 = adj_offsettable_operand (dest
, offset
);
1512 tmp_info
[num_tmps
].xops
[1]
1513 = adj_offsettable_operand (src
, offset
);
1519 else if (length
>= 2)
1521 tmp_info
[num_tmps
].load
= AS2(mov
%W0
,%1,%2);
1522 tmp_info
[num_tmps
].store
= AS2(mov
%W0
,%2,%0);
1523 tmp_info
[num_tmps
].xops
[0]
1524 = adj_offsettable_operand (dest
, offset
);
1525 tmp_info
[num_tmps
].xops
[1]
1526 = adj_offsettable_operand (src
, offset
);
1535 for (i
= 0; i
< num_tmps
; i
++)
1536 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1538 for (i
= 0; i
< num_tmps
; i
++)
1539 output_asm_insn (tmp_info
[i
].store
, tmp_info
[i
].xops
);
1544 xops
[0] = adj_offsettable_operand (dest
, offset
);
1545 xops
[1] = adj_offsettable_operand (src
, offset
);
1547 output_asm_insn (AS2(mov
%B0
,%1,%2), xops
);
1548 output_asm_insn (AS2(mov
%B0
,%2,%0), xops
);
1555 standard_80387_constant_p (x
)
1558 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1563 if (setjmp (handler
))
1566 set_float_handler (handler
);
1567 REAL_VALUE_FROM_CONST_DOUBLE (d
, x
);
1568 is0
= REAL_VALUES_EQUAL (d
, dconst0
) && !REAL_VALUE_MINUS_ZERO (d
);
1569 is1
= REAL_VALUES_EQUAL (d
, dconst1
);
1570 set_float_handler (NULL_PTR
);
1578 /* Note that on the 80387, other constants, such as pi,
1579 are much slower to load as standard constants
1580 than to load from doubles in memory! */
1587 output_move_const_single (operands
)
1590 if (FP_REG_P (operands
[0]))
1592 int conval
= standard_80387_constant_p (operands
[1]);
1601 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1603 REAL_VALUE_TYPE r
; long l
;
1605 if (GET_MODE (operands
[1]) == XFmode
)
1608 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1609 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
1610 operands
[1] = GEN_INT (l
);
1613 return singlemove_string (operands
);
1616 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1617 reference and a constant. */
1620 symbolic_operand (op
, mode
)
1622 enum machine_mode mode
;
1624 switch (GET_CODE (op
))
1632 return ((GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
1633 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
1634 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
1641 /* Test for a valid operand for a call instruction.
1642 Don't allow the arg pointer register or virtual regs
1643 since they may change into reg + const, which the patterns
1644 can't handle yet. */
1647 call_insn_operand (op
, mode
)
1649 enum machine_mode mode
;
1651 if (GET_CODE (op
) == MEM
1652 && ((CONSTANT_ADDRESS_P (XEXP (op
, 0))
1653 /* This makes a difference for PIC. */
1654 && general_operand (XEXP (op
, 0), Pmode
))
1655 || (GET_CODE (XEXP (op
, 0)) == REG
1656 && XEXP (op
, 0) != arg_pointer_rtx
1657 && ! (REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1658 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
1664 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1668 expander_call_insn_operand (op
, mode
)
1670 enum machine_mode mode
;
1672 if (GET_CODE (op
) == MEM
1673 && (CONSTANT_ADDRESS_P (XEXP (op
, 0))
1674 || (GET_CODE (XEXP (op
, 0)) == REG
1675 && XEXP (op
, 0) != arg_pointer_rtx
1676 && ! (REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1677 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
1683 /* Return 1 if OP is a comparison operator that can use the condition code
1684 generated by an arithmetic operation. */
1687 arithmetic_comparison_operator (op
, mode
)
1689 enum machine_mode mode
;
1693 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
1696 code
= GET_CODE (op
);
1697 if (GET_RTX_CLASS (code
) != '<')
1700 return (code
!= GT
&& code
!= LE
);
1704 ix86_logical_operator (op
, mode
)
1706 enum machine_mode mode
;
1708 return GET_CODE (op
) == AND
|| GET_CODE (op
) == IOR
|| GET_CODE (op
) == XOR
;
1712 /* Returns 1 if OP contains a symbol reference */
1715 symbolic_reference_mentioned_p (op
)
1721 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1724 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1725 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1731 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1732 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1736 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1743 /* Attempt to expand a binary operator. Make the expansion closer to the
1744 actual machine, then just general_operand, which will allow 3 separate
1745 memory references (one output, two input) in a single insn. Return
1746 whether the insn fails, or succeeds. */
1749 ix86_expand_binary_operator (code
, mode
, operands
)
1751 enum machine_mode mode
;
1756 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1757 if (GET_RTX_CLASS (code
) == 'c'
1758 && (rtx_equal_p (operands
[0], operands
[2])
1759 || immediate_operand (operands
[1], mode
)))
1761 rtx temp
= operands
[1];
1762 operands
[1] = operands
[2];
1766 /* If optimizing, copy to regs to improve CSE */
1767 if (TARGET_PSEUDO
&& optimize
1768 && ((reload_in_progress
| reload_completed
) == 0))
1770 if (GET_CODE (operands
[1]) == MEM
1771 && ! rtx_equal_p (operands
[0], operands
[1]))
1772 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1774 if (GET_CODE (operands
[2]) == MEM
)
1775 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1777 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1779 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1781 emit_move_insn (temp
, operands
[1]);
1787 if (!ix86_binary_operator_ok (code
, mode
, operands
))
1789 /* If not optimizing, try to make a valid insn (optimize code
1790 previously did this above to improve chances of CSE) */
1792 if ((! TARGET_PSEUDO
|| !optimize
)
1793 && ((reload_in_progress
| reload_completed
) == 0)
1794 && (GET_CODE (operands
[1]) == MEM
|| GET_CODE (operands
[2]) == MEM
))
1797 if (GET_CODE (operands
[1]) == MEM
1798 && ! rtx_equal_p (operands
[0], operands
[1]))
1800 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1804 if (GET_CODE (operands
[2]) == MEM
)
1806 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1810 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1812 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1814 emit_move_insn (temp
, operands
[1]);
1819 if (modified
&& ! ix86_binary_operator_ok (code
, mode
, operands
))
1829 /* Return TRUE or FALSE depending on whether the binary operator meets the
1830 appropriate constraints. */
1833 ix86_binary_operator_ok (code
, mode
, operands
)
1835 enum machine_mode mode
;
1838 return (GET_CODE (operands
[1]) != MEM
|| GET_CODE (operands
[2]) != MEM
)
1839 && (GET_CODE (operands
[1]) != CONST_INT
|| GET_RTX_CLASS (code
) == 'c');
1842 /* Attempt to expand a unary operator. Make the expansion closer to the
1843 actual machine, then just general_operand, which will allow 2 separate
1844 memory references (one output, one input) in a single insn. Return
1845 whether the insn fails, or succeeds. */
1848 ix86_expand_unary_operator (code
, mode
, operands
)
1850 enum machine_mode mode
;
1853 /* If optimizing, copy to regs to improve CSE */
1856 && ((reload_in_progress
| reload_completed
) == 0)
1857 && GET_CODE (operands
[1]) == MEM
)
1858 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1860 if (! ix86_unary_operator_ok (code
, mode
, operands
))
1862 if ((! TARGET_PSEUDO
|| optimize
== 0)
1863 && ((reload_in_progress
| reload_completed
) == 0)
1864 && GET_CODE (operands
[1]) == MEM
)
1866 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1867 if (! ix86_unary_operator_ok (code
, mode
, operands
))
1877 /* Return TRUE or FALSE depending on whether the unary operator meets the
1878 appropriate constraints. */
1881 ix86_unary_operator_ok (code
, mode
, operands
)
1883 enum machine_mode mode
;
1889 static rtx pic_label_rtx
;
1890 static char pic_label_name
[256];
1891 static int pic_label_no
= 0;
1893 /* This function generates code for -fpic that loads %ebx with
1894 with the return address of the caller and then returns. */
1897 asm_output_function_prefix (file
, name
)
1902 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1903 || current_function_uses_const_pool
);
1904 xops
[0] = pic_offset_table_rtx
;
1905 xops
[1] = stack_pointer_rtx
;
1907 /* Deep branch prediction favors having a return for every call. */
1908 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
1912 if (pic_label_rtx
== 0)
1914 pic_label_rtx
= gen_label_rtx ();
1915 ASM_GENERATE_INTERNAL_LABEL (pic_label_name
, "LPR", pic_label_no
++);
1916 LABEL_NAME (pic_label_rtx
) = pic_label_name
;
1919 prologue_node
= make_node (FUNCTION_DECL
);
1920 DECL_RESULT (prologue_node
) = 0;
1921 #ifdef ASM_DECLARE_FUNCTION_NAME
1922 ASM_DECLARE_FUNCTION_NAME (file
, pic_label_name
, prologue_node
);
1924 output_asm_insn ("movl (%1),%0", xops
);
1925 output_asm_insn ("ret", xops
);
1929 /* Generate the assembly code for function entry.
1930 FILE is an stdio stream to output the code to.
1931 SIZE is an int: how many units of temporary storage to allocate. */
1934 function_prologue (file
, size
)
1938 if (TARGET_SCHEDULE_PROLOGUE
)
1947 /* Expand the prologue into a bunch of separate insns. */
1950 ix86_expand_prologue ()
1952 if (! TARGET_SCHEDULE_PROLOGUE
)
1959 load_pic_register (do_rtl
)
1964 if (TARGET_DEEP_BRANCH_PREDICTION
)
1966 xops
[0] = pic_offset_table_rtx
;
1967 if (pic_label_rtx
== 0)
1969 pic_label_rtx
= gen_label_rtx ();
1970 ASM_GENERATE_INTERNAL_LABEL (pic_label_name
, "LPR", pic_label_no
++);
1971 LABEL_NAME (pic_label_rtx
) = pic_label_name
;
1974 xops
[1] = gen_rtx_MEM (QImode
,
1975 gen_rtx (SYMBOL_REF
, Pmode
,
1976 LABEL_NAME (pic_label_rtx
)));
1980 emit_insn (gen_prologue_get_pc (xops
[0], xops
[1]));
1981 emit_insn (gen_prologue_set_got (xops
[0],
1982 gen_rtx (SYMBOL_REF
, Pmode
,
1983 "$_GLOBAL_OFFSET_TABLE_"),
1988 output_asm_insn (AS1 (call
,%X1
), xops
);
1989 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops
);
1996 xops
[0] = pic_offset_table_rtx
;
1997 xops
[1] = gen_label_rtx ();
2001 /* We can't put a raw CODE_LABEL into the RTL, and we can't emit
2002 a new CODE_LABEL after reload, so we need a single pattern to
2003 emit the 3 necessary instructions. */
2004 emit_insn (gen_prologue_get_pc_and_set_got (xops
[0]));
2008 output_asm_insn (AS1 (call
,%P1
), xops
);
2009 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
2010 CODE_LABEL_NUMBER (xops
[1]));
2011 output_asm_insn (AS1 (pop
%L0
,%0), xops
);
2012 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops
);
2016 /* When -fpic, we must emit a scheduling barrier, so that the instruction
2017 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
2018 moved before any instruction which implicitly uses the got. */
2021 emit_insn (gen_blockage ());
2025 ix86_prologue (do_rtl
)
2031 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2032 || current_function_uses_const_pool
);
2033 long tsize
= get_frame_size ();
2035 int cfa_offset
= INCOMING_FRAME_SP_OFFSET
, cfa_store_offset
= cfa_offset
;
2037 xops
[0] = stack_pointer_rtx
;
2038 xops
[1] = frame_pointer_rtx
;
2039 xops
[2] = GEN_INT (tsize
);
2041 if (frame_pointer_needed
)
2045 insn
= emit_insn (gen_rtx (SET
, VOIDmode
,
2046 gen_rtx_MEM (SImode
,
2047 gen_rtx (PRE_DEC
, SImode
,
2048 stack_pointer_rtx
)),
2049 frame_pointer_rtx
));
2051 RTX_FRAME_RELATED_P (insn
) = 1;
2052 insn
= emit_move_insn (xops
[1], xops
[0]);
2053 RTX_FRAME_RELATED_P (insn
) = 1;
2058 output_asm_insn ("push%L1 %1", xops
);
2059 #ifdef INCOMING_RETURN_ADDR_RTX
2060 if (dwarf2out_do_frame ())
2062 char *l
= dwarf2out_cfi_label ();
2064 cfa_store_offset
+= 4;
2065 cfa_offset
= cfa_store_offset
;
2066 dwarf2out_def_cfa (l
, STACK_POINTER_REGNUM
, cfa_offset
);
2067 dwarf2out_reg_save (l
, FRAME_POINTER_REGNUM
, - cfa_store_offset
);
2071 output_asm_insn (AS2 (mov
%L0
,%0,%1), xops
);
2072 #ifdef INCOMING_RETURN_ADDR_RTX
2073 if (dwarf2out_do_frame ())
2074 dwarf2out_def_cfa ("", FRAME_POINTER_REGNUM
, cfa_offset
);
2081 else if (! TARGET_STACK_PROBE
|| tsize
< CHECK_STACK_LIMIT
)
2085 insn
= emit_insn (gen_prologue_set_stack_ptr (xops
[2]));
2086 RTX_FRAME_RELATED_P (insn
) = 1;
2090 output_asm_insn (AS2 (sub
%L0
,%2,%0), xops
);
2091 #ifdef INCOMING_RETURN_ADDR_RTX
2092 if (dwarf2out_do_frame ())
2094 cfa_store_offset
+= tsize
;
2095 if (! frame_pointer_needed
)
2097 cfa_offset
= cfa_store_offset
;
2098 dwarf2out_def_cfa ("", STACK_POINTER_REGNUM
, cfa_offset
);
2106 xops
[3] = gen_rtx_REG (SImode
, 0);
2108 emit_move_insn (xops
[3], xops
[2]);
2110 output_asm_insn (AS2 (mov
%L0
,%2,%3), xops
);
2112 xops
[3] = gen_rtx_MEM (FUNCTION_MODE
,
2113 gen_rtx (SYMBOL_REF
, Pmode
, "_alloca"));
2116 emit_call_insn (gen_rtx (CALL
, VOIDmode
, xops
[3], const0_rtx
));
2118 output_asm_insn (AS1 (call
,%P3
), xops
);
2121 /* Note If use enter it is NOT reversed args.
2122 This one is not reversed from intel!!
2123 I think enter is slower. Also sdb doesn't like it.
2124 But if you want it the code is:
2126 xops[3] = const0_rtx;
2127 output_asm_insn ("enter %2,%3", xops);
2131 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
2132 for (regno
= limit
- 1; regno
>= 0; regno
--)
2133 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2134 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2136 xops
[0] = gen_rtx_REG (SImode
, regno
);
2139 insn
= emit_insn (gen_rtx (SET
, VOIDmode
,
2140 gen_rtx_MEM (SImode
,
2141 gen_rtx (PRE_DEC
, SImode
,
2142 stack_pointer_rtx
)),
2145 RTX_FRAME_RELATED_P (insn
) = 1;
2149 output_asm_insn ("push%L0 %0", xops
);
2150 #ifdef INCOMING_RETURN_ADDR_RTX
2151 if (dwarf2out_do_frame ())
2153 char *l
= dwarf2out_cfi_label ();
2155 cfa_store_offset
+= 4;
2156 if (! frame_pointer_needed
)
2158 cfa_offset
= cfa_store_offset
;
2159 dwarf2out_def_cfa (l
, STACK_POINTER_REGNUM
, cfa_offset
);
2162 dwarf2out_reg_save (l
, regno
, - cfa_store_offset
);
2169 load_pic_register (do_rtl
);
2171 /* If we are profiling, make sure no instructions are scheduled before
2172 the call to mcount. However, if -fpic, the above call will have
2174 if ((profile_flag
|| profile_block_flag
)
2175 && ! pic_reg_used
&& do_rtl
)
2176 emit_insn (gen_blockage ());
2179 /* Return 1 if it is appropriate to emit `ret' instructions in the
2180 body of a function. Do this only if the epilogue is simple, needing a
2181 couple of insns. Prior to reloading, we can't tell how many registers
2182 must be saved, so return 0 then. Return 0 if there is no frame
2183 marker to de-allocate.
2185 If NON_SAVING_SETJMP is defined and true, then it is not possible
2186 for the epilogue to be simple, so return 0. This is a special case
2187 since NON_SAVING_SETJMP will not cause regs_ever_live to change
2188 until final, but jump_optimize may need to know sooner if a
2192 ix86_can_use_return_insn_p ()
2196 int reglimit
= (frame_pointer_needed
2197 ? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
2198 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2199 || current_function_uses_const_pool
);
2201 #ifdef NON_SAVING_SETJMP
2202 if (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
2206 if (! reload_completed
)
2209 for (regno
= reglimit
- 1; regno
>= 0; regno
--)
2210 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2211 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2214 return nregs
== 0 || ! frame_pointer_needed
;
2217 /* This function generates the assembly code for function exit.
2218 FILE is an stdio stream to output the code to.
2219 SIZE is an int: how many units of temporary storage to deallocate. */
2222 function_epilogue (file
, size
)
2229 /* Restore function stack, frame, and registers. */
2232 ix86_expand_epilogue ()
2238 ix86_epilogue (do_rtl
)
2242 register int nregs
, limit
;
2245 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2246 || current_function_uses_const_pool
);
2247 long tsize
= get_frame_size ();
2249 /* Compute the number of registers to pop */
2251 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
2255 for (regno
= limit
- 1; regno
>= 0; regno
--)
2256 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2257 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2260 /* sp is often unreliable so we must go off the frame pointer.
2262 In reality, we may not care if sp is unreliable, because we can restore
2263 the register relative to the frame pointer. In theory, since each move
2264 is the same speed as a pop, and we don't need the leal, this is faster.
2265 For now restore multiple registers the old way. */
2267 offset
= - tsize
- (nregs
* UNITS_PER_WORD
);
2269 xops
[2] = stack_pointer_rtx
;
2271 /* When -fpic, we must emit a scheduling barrier, so that the instruction
2272 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
2273 moved before any instruction which implicitly uses the got. This
2274 includes any instruction which uses a SYMBOL_REF or a LABEL_REF.
2276 Alternatively, this could be fixed by making the dependence on the
2277 PIC_OFFSET_TABLE_REGNUM explicit in the RTL. */
2279 if (flag_pic
|| profile_flag
|| profile_block_flag
)
2280 emit_insn (gen_blockage ());
2282 if (nregs
> 1 || ! frame_pointer_needed
)
2284 if (frame_pointer_needed
)
2286 xops
[0] = adj_offsettable_operand (AT_BP (QImode
), offset
);
2288 emit_insn (gen_movsi_lea (xops
[2], XEXP (xops
[0], 0)));
2290 output_asm_insn (AS2 (lea
%L2
,%0,%2), xops
);
2293 for (regno
= 0; regno
< limit
; regno
++)
2294 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2295 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2297 xops
[0] = gen_rtx_REG (SImode
, regno
);
2300 emit_insn (gen_pop (xops
[0]));
2302 output_asm_insn ("pop%L0 %0", xops
);
2307 for (regno
= 0; regno
< limit
; regno
++)
2308 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2309 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2311 xops
[0] = gen_rtx_REG (SImode
, regno
);
2312 xops
[1] = adj_offsettable_operand (AT_BP (Pmode
), offset
);
2315 emit_move_insn (xops
[0], xops
[1]);
2317 output_asm_insn (AS2 (mov
%L0
,%1,%0), xops
);
2322 if (frame_pointer_needed
)
2324 /* If not an i386, mov & pop is faster than "leave". */
2326 if (TARGET_USE_LEAVE
)
2329 emit_insn (gen_leave());
2331 output_asm_insn ("leave", xops
);
2335 xops
[0] = frame_pointer_rtx
;
2336 xops
[1] = stack_pointer_rtx
;
2340 emit_insn (gen_epilogue_set_stack_ptr());
2341 emit_insn (gen_pop (xops
[0]));
2345 output_asm_insn (AS2 (mov
%L2
,%0,%2), xops
);
2346 output_asm_insn ("pop%L0 %0", xops
);
2353 /* If there is no frame pointer, we must still release the frame. */
2354 xops
[0] = GEN_INT (tsize
);
2357 emit_insn (gen_rtx (SET
, VOIDmode
, xops
[2],
2358 gen_rtx (PLUS
, SImode
, xops
[2], xops
[0])));
2360 output_asm_insn (AS2 (add
%L2
,%0,%2), xops
);
2363 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2364 if (profile_block_flag
== 2)
2366 FUNCTION_BLOCK_PROFILER_EXIT(file
);
2370 if (current_function_pops_args
&& current_function_args_size
)
2372 xops
[1] = GEN_INT (current_function_pops_args
);
2374 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2375 asked to pop more, pop return address, do explicit add, and jump
2376 indirectly to the caller. */
2378 if (current_function_pops_args
>= 32768)
2380 /* ??? Which register to use here? */
2381 xops
[0] = gen_rtx_REG (SImode
, 2);
2385 emit_insn (gen_pop (xops
[0]));
2386 emit_insn (gen_rtx (SET
, VOIDmode
, xops
[2],
2387 gen_rtx (PLUS
, SImode
, xops
[1], xops
[2])));
2388 emit_jump_insn (xops
[0]);
2392 output_asm_insn ("pop%L0 %0", xops
);
2393 output_asm_insn (AS2 (add
%L2
,%1,%2), xops
);
2394 output_asm_insn ("jmp %*%0", xops
);
2400 emit_jump_insn (gen_return_pop_internal (xops
[1]));
2402 output_asm_insn ("ret %1", xops
);
2408 emit_jump_insn (gen_return_internal ());
2410 output_asm_insn ("ret", xops
);
2414 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2415 that is a valid memory address for an instruction.
2416 The MODE argument is the machine mode for the MEM expression
2417 that wants to use this address.
2419 On x86, legitimate addresses are:
2420 base movl (base),reg
2421 displacement movl disp,reg
2422 base + displacement movl disp(base),reg
2423 index + base movl (base,index),reg
2424 (index + base) + displacement movl disp(base,index),reg
2425 index*scale movl (,index,scale),reg
2426 index*scale + disp movl disp(,index,scale),reg
2427 index*scale + base movl (base,index,scale),reg
2428 (index*scale + base) + disp movl disp(base,index,scale),reg
2430 In each case, scale can be 1, 2, 4, 8. */
2432 /* This is exactly the same as print_operand_addr, except that
2433 it recognizes addresses instead of printing them.
2435 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2436 convert common non-canonical forms to canonical form so that they will
2439 #define ADDR_INVALID(msg,insn) \
2441 if (TARGET_DEBUG_ADDR) \
2443 fprintf (stderr, msg); \
2449 legitimate_address_p (mode
, addr
, strict
)
2450 enum machine_mode mode
;
2454 rtx base
= NULL_RTX
;
2455 rtx indx
= NULL_RTX
;
2456 rtx scale
= NULL_RTX
;
2457 rtx disp
= NULL_RTX
;
2459 if (TARGET_DEBUG_ADDR
)
2462 "\n======\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2463 GET_MODE_NAME (mode
), strict
);
2468 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == SUBREG
)
2471 else if (GET_CODE (addr
) == PLUS
)
2473 rtx op0
= XEXP (addr
, 0);
2474 rtx op1
= XEXP (addr
, 1);
2475 enum rtx_code code0
= GET_CODE (op0
);
2476 enum rtx_code code1
= GET_CODE (op1
);
2478 if (code0
== REG
|| code0
== SUBREG
)
2480 if (code1
== REG
|| code1
== SUBREG
)
2482 indx
= op0
; /* index + base */
2488 base
= op0
; /* base + displacement */
2493 else if (code0
== MULT
)
2495 indx
= XEXP (op0
, 0);
2496 scale
= XEXP (op0
, 1);
2498 if (code1
== REG
|| code1
== SUBREG
)
2499 base
= op1
; /* index*scale + base */
2502 disp
= op1
; /* index*scale + disp */
2505 else if (code0
== PLUS
&& GET_CODE (XEXP (op0
, 0)) == MULT
)
2507 indx
= XEXP (XEXP (op0
, 0), 0); /* index*scale + base + disp */
2508 scale
= XEXP (XEXP (op0
, 0), 1);
2509 base
= XEXP (op0
, 1);
2513 else if (code0
== PLUS
)
2515 indx
= XEXP (op0
, 0); /* index + base + disp */
2516 base
= XEXP (op0
, 1);
2522 ADDR_INVALID ("PLUS subcode is not valid.\n", op0
);
2527 else if (GET_CODE (addr
) == MULT
)
2529 indx
= XEXP (addr
, 0); /* index*scale */
2530 scale
= XEXP (addr
, 1);
2534 disp
= addr
; /* displacement */
2536 /* Allow arg pointer and stack pointer as index if there is not scaling */
2537 if (base
&& indx
&& !scale
2538 && (indx
== arg_pointer_rtx
|| indx
== stack_pointer_rtx
))
2545 /* Validate base register:
2547 Don't allow SUBREG's here, it can lead to spill failures when the base
2548 is one word out of a two word structure, which is represented internally
2553 if (GET_CODE (base
) != REG
)
2555 ADDR_INVALID ("Base is not a register.\n", base
);
2559 if ((strict
&& ! REG_OK_FOR_BASE_STRICT_P (base
))
2560 || (! strict
&& ! REG_OK_FOR_BASE_NONSTRICT_P (base
)))
2562 ADDR_INVALID ("Base is not valid.\n", base
);
2567 /* Validate index register:
2569 Don't allow SUBREG's here, it can lead to spill failures when the index
2570 is one word out of a two word structure, which is represented internally
2574 if (GET_CODE (indx
) != REG
)
2576 ADDR_INVALID ("Index is not a register.\n", indx
);
2580 if ((strict
&& ! REG_OK_FOR_INDEX_STRICT_P (indx
))
2581 || (! strict
&& ! REG_OK_FOR_INDEX_NONSTRICT_P (indx
)))
2583 ADDR_INVALID ("Index is not valid.\n", indx
);
2588 abort (); /* scale w/o index invalid */
2590 /* Validate scale factor: */
2593 HOST_WIDE_INT value
;
2595 if (GET_CODE (scale
) != CONST_INT
)
2597 ADDR_INVALID ("Scale is not valid.\n", scale
);
2601 value
= INTVAL (scale
);
2602 if (value
!= 1 && value
!= 2 && value
!= 4 && value
!= 8)
2604 ADDR_INVALID ("Scale is not a good multiplier.\n", scale
);
2609 /* Validate displacement
2610 Constant pool addresses must be handled special. They are
2611 considered legitimate addresses, but only if not used with regs.
2612 When printed, the output routines know to print the reference with the
2613 PIC reg, even though the PIC reg doesn't appear in the RTL. */
2616 if (GET_CODE (disp
) == SYMBOL_REF
2617 && CONSTANT_POOL_ADDRESS_P (disp
)
2622 else if (!CONSTANT_ADDRESS_P (disp
))
2624 ADDR_INVALID ("Displacement is not valid.\n", disp
);
2628 else if (GET_CODE (disp
) == CONST_DOUBLE
)
2630 ADDR_INVALID ("Displacement is a const_double.\n", disp
);
2634 else if (flag_pic
&& SYMBOLIC_CONST (disp
)
2635 && base
!= pic_offset_table_rtx
2636 && (indx
!= pic_offset_table_rtx
|| scale
!= NULL_RTX
))
2638 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp
);
2642 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp
)
2643 && (base
!= NULL_RTX
|| indx
!= NULL_RTX
))
2645 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n",
2651 if (TARGET_DEBUG_ADDR
)
2652 fprintf (stderr
, "Address is valid.\n");
2654 /* Everything looks valid, return true */
2658 /* Return a legitimate reference for ORIG (an address) using the
2659 register REG. If REG is 0, a new pseudo is generated.
2661 There are three types of references that must be handled:
2663 1. Global data references must load the address from the GOT, via
2664 the PIC reg. An insn is emitted to do this load, and the reg is
2667 2. Static data references must compute the address as an offset
2668 from the GOT, whose base is in the PIC reg. An insn is emitted to
2669 compute the address into a reg, and the reg is returned. Static
2670 data objects have SYMBOL_REF_FLAG set to differentiate them from
2671 global data objects.
2673 3. Constant pool addresses must be handled special. They are
2674 considered legitimate addresses, but only if not used with regs.
2675 When printed, the output routines know to print the reference with the
2676 PIC reg, even though the PIC reg doesn't appear in the RTL.
2678 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2679 reg also appears in the address (except for constant pool references,
2682 "switch" statements also require special handling when generating
2683 PIC code. See comments by the `casesi' insn in i386.md for details. */
2686 legitimize_pic_address (orig
, reg
)
2693 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
2695 if (GET_CODE (addr
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (addr
))
2700 reg
= gen_reg_rtx (Pmode
);
2702 if ((GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_FLAG (addr
))
2703 || GET_CODE (addr
) == LABEL_REF
)
2704 new = gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
, orig
);
2706 new = gen_rtx_MEM (Pmode
,
2707 gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
, orig
));
2709 emit_move_insn (reg
, new);
2711 current_function_uses_pic_offset_table
= 1;
2715 else if (GET_CODE (addr
) == CONST
|| GET_CODE (addr
) == PLUS
)
2719 if (GET_CODE (addr
) == CONST
)
2721 addr
= XEXP (addr
, 0);
2722 if (GET_CODE (addr
) != PLUS
)
2726 if (XEXP (addr
, 0) == pic_offset_table_rtx
)
2730 reg
= gen_reg_rtx (Pmode
);
2732 base
= legitimize_pic_address (XEXP (addr
, 0), reg
);
2733 addr
= legitimize_pic_address (XEXP (addr
, 1),
2734 base
== reg
? NULL_RTX
: reg
);
2736 if (GET_CODE (addr
) == CONST_INT
)
2737 return plus_constant (base
, INTVAL (addr
));
2739 if (GET_CODE (addr
) == PLUS
&& CONSTANT_P (XEXP (addr
, 1)))
2741 base
= gen_rtx (PLUS
, Pmode
, base
, XEXP (addr
, 0));
2742 addr
= XEXP (addr
, 1);
2745 return gen_rtx (PLUS
, Pmode
, base
, addr
);
2750 /* Emit insns to move operands[1] into operands[0]. */
2753 emit_pic_move (operands
, mode
)
2755 enum machine_mode mode
;
2757 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
2759 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
2760 operands
[1] = force_reg (SImode
, operands
[1]);
2762 operands
[1] = legitimize_pic_address (operands
[1], temp
);
2765 /* Try machine-dependent ways of modifying an illegitimate address
2766 to be legitimate. If we find one, return the new, valid address.
2767 This macro is used in only one place: `memory_address' in explow.c.
2769 OLDX is the address as it was before break_out_memory_refs was called.
2770 In some cases it is useful to look at this to decide what needs to be done.
2772 MODE and WIN are passed so that this macro can use
2773 GO_IF_LEGITIMATE_ADDRESS.
2775 It is always safe for this macro to do nothing. It exists to recognize
2776 opportunities to optimize the output.
2778 For the 80386, we handle X+REG by loading X into a register R and
2779 using R+REG. R will go in a general reg and indexing will be used.
2780 However, if REG is a broken-out memory address or multiplication,
2781 nothing needs to be done because REG can certainly go in a general reg.
2783 When -fpic is used, special handling is needed for symbolic references.
2784 See comments by legitimize_pic_address in i386.c for details. */
2787 legitimize_address (x
, oldx
, mode
)
2790 enum machine_mode mode
;
2795 if (TARGET_DEBUG_ADDR
)
2797 fprintf (stderr
, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n",
2798 GET_MODE_NAME (mode
));
2802 if (flag_pic
&& SYMBOLIC_CONST (x
))
2803 return legitimize_pic_address (x
, 0);
2805 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2806 if (GET_CODE (x
) == ASHIFT
2807 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2808 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (x
, 1)))) < 4)
2811 x
= gen_rtx (MULT
, Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2812 GEN_INT (1 << log
));
2815 if (GET_CODE (x
) == PLUS
)
2817 /* Canonicalize shifts by 0, 1, 2, 3 into multiply. */
2819 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
2820 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2821 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 0), 1)))) < 4)
2824 XEXP (x
, 0) = gen_rtx (MULT
, Pmode
,
2825 force_reg (Pmode
, XEXP (XEXP (x
, 0), 0)),
2826 GEN_INT (1 << log
));
2829 if (GET_CODE (XEXP (x
, 1)) == ASHIFT
2830 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
2831 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 1), 1)))) < 4)
2834 XEXP (x
, 1) = gen_rtx (MULT
, Pmode
,
2835 force_reg (Pmode
, XEXP (XEXP (x
, 1), 0)),
2836 GEN_INT (1 << log
));
2839 /* Put multiply first if it isn't already. */
2840 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2842 rtx tmp
= XEXP (x
, 0);
2843 XEXP (x
, 0) = XEXP (x
, 1);
2848 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2849 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2850 created by virtual register instantiation, register elimination, and
2851 similar optimizations. */
2852 if (GET_CODE (XEXP (x
, 0)) == MULT
&& GET_CODE (XEXP (x
, 1)) == PLUS
)
2855 x
= gen_rtx (PLUS
, Pmode
,
2856 gen_rtx (PLUS
, Pmode
, XEXP (x
, 0),
2857 XEXP (XEXP (x
, 1), 0)),
2858 XEXP (XEXP (x
, 1), 1));
2862 (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2863 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2864 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 0)) == PLUS
2865 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == MULT
2866 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == PLUS
2867 && CONSTANT_P (XEXP (x
, 1)))
2870 rtx other
= NULL_RTX
;
2872 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2874 constant
= XEXP (x
, 1);
2875 other
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
2877 else if (GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 1)) == CONST_INT
)
2879 constant
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
2880 other
= XEXP (x
, 1);
2888 x
= gen_rtx (PLUS
, Pmode
,
2889 gen_rtx (PLUS
, Pmode
, XEXP (XEXP (x
, 0), 0),
2890 XEXP (XEXP (XEXP (x
, 0), 1), 0)),
2891 plus_constant (other
, INTVAL (constant
)));
2895 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
2898 if (GET_CODE (XEXP (x
, 0)) == MULT
)
2901 XEXP (x
, 0) = force_operand (XEXP (x
, 0), 0);
2904 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2907 XEXP (x
, 1) = force_operand (XEXP (x
, 1), 0);
2911 && GET_CODE (XEXP (x
, 1)) == REG
2912 && GET_CODE (XEXP (x
, 0)) == REG
)
2915 if (flag_pic
&& SYMBOLIC_CONST (XEXP (x
, 1)))
2918 x
= legitimize_pic_address (x
, 0);
2921 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
2924 if (GET_CODE (XEXP (x
, 0)) == REG
)
2926 register rtx temp
= gen_reg_rtx (Pmode
);
2927 register rtx val
= force_operand (XEXP (x
, 1), temp
);
2929 emit_move_insn (temp
, val
);
2935 else if (GET_CODE (XEXP (x
, 1)) == REG
)
2937 register rtx temp
= gen_reg_rtx (Pmode
);
2938 register rtx val
= force_operand (XEXP (x
, 0), temp
);
2940 emit_move_insn (temp
, val
);
2950 /* Print an integer constant expression in assembler syntax. Addition
2951 and subtraction are the only arithmetic that may appear in these
2952 expressions. FILE is the stdio stream to write to, X is the rtx, and
2953 CODE is the operand print code from the output string. */
2956 output_pic_addr_const (file
, x
, code
)
2963 switch (GET_CODE (x
))
2974 if (GET_CODE (x
) == SYMBOL_REF
)
2975 assemble_name (file
, XSTR (x
, 0));
2978 ASM_GENERATE_INTERNAL_LABEL (buf
, "L",
2979 CODE_LABEL_NUMBER (XEXP (x
, 0)));
2980 assemble_name (asm_out_file
, buf
);
2984 ; /* No suffix, dammit. */
2985 else if (GET_CODE (x
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x
))
2986 fprintf (file
, "@GOTOFF(%%ebx)");
2987 else if (code
== 'P')
2988 fprintf (file
, "@PLT");
2989 else if (GET_CODE (x
) == LABEL_REF
)
2990 fprintf (file
, "@GOTOFF");
2991 else if (! SYMBOL_REF_FLAG (x
))
2992 fprintf (file
, "@GOT");
2994 fprintf (file
, "@GOTOFF");
2999 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (x
));
3000 assemble_name (asm_out_file
, buf
);
3004 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
));
3008 /* This used to output parentheses around the expression,
3009 but that does not work on the 386 (either ATT or BSD assembler). */
3010 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3014 if (GET_MODE (x
) == VOIDmode
)
3016 /* We can use %d if the number is <32 bits and positive. */
3017 if (CONST_DOUBLE_HIGH (x
) || CONST_DOUBLE_LOW (x
) < 0)
3018 fprintf (file
, "0x%lx%08lx",
3019 (unsigned long) CONST_DOUBLE_HIGH (x
),
3020 (unsigned long) CONST_DOUBLE_LOW (x
));
3022 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_DOUBLE_LOW (x
));
3025 /* We can't handle floating point constants;
3026 PRINT_OPERAND must handle them. */
3027 output_operand_lossage ("floating constant misused");
3031 /* Some assemblers need integer constants to appear first. */
3032 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
3034 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3035 if (INTVAL (XEXP (x
, 1)) >= 0)
3036 fprintf (file
, "+");
3037 output_pic_addr_const (file
, XEXP (x
, 1), code
);
3041 output_pic_addr_const (file
, XEXP (x
, 1), code
);
3042 if (INTVAL (XEXP (x
, 0)) >= 0)
3043 fprintf (file
, "+");
3044 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3049 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3050 fprintf (file
, "-");
3051 output_pic_addr_const (file
, XEXP (x
, 1), code
);
3055 output_operand_lossage ("invalid expression as operand");
3059 /* Append the correct conditional move suffix which corresponds to CODE. */
3062 put_condition_code (code
, reverse_cc
, mode
, file
)
3065 enum mode_class mode
;
3068 int ieee
= (TARGET_IEEE_FP
&& (cc_prev_status
.flags
& CC_IN_80387
)
3069 && ! (cc_prev_status
.flags
& CC_FCOMI
));
3070 if (reverse_cc
&& ! ieee
)
3071 code
= reverse_condition (code
);
3073 if (mode
== MODE_INT
)
3077 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
3084 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
3123 output_operand_lossage ("Invalid %%C operand");
3126 else if (mode
== MODE_FLOAT
)
3130 fputs (ieee
? (reverse_cc
? "ne" : "e") : "ne", file
);
3133 fputs (ieee
? (reverse_cc
? "ne" : "e") : "e", file
);
3136 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nb", file
);
3139 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nbe", file
);
3142 fputs (ieee
? (reverse_cc
? "nb" : "b") : "be", file
);
3145 fputs (ieee
? (reverse_cc
? "ne" : "e") : "b", file
);
3148 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nb", file
);
3151 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nbe", file
);
3154 fputs (ieee
? (reverse_cc
? "nb" : "b") : "be", file
);
3157 fputs (ieee
? (reverse_cc
? "ne" : "e") : "b", file
);
3160 output_operand_lossage ("Invalid %%C operand");
3165 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
3166 C -- print opcode suffix for set/cmov insn.
3167 c -- like C, but print reversed condition
3168 F -- print opcode suffix for fcmov insn.
3169 f -- like C, but print reversed condition
3170 R -- print the prefix for register names.
3171 z -- print the opcode suffix for the size of the current operand.
3172 * -- print a star (in certain assembler syntax)
3173 w -- print the operand as if it's a "word" (HImode) even if it isn't.
3174 c -- don't print special prefixes before constant operands.
3175 J -- print the appropriate jump operand.
3176 s -- print a shift double count, followed by the assemblers argument
3178 b -- print the QImode name of the register for the indicated operand.
3179 %b0 would print %al if operands[0] is reg 0.
3180 w -- likewise, print the HImode name of the register.
3181 k -- likewise, print the SImode name of the register.
3182 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
3183 y -- print "st(0)" instead of "st" as a register.
3184 P -- print as a PIC constant */
3187 print_operand (file
, x
, code
)
3202 PUT_OP_SIZE (code
, 'l', file
);
3206 PUT_OP_SIZE (code
, 'w', file
);
3210 PUT_OP_SIZE (code
, 'b', file
);
3214 PUT_OP_SIZE (code
, 'l', file
);
3218 PUT_OP_SIZE (code
, 's', file
);
3222 PUT_OP_SIZE (code
, 't', file
);
3226 /* 387 opcodes don't get size suffixes if the operands are
3229 if (STACK_REG_P (x
))
3232 /* this is the size of op from size of operand */
3233 switch (GET_MODE_SIZE (GET_MODE (x
)))
3236 PUT_OP_SIZE ('B', 'b', file
);
3240 PUT_OP_SIZE ('W', 'w', file
);
3244 if (GET_MODE (x
) == SFmode
)
3246 PUT_OP_SIZE ('S', 's', file
);
3250 PUT_OP_SIZE ('L', 'l', file
);
3254 PUT_OP_SIZE ('T', 't', file
);
3258 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
)
3260 #ifdef GAS_MNEMONICS
3261 PUT_OP_SIZE ('Q', 'q', file
);
3264 PUT_OP_SIZE ('Q', 'l', file
); /* Fall through */
3268 PUT_OP_SIZE ('Q', 'l', file
);
3282 switch (GET_CODE (x
))
3284 /* These conditions are appropriate for testing the result
3285 of an arithmetic operation, not for a compare operation.
3286 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
3287 CC_Z_IN_NOT_C false and not floating point. */
3288 case NE
: fputs ("jne", file
); return;
3289 case EQ
: fputs ("je", file
); return;
3290 case GE
: fputs ("jns", file
); return;
3291 case LT
: fputs ("js", file
); return;
3292 case GEU
: fputs ("jmp", file
); return;
3293 case GTU
: fputs ("jne", file
); return;
3294 case LEU
: fputs ("je", file
); return;
3295 case LTU
: fputs ("#branch never", file
); return;
3297 /* no matching branches for GT nor LE */
3304 if (GET_CODE (x
) == CONST_INT
|| ! SHIFT_DOUBLE_OMITS_COUNT
)
3306 PRINT_OPERAND (file
, x
, 0);
3307 fputs (AS2C (,) + 1, file
);
3312 /* This is used by the conditional move instructions. */
3314 put_condition_code (GET_CODE (x
), 0, MODE_INT
, file
);
3317 /* Like above, but reverse condition */
3319 put_condition_code (GET_CODE (x
), 1, MODE_INT
, file
); return;
3322 put_condition_code (GET_CODE (x
), 0, MODE_FLOAT
, file
);
3325 /* Like above, but reverse condition */
3327 put_condition_code (GET_CODE (x
), 1, MODE_FLOAT
, file
);
3334 sprintf (str
, "invalid operand code `%c'", code
);
3335 output_operand_lossage (str
);
3340 if (GET_CODE (x
) == REG
)
3342 PRINT_REG (x
, code
, file
);
3345 else if (GET_CODE (x
) == MEM
)
3347 PRINT_PTR (x
, file
);
3348 if (CONSTANT_ADDRESS_P (XEXP (x
, 0)))
3351 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3353 output_addr_const (file
, XEXP (x
, 0));
3356 output_address (XEXP (x
, 0));
3359 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
3364 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3365 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
3366 PRINT_IMMED_PREFIX (file
);
3367 fprintf (file
, "0x%x", l
);
3370 /* These float cases don't actually occur as immediate operands. */
3371 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
3376 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3377 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
3378 fprintf (file
, "%s", dstr
);
3381 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == XFmode
)
3386 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3387 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
3388 fprintf (file
, "%s", dstr
);
3394 if (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
)
3395 PRINT_IMMED_PREFIX (file
);
3396 else if (GET_CODE (x
) == CONST
|| GET_CODE (x
) == SYMBOL_REF
3397 || GET_CODE (x
) == LABEL_REF
)
3398 PRINT_OFFSET_PREFIX (file
);
3401 output_pic_addr_const (file
, x
, code
);
3403 output_addr_const (file
, x
);
3407 /* Print a memory operand whose address is ADDR. */
3410 print_operand_address (file
, addr
)
3414 register rtx reg1
, reg2
, breg
, ireg
;
3417 switch (GET_CODE (addr
))
3421 fprintf (file
, "%se", RP
);
3422 fputs (hi_reg_name
[REGNO (addr
)], file
);
3432 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0)))
3434 offset
= XEXP (addr
, 0);
3435 addr
= XEXP (addr
, 1);
3437 else if (CONSTANT_ADDRESS_P (XEXP (addr
, 1)))
3439 offset
= XEXP (addr
, 1);
3440 addr
= XEXP (addr
, 0);
3443 if (GET_CODE (addr
) != PLUS
)
3445 else if (GET_CODE (XEXP (addr
, 0)) == MULT
)
3446 reg1
= XEXP (addr
, 0), addr
= XEXP (addr
, 1);
3447 else if (GET_CODE (XEXP (addr
, 1)) == MULT
)
3448 reg1
= XEXP (addr
, 1), addr
= XEXP (addr
, 0);
3449 else if (GET_CODE (XEXP (addr
, 0)) == REG
)
3450 reg1
= XEXP (addr
, 0), addr
= XEXP (addr
, 1);
3451 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
3452 reg1
= XEXP (addr
, 1), addr
= XEXP (addr
, 0);
3454 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == MULT
)
3471 if ((reg1
&& GET_CODE (reg1
) == MULT
)
3472 || (reg2
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2
))))
3477 else if (reg1
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1
)))
3483 if (ireg
!= 0 || breg
!= 0)
3490 output_pic_addr_const (file
, addr
, 0);
3491 else if (GET_CODE (addr
) == LABEL_REF
)
3492 output_asm_label (addr
);
3494 output_addr_const (file
, addr
);
3497 if (ireg
!= 0 && GET_CODE (ireg
) == MULT
)
3499 scale
= INTVAL (XEXP (ireg
, 1));
3500 ireg
= XEXP (ireg
, 0);
3503 /* The stack pointer can only appear as a base register,
3504 never an index register, so exchange the regs if it is wrong. */
3506 if (scale
== 1 && ireg
&& REGNO (ireg
) == STACK_POINTER_REGNUM
)
3515 /* output breg+ireg*scale */
3516 PRINT_B_I_S (breg
, ireg
, scale
, file
);
3524 if (GET_CODE (XEXP (addr
, 0)) == CONST_INT
)
3526 scale
= INTVAL (XEXP (addr
, 0));
3527 ireg
= XEXP (addr
, 1);
3531 scale
= INTVAL (XEXP (addr
, 1));
3532 ireg
= XEXP (addr
, 0);
3535 output_addr_const (file
, const0_rtx
);
3536 PRINT_B_I_S (NULL_RTX
, ireg
, scale
, file
);
3541 if (GET_CODE (addr
) == CONST_INT
3542 && INTVAL (addr
) < 0x8000
3543 && INTVAL (addr
) >= -0x8000)
3544 fprintf (file
, "%d", (int) INTVAL (addr
));
3548 output_pic_addr_const (file
, addr
, 0);
3550 output_addr_const (file
, addr
);
3555 /* Set the cc_status for the results of an insn whose pattern is EXP.
3556 On the 80386, we assume that only test and compare insns, as well
3557 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, BSF, ASHIFT,
3558 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3559 Also, we assume that jumps, moves and sCOND don't affect the condition
3560 codes. All else clobbers the condition codes, by assumption.
3562 We assume that ALL integer add, minus, etc. instructions effect the
3563 condition codes. This MUST be consistent with i386.md.
3565 We don't record any float test or compare - the redundant test &
3566 compare check in final.c does not handle stack-like regs correctly. */
3569 notice_update_cc (exp
)
3572 if (GET_CODE (exp
) == SET
)
3574 /* Jumps do not alter the cc's. */
3575 if (SET_DEST (exp
) == pc_rtx
)
3578 /* Moving register or memory into a register:
3579 it doesn't alter the cc's, but it might invalidate
3580 the RTX's which we remember the cc's came from.
3581 (Note that moving a constant 0 or 1 MAY set the cc's). */
3582 if (REG_P (SET_DEST (exp
))
3583 && (REG_P (SET_SRC (exp
)) || GET_CODE (SET_SRC (exp
)) == MEM
3584 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3586 if (cc_status
.value1
3587 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3588 cc_status
.value1
= 0;
3590 if (cc_status
.value2
3591 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3592 cc_status
.value2
= 0;
3597 /* Moving register into memory doesn't alter the cc's.
3598 It may invalidate the RTX's which we remember the cc's came from. */
3599 if (GET_CODE (SET_DEST (exp
)) == MEM
3600 && (REG_P (SET_SRC (exp
))
3601 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3603 if (cc_status
.value1
3604 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3605 cc_status
.value1
= 0;
3606 if (cc_status
.value2
3607 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3608 cc_status
.value2
= 0;
3613 /* Function calls clobber the cc's. */
3614 else if (GET_CODE (SET_SRC (exp
)) == CALL
)
3620 /* Tests and compares set the cc's in predictable ways. */
3621 else if (SET_DEST (exp
) == cc0_rtx
)
3624 cc_status
.value1
= SET_SRC (exp
);
3628 /* Certain instructions effect the condition codes. */
3629 else if (GET_MODE (SET_SRC (exp
)) == SImode
3630 || GET_MODE (SET_SRC (exp
)) == HImode
3631 || GET_MODE (SET_SRC (exp
)) == QImode
)
3632 switch (GET_CODE (SET_SRC (exp
)))
3634 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
3635 /* Shifts on the 386 don't set the condition codes if the
3636 shift count is zero. */
3637 if (GET_CODE (XEXP (SET_SRC (exp
), 1)) != CONST_INT
)
3643 /* We assume that the CONST_INT is non-zero (this rtx would
3644 have been deleted if it were zero. */
3646 case PLUS
: case MINUS
: case NEG
:
3647 case AND
: case IOR
: case XOR
:
3648 cc_status
.flags
= CC_NO_OVERFLOW
;
3649 cc_status
.value1
= SET_SRC (exp
);
3650 cc_status
.value2
= SET_DEST (exp
);
3653 /* This is the bsf pattern used by ffs. */
3655 if (XINT (SET_SRC (exp
), 1) == 5)
3657 /* Only the Z flag is defined after bsf. */
3659 = CC_NOT_POSITIVE
| CC_NOT_NEGATIVE
| CC_NO_OVERFLOW
;
3660 cc_status
.value1
= XVECEXP (SET_SRC (exp
), 0, 0);
3673 else if (GET_CODE (exp
) == PARALLEL
3674 && GET_CODE (XVECEXP (exp
, 0, 0)) == SET
)
3676 if (SET_DEST (XVECEXP (exp
, 0, 0)) == pc_rtx
)
3678 if (SET_DEST (XVECEXP (exp
, 0, 0)) == cc0_rtx
)
3682 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp
, 0, 0))))
3684 cc_status
.flags
|= CC_IN_80387
;
3685 if (TARGET_CMOVE
&& stack_regs_mentioned_p
3686 (XEXP (SET_SRC (XVECEXP (exp
, 0, 0)), 1)))
3687 cc_status
.flags
|= CC_FCOMI
;
3690 cc_status
.value1
= SET_SRC (XVECEXP (exp
, 0, 0));
3702 /* Split one or more DImode RTL references into pairs of SImode
3703 references. The RTL can be REG, offsettable MEM, integer constant, or
3704 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3705 split and "num" is its length. lo_half and hi_half are output arrays
3706 that parallel "operands". */
3709 split_di (operands
, num
, lo_half
, hi_half
)
3712 rtx lo_half
[], hi_half
[];
3716 rtx op
= operands
[num
];
3717 if (GET_CODE (op
) == REG
)
3719 lo_half
[num
] = gen_rtx_REG (SImode
, REGNO (op
));
3720 hi_half
[num
] = gen_rtx_REG (SImode
, REGNO (op
) + 1);
3722 else if (CONSTANT_P (op
))
3723 split_double (op
, &lo_half
[num
], &hi_half
[num
]);
3724 else if (offsettable_memref_p (op
))
3726 rtx lo_addr
= XEXP (op
, 0);
3727 rtx hi_addr
= XEXP (adj_offsettable_operand (op
, 4), 0);
3728 lo_half
[num
] = change_address (op
, SImode
, lo_addr
);
3729 hi_half
[num
] = change_address (op
, SImode
, hi_addr
);
3736 /* Return 1 if this is a valid binary operation on a 387.
3737 OP is the expression matched, and MODE is its mode. */
3740 binary_387_op (op
, mode
)
3742 enum machine_mode mode
;
3744 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3747 switch (GET_CODE (op
))
3753 return GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
;
3760 /* Return 1 if this is a valid shift or rotate operation on a 386.
3761 OP is the expression matched, and MODE is its mode. */
3766 enum machine_mode mode
;
3768 rtx operand
= XEXP (op
, 0);
3770 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3773 if (GET_MODE (operand
) != GET_MODE (op
)
3774 || GET_MODE_CLASS (GET_MODE (op
)) != MODE_INT
)
3777 return (GET_CODE (op
) == ASHIFT
3778 || GET_CODE (op
) == ASHIFTRT
3779 || GET_CODE (op
) == LSHIFTRT
3780 || GET_CODE (op
) == ROTATE
3781 || GET_CODE (op
) == ROTATERT
);
3784 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3785 MODE is not used. */
3788 VOIDmode_compare_op (op
, mode
)
3790 enum machine_mode mode
;
3792 return GET_CODE (op
) == COMPARE
&& GET_MODE (op
) == VOIDmode
;
3795 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3796 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3797 is the expression of the binary operation. The output may either be
3798 emitted here, or returned to the caller, like all output_* functions.
3800 There is no guarantee that the operands are the same mode, as they
3801 might be within FLOAT or FLOAT_EXTEND expressions. */
3804 output_387_binary_op (insn
, operands
)
3810 static char buf
[100];
3812 switch (GET_CODE (operands
[3]))
3815 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3816 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3823 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3824 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3831 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3832 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3839 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3840 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3850 strcpy (buf
, base_op
);
3852 switch (GET_CODE (operands
[3]))
3856 if (REG_P (operands
[2]) && REGNO (operands
[0]) == REGNO (operands
[2]))
3859 operands
[2] = operands
[1];
3863 if (GET_CODE (operands
[2]) == MEM
)
3864 return strcat (buf
, AS1 (%z2
,%2));
3866 if (NON_STACK_REG_P (operands
[1]))
3868 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
3872 else if (NON_STACK_REG_P (operands
[2]))
3874 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3878 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3879 return strcat (buf
, AS2 (p
,%2,%0));
3881 if (STACK_TOP_P (operands
[0]))
3882 return strcat (buf
, AS2C (%y2
,%0));
3884 return strcat (buf
, AS2C (%2,%0));
3888 if (GET_CODE (operands
[1]) == MEM
)
3889 return strcat (buf
, AS1 (r
%z1
,%1));
3891 if (GET_CODE (operands
[2]) == MEM
)
3892 return strcat (buf
, AS1 (%z2
,%2));
3894 if (NON_STACK_REG_P (operands
[1]))
3896 output_op_from_reg (operands
[1], strcat (buf
, AS1 (r
%z0
,%1)));
3900 else if (NON_STACK_REG_P (operands
[2]))
3902 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3906 if (! STACK_REG_P (operands
[1]) || ! STACK_REG_P (operands
[2]))
3909 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3910 return strcat (buf
, AS2 (rp
,%2,%0));
3912 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1])))
3913 return strcat (buf
, AS2 (p
,%1,%0));
3915 if (STACK_TOP_P (operands
[0]))
3917 if (STACK_TOP_P (operands
[1]))
3918 return strcat (buf
, AS2C (%y2
,%0));
3920 return strcat (buf
, AS2 (r
,%y1
,%0));
3922 else if (STACK_TOP_P (operands
[1]))
3923 return strcat (buf
, AS2C (%1,%0));
3925 return strcat (buf
, AS2 (r
,%2,%0));
3932 /* Output code for INSN to convert a float to a signed int. OPERANDS
3933 are the insn operands. The output may be SFmode or DFmode and the
3934 input operand may be SImode or DImode. As a special case, make sure
3935 that the 387 stack top dies if the output mode is DImode, because the
3936 hardware requires this. */
3939 output_fix_trunc (insn
, operands
)
3943 int stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
3946 if (! STACK_TOP_P (operands
[1]))
3949 xops
[0] = GEN_INT (12);
3950 xops
[1] = operands
[4];
3952 output_asm_insn (AS1 (fnstc
%W2
,%2), operands
);
3953 output_asm_insn (AS2 (mov
%L2
,%2,%4), operands
);
3954 output_asm_insn (AS2 (mov
%B1
,%0,%h1
), xops
);
3955 output_asm_insn (AS2 (mov
%L4
,%4,%3), operands
);
3956 output_asm_insn (AS1 (fldc
%W3
,%3), operands
);
3958 if (NON_STACK_REG_P (operands
[0]))
3959 output_to_reg (operands
[0], stack_top_dies
, operands
[3]);
3961 else if (GET_CODE (operands
[0]) == MEM
)
3964 output_asm_insn (AS1 (fistp
%z0
,%0), operands
);
3965 else if (GET_MODE (operands
[0]) == DImode
&& ! stack_top_dies
)
3967 /* There is no DImode version of this without a stack pop, so
3968 we must emulate it. It doesn't matter much what the second
3969 instruction is, because the value being pushed on the FP stack
3970 is not used except for the following stack popping store.
3971 This case can only happen without optimization, so it doesn't
3972 matter that it is inefficient. */
3973 output_asm_insn (AS1 (fistp
%z0
,%0), operands
);
3974 output_asm_insn (AS1 (fild
%z0
,%0), operands
);
3977 output_asm_insn (AS1 (fist
%z0
,%0), operands
);
3982 return AS1 (fldc
%W2
,%2);
3985 /* Output code for INSN to compare OPERANDS. The two operands might
3986 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3987 expression. If the compare is in mode CCFPEQmode, use an opcode that
3988 will not fault if a qNaN is present. */
3991 output_float_compare (insn
, operands
)
3996 rtx body
= XVECEXP (PATTERN (insn
), 0, 0);
3997 int unordered_compare
= GET_MODE (SET_SRC (body
)) == CCFPEQmode
;
4000 if (TARGET_CMOVE
&& STACK_REG_P (operands
[1]))
4002 cc_status
.flags
|= CC_FCOMI
;
4003 cc_prev_status
.flags
&= ~CC_TEST_AX
;
4006 if (! STACK_TOP_P (operands
[0]))
4009 operands
[0] = operands
[1];
4011 cc_status
.flags
|= CC_REVERSED
;
4014 if (! STACK_TOP_P (operands
[0]))
4017 stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
4019 if (STACK_REG_P (operands
[1])
4021 && find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1]))
4022 && REGNO (operands
[1]) != FIRST_STACK_REG
)
4024 /* If both the top of the 387 stack dies, and the other operand
4025 is also a stack register that dies, then this must be a
4026 `fcompp' float compare */
4028 if (unordered_compare
)
4030 if (cc_status
.flags
& CC_FCOMI
)
4032 output_asm_insn (AS2 (fucomip
,%y1
,%0), operands
);
4033 output_asm_insn (AS1 (fstp
, %y0
), operands
);
4037 output_asm_insn ("fucompp", operands
);
4041 if (cc_status
.flags
& CC_FCOMI
)
4043 output_asm_insn (AS2 (fcomip
, %y1
,%0), operands
);
4044 output_asm_insn (AS1 (fstp
, %y0
), operands
);
4048 output_asm_insn ("fcompp", operands
);
4053 static char buf
[100];
4055 /* Decide if this is the integer or float compare opcode, or the
4056 unordered float compare. */
4058 if (unordered_compare
)
4059 strcpy (buf
, (cc_status
.flags
& CC_FCOMI
) ? "fucomi" : "fucom");
4060 else if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_FLOAT
)
4061 strcpy (buf
, (cc_status
.flags
& CC_FCOMI
) ? "fcomi" : "fcom");
4063 strcpy (buf
, "ficom");
4065 /* Modify the opcode if the 387 stack is to be popped. */
4070 if (NON_STACK_REG_P (operands
[1]))
4071 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
4072 else if (cc_status
.flags
& CC_FCOMI
)
4074 output_asm_insn (strcat (buf
, AS2 (%z1
,%y1
,%0)), operands
);
4078 output_asm_insn (strcat (buf
, AS1 (%z1
,%y1
)), operands
);
4081 /* Now retrieve the condition code. */
4083 return output_fp_cc0_set (insn
);
4086 /* Output opcodes to transfer the results of FP compare or test INSN
4087 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
4088 result of the compare or test is unordered, no comparison operator
4089 succeeds except NE. Return an output template, if any. */
4092 output_fp_cc0_set (insn
)
4099 xops
[0] = gen_rtx_REG (HImode
, 0);
4100 output_asm_insn (AS1 (fnsts
%W0
,%0), xops
);
4102 if (! TARGET_IEEE_FP
)
4104 if (!(cc_status
.flags
& CC_REVERSED
))
4106 next
= next_cc0_user (insn
);
4108 if (GET_CODE (next
) == JUMP_INSN
4109 && GET_CODE (PATTERN (next
)) == SET
4110 && SET_DEST (PATTERN (next
)) == pc_rtx
4111 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
4112 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
4113 else if (GET_CODE (PATTERN (next
)) == SET
)
4114 code
= GET_CODE (SET_SRC (PATTERN (next
)));
4118 if (code
== GT
|| code
== LT
|| code
== EQ
|| code
== NE
4119 || code
== LE
|| code
== GE
)
4121 /* We will test eax directly. */
4122 cc_status
.flags
|= CC_TEST_AX
;
4130 next
= next_cc0_user (insn
);
4131 if (next
== NULL_RTX
)
4134 if (GET_CODE (next
) == JUMP_INSN
4135 && GET_CODE (PATTERN (next
)) == SET
4136 && SET_DEST (PATTERN (next
)) == pc_rtx
4137 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
4138 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
4139 else if (GET_CODE (PATTERN (next
)) == SET
)
4141 if (GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
4142 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
4144 code
= GET_CODE (SET_SRC (PATTERN (next
)));
4147 else if (GET_CODE (PATTERN (next
)) == PARALLEL
4148 && GET_CODE (XVECEXP (PATTERN (next
), 0, 0)) == SET
)
4150 if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next
), 0, 0))) == IF_THEN_ELSE
)
4151 code
= GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next
), 0, 0)), 0));
4153 code
= GET_CODE (SET_SRC (XVECEXP (PATTERN (next
), 0, 0)));
4158 xops
[0] = gen_rtx_REG (QImode
, 0);
4163 xops
[1] = GEN_INT (0x45);
4164 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4169 xops
[1] = GEN_INT (0x45);
4170 xops
[2] = GEN_INT (0x01);
4171 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4172 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
4177 xops
[1] = GEN_INT (0x05);
4178 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4183 xops
[1] = GEN_INT (0x45);
4184 xops
[2] = GEN_INT (0x40);
4185 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4186 output_asm_insn (AS1 (dec
%B0
,%h0
), xops
);
4187 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
4192 xops
[1] = GEN_INT (0x45);
4193 xops
[2] = GEN_INT (0x40);
4194 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4195 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
4200 xops
[1] = GEN_INT (0x44);
4201 xops
[2] = GEN_INT (0x40);
4202 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4203 output_asm_insn (AS2 (xor%B0
,%2,%h0
), xops
);
4218 #define MAX_386_STACK_LOCALS 2
4220 static rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
4222 /* Define the structure for the machine field in struct function. */
4223 struct machine_function
4225 rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
4227 char pic_label_name
[256];
4230 /* Functions to save and restore i386_stack_locals.
4231 These will be called, via pointer variables,
4232 from push_function_context and pop_function_context. */
4235 save_386_machine_status (p
)
4239 = (struct machine_function
*) xmalloc (sizeof (struct machine_function
));
4240 bcopy ((char *) i386_stack_locals
, (char *) p
->machine
->i386_stack_locals
,
4241 sizeof i386_stack_locals
);
4242 p
->machine
->pic_label_rtx
= pic_label_rtx
;
4243 bcopy (pic_label_name
, p
->machine
->pic_label_name
, 256);
4247 restore_386_machine_status (p
)
4250 bcopy ((char *) p
->machine
->i386_stack_locals
, (char *) i386_stack_locals
,
4251 sizeof i386_stack_locals
);
4252 pic_label_rtx
= p
->machine
->pic_label_rtx
;
4253 bcopy (p
->machine
->pic_label_name
, pic_label_name
, 256);
4258 /* Clear stack slot assignments remembered from previous functions.
4259 This is called from INIT_EXPANDERS once before RTL is emitted for each
4263 clear_386_stack_locals ()
4265 enum machine_mode mode
;
4268 for (mode
= VOIDmode
; (int) mode
< (int) MAX_MACHINE_MODE
;
4269 mode
= (enum machine_mode
) ((int) mode
+ 1))
4270 for (n
= 0; n
< MAX_386_STACK_LOCALS
; n
++)
4271 i386_stack_locals
[(int) mode
][n
] = NULL_RTX
;
4273 pic_label_rtx
= NULL_RTX
;
4274 bzero (pic_label_name
, 256);
4275 /* Arrange to save and restore i386_stack_locals around nested functions. */
4276 save_machine_status
= save_386_machine_status
;
4277 restore_machine_status
= restore_386_machine_status
;
4280 /* Return a MEM corresponding to a stack slot with mode MODE.
4281 Allocate a new slot if necessary.
4283 The RTL for a function can have several slots available: N is
4284 which slot to use. */
4287 assign_386_stack_local (mode
, n
)
4288 enum machine_mode mode
;
4291 if (n
< 0 || n
>= MAX_386_STACK_LOCALS
)
4294 if (i386_stack_locals
[(int) mode
][n
] == NULL_RTX
)
4295 i386_stack_locals
[(int) mode
][n
]
4296 = assign_stack_local (mode
, GET_MODE_SIZE (mode
), 0);
4298 return i386_stack_locals
[(int) mode
][n
];
4303 enum machine_mode mode
;
4305 return (GET_CODE (op
) == MULT
);
4310 enum machine_mode mode
;
4312 return (GET_CODE (op
) == DIV
);
4316 /* Create a new copy of an rtx.
4317 Recursively copies the operands of the rtx,
4318 except for those few rtx codes that are sharable.
4319 Doesn't share CONST */
4327 register RTX_CODE code
;
4328 register char *format_ptr
;
4330 code
= GET_CODE (orig
);
4343 /* SCRATCH must be shared because they represent distinct values. */
4348 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4349 a LABEL_REF, it isn't sharable. */
4350 if (GET_CODE (XEXP (orig
, 0)) == PLUS
4351 && GET_CODE (XEXP (XEXP (orig
, 0), 0)) == SYMBOL_REF
4352 && GET_CODE (XEXP (XEXP (orig
, 0), 1)) == CONST_INT
)
4356 /* A MEM with a constant address is not sharable. The problem is that
4357 the constant address may need to be reloaded. If the mem is shared,
4358 then reloading one copy of this mem will cause all copies to appear
4359 to have been reloaded. */
4362 copy
= rtx_alloc (code
);
4363 PUT_MODE (copy
, GET_MODE (orig
));
4364 copy
->in_struct
= orig
->in_struct
;
4365 copy
->volatil
= orig
->volatil
;
4366 copy
->unchanging
= orig
->unchanging
;
4367 copy
->integrated
= orig
->integrated
;
4369 copy
->is_spill_rtx
= orig
->is_spill_rtx
;
4371 format_ptr
= GET_RTX_FORMAT (GET_CODE (copy
));
4373 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (copy
)); i
++)
4375 switch (*format_ptr
++)
4378 XEXP (copy
, i
) = XEXP (orig
, i
);
4379 if (XEXP (orig
, i
) != NULL
)
4380 XEXP (copy
, i
) = copy_rtx (XEXP (orig
, i
));
4385 XEXP (copy
, i
) = XEXP (orig
, i
);
4390 XVEC (copy
, i
) = XVEC (orig
, i
);
4391 if (XVEC (orig
, i
) != NULL
)
4393 XVEC (copy
, i
) = rtvec_alloc (XVECLEN (orig
, i
));
4394 for (j
= 0; j
< XVECLEN (copy
, i
); j
++)
4395 XVECEXP (copy
, i
, j
) = copy_rtx (XVECEXP (orig
, i
, j
));
4400 XWINT (copy
, i
) = XWINT (orig
, i
);
4404 XINT (copy
, i
) = XINT (orig
, i
);
4409 XSTR (copy
, i
) = XSTR (orig
, i
);
4420 /* Try to rewrite a memory address to make it valid */
/* NOTE(review): this block is garbled by extraction.  The embedded
   original line numbers (4420-4687) are non-contiguous, so statements
   (function header, braces, else-arms, abort calls, obfree calls) are
   missing.  Recover the full text from the original i386.c before
   compiling; comments below only mark the visible structure.  */
4423 rewrite_address (mem_rtx
)
4426 rtx index_rtx
, base_rtx
, offset_rtx
, scale_rtx
, ret_rtx
;
4428 int offset_adjust
= 0;
4429 int was_only_offset
= 0;
4430 rtx mem_addr
= XEXP (mem_rtx
, 0);
4431 char *storage
= oballoc (0);
4433 int is_spill_rtx
= 0;
4435 in_struct
= MEM_IN_STRUCT_P (mem_rtx
);
4436 is_spill_rtx
= RTX_IS_SPILL_P (mem_rtx
);
/* First case: reassociate (a + (b + c)) into ((a + b) + c) and accept
   it if the result is a valid address.  */
4438 if (GET_CODE (mem_addr
) == PLUS
4439 && GET_CODE (XEXP (mem_addr
, 1)) == PLUS
4440 && GET_CODE (XEXP (XEXP (mem_addr
, 1), 0)) == REG
)
4442 /* This part is utilized by the combiner. */
4444 = gen_rtx (PLUS
, GET_MODE (mem_addr
),
4445 gen_rtx (PLUS
, GET_MODE (XEXP (mem_addr
, 1)),
4446 XEXP (mem_addr
, 0), XEXP (XEXP (mem_addr
, 1), 0)),
4447 XEXP (XEXP (mem_addr
, 1), 1));
4449 if (memory_address_p (GET_MODE (mem_rtx
), ret_rtx
))
4451 XEXP (mem_rtx
, 0) = ret_rtx
;
4452 RTX_IS_SPILL_P (ret_rtx
) = is_spill_rtx
;
4459 /* This part is utilized by loop.c.
4460 If the address contains PLUS (reg,const) and this pattern is invalid
4461 in this case - try to rewrite the address to make it valid. */
4462 storage
= oballoc (0);
4463 index_rtx
= base_rtx
= offset_rtx
= NULL
;
4465 /* Find the base index and offset elements of the memory address. */
4466 if (GET_CODE (mem_addr
) == PLUS
)
4468 if (GET_CODE (XEXP (mem_addr
, 0)) == REG
)
4470 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4471 base_rtx
= XEXP (mem_addr
, 1), index_rtx
= XEXP (mem_addr
, 0);
4473 base_rtx
= XEXP (mem_addr
, 0), offset_rtx
= XEXP (mem_addr
, 1);
4476 else if (GET_CODE (XEXP (mem_addr
, 0)) == MULT
)
4478 index_rtx
= XEXP (mem_addr
, 0);
4479 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4480 base_rtx
= XEXP (mem_addr
, 1);
4482 offset_rtx
= XEXP (mem_addr
, 1);
4485 else if (GET_CODE (XEXP (mem_addr
, 0)) == PLUS
)
/* Nested PLUS: pick apart ((mult + const) + reg) + symbol vs. the
   simpler (x + y) + offset shape.  */
4487 if (GET_CODE (XEXP (XEXP (mem_addr
, 0), 0)) == PLUS
4488 && GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0)) == MULT
4489 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 0))
4491 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 1))
4493 && (GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1))
4495 && GET_CODE (XEXP (XEXP (mem_addr
, 0), 1)) == REG
4496 && GET_CODE (XEXP (mem_addr
, 1)) == SYMBOL_REF
)
4498 index_rtx
= XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0);
4499 offset_rtx
= XEXP (mem_addr
, 1);
4500 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4501 offset_adjust
= INTVAL (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1));
4505 offset_rtx
= XEXP (mem_addr
, 1);
4506 index_rtx
= XEXP (XEXP (mem_addr
, 0), 0);
4507 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4511 else if (GET_CODE (XEXP (mem_addr
, 0)) == CONST_INT
)
4513 was_only_offset
= 1;
4516 offset_rtx
= XEXP (mem_addr
, 1);
4517 offset_adjust
= INTVAL (XEXP (mem_addr
, 0));
4518 if (offset_adjust
== 0)
4520 XEXP (mem_rtx
, 0) = offset_rtx
;
4521 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4531 else if (GET_CODE (mem_addr
) == MULT
)
4532 index_rtx
= mem_addr
;
/* Split a MULT index into its register part and scale constant.  */
4539 if (index_rtx
!= 0 && GET_CODE (index_rtx
) == MULT
)
4541 if (GET_CODE (XEXP (index_rtx
, 1)) != CONST_INT
)
4547 scale_rtx
= XEXP (index_rtx
, 1);
4548 scale
= INTVAL (scale_rtx
);
4549 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4552 /* Now find which of the elements are invalid and try to fix them. */
4553 if (index_rtx
&& GET_CODE (index_rtx
) == CONST_INT
&& base_rtx
== NULL
)
4555 offset_adjust
= INTVAL (index_rtx
) * scale
;
4557 if (offset_rtx
!= 0 && CONSTANT_P (offset_rtx
))
4558 offset_rtx
= plus_constant (offset_rtx
, offset_adjust
);
4559 else if (offset_rtx
== 0)
4560 offset_rtx
= const0_rtx
;
4562 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4563 XEXP (mem_rtx
, 0) = offset_rtx
;
/* Fold a (reg + const) base or a constant base into offset_adjust.  */
4567 if (base_rtx
&& GET_CODE (base_rtx
) == PLUS
4568 && GET_CODE (XEXP (base_rtx
, 0)) == REG
4569 && GET_CODE (XEXP (base_rtx
, 1)) == CONST_INT
)
4571 offset_adjust
+= INTVAL (XEXP (base_rtx
, 1));
4572 base_rtx
= copy_all_rtx (XEXP (base_rtx
, 0));
4575 else if (base_rtx
&& GET_CODE (base_rtx
) == CONST_INT
)
4577 offset_adjust
+= INTVAL (base_rtx
);
/* Likewise fold a (reg + const) index, scaled, into offset_adjust.  */
4581 if (index_rtx
&& GET_CODE (index_rtx
) == PLUS
4582 && GET_CODE (XEXP (index_rtx
, 0)) == REG
4583 && GET_CODE (XEXP (index_rtx
, 1)) == CONST_INT
)
4585 offset_adjust
+= INTVAL (XEXP (index_rtx
, 1)) * scale
;
4586 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
/* Validate the index and base; the failure arms (original lines
   4594-4608) are missing from the extraction.  */
4591 if (! LEGITIMATE_INDEX_P (index_rtx
)
4592 && ! (index_rtx
== stack_pointer_rtx
&& scale
== 1
4593 && base_rtx
== NULL
))
4602 if (! LEGITIMATE_INDEX_P (base_rtx
) && GET_CODE (base_rtx
) != REG
)
4609 if (offset_adjust
!= 0)
4611 if (offset_rtx
!= 0 && CONSTANT_P (offset_rtx
))
4612 offset_rtx
= plus_constant (offset_rtx
, offset_adjust
);
4614 offset_rtx
= const0_rtx
;
/* Rebuild the address from the validated base/index/scale/offset
   pieces; each arm appends the offset only when it is nonzero.  */
4622 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
),
4623 gen_rtx (MULT
, GET_MODE (index_rtx
),
4624 index_rtx
, scale_rtx
),
4627 if (GET_CODE (offset_rtx
) != CONST_INT
4628 || INTVAL (offset_rtx
) != 0)
4629 ret_rtx
= gen_rtx (PLUS
, GET_MODE (ret_rtx
),
4630 ret_rtx
, offset_rtx
);
4634 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
),
4635 index_rtx
, base_rtx
);
4637 if (GET_CODE (offset_rtx
) != CONST_INT
4638 || INTVAL (offset_rtx
) != 0)
4639 ret_rtx
= gen_rtx (PLUS
, GET_MODE (ret_rtx
),
4640 ret_rtx
, offset_rtx
);
4647 ret_rtx
= gen_rtx (MULT
, GET_MODE (index_rtx
),
4648 index_rtx
, scale_rtx
);
4650 if (GET_CODE (offset_rtx
) != CONST_INT
4651 || INTVAL (offset_rtx
) != 0)
4652 ret_rtx
= gen_rtx (PLUS
, GET_MODE (ret_rtx
),
4653 ret_rtx
, offset_rtx
);
4657 if (GET_CODE (offset_rtx
) == CONST_INT
4658 && INTVAL (offset_rtx
) == 0)
4659 ret_rtx
= index_rtx
;
4661 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
),
4662 index_rtx
, offset_rtx
);
4670 if (GET_CODE (offset_rtx
) == CONST_INT
4671 && INTVAL (offset_rtx
) == 0)
4674 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
), base_rtx
,
4677 else if (was_only_offset
)
4678 ret_rtx
= offset_rtx
;
/* Install the rewritten address, preserving the spill flag.  */
4686 XEXP (mem_rtx
, 0) = ret_rtx
;
4687 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4698 /* Return 1 if the first insn to set cc before INSN also sets the register
4699 REG_RTX; otherwise return 0. */
/* NOTE(review): garbled extraction -- the loop braces and the return
   statements (original lines 4705-4734 are non-contiguous) are missing.
   The visible code scans backward via PREV_INSN, skips NOTEs, and
   classifies the first SET found with sets_condition_code /
   doesnt_set_condition_code.  Recover the full text before compiling.  */
4701 last_to_set_cc (reg_rtx
, insn
)
4704 rtx prev_insn
= PREV_INSN (insn
);
4708 if (GET_CODE (prev_insn
) == NOTE
)
4711 else if (GET_CODE (prev_insn
) == INSN
)
4713 if (GET_CODE (PATTERN (prev_insn
)) != SET
)
4716 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn
)), reg_rtx
))
4718 if (sets_condition_code (SET_SRC (PATTERN (prev_insn
))))
4724 else if (! doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn
))))
4731 prev_insn
= PREV_INSN (prev_insn
);
/* NOTE(review): only the header and switch opener survive extraction;
   every case arm of the switch (original lines 4742-4750) is missing.
   Recover the full text from the original i386.c before compiling.  */
4738 doesnt_set_condition_code (pat
)
4741 switch (GET_CODE (pat
))
/* NOTE(review): only the header and switch opener survive extraction;
   every case arm of the switch (original lines 4758-4775) is missing.
   Recover the full text from the original i386.c before compiling.  */
4754 sets_condition_code (pat
)
4757 switch (GET_CODE (pat
))
4779 str_immediate_operand (op
, mode
)
4781 enum machine_mode mode
;
4783 if (GET_CODE (op
) == CONST_INT
&& INTVAL (op
) <= 32 && INTVAL (op
) >= 0)
4793 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4794 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4795 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4796 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
))
4802 /* Return 1 if the mode of the SET_DEST of insn is floating point
4803 and it is not an fld or a move from memory to memory.
4804 Otherwise return 0 */
4810 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4811 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4812 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4813 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4814 && GET_CODE (SET_DEST (PATTERN (insn
))) == REG
4815 && REGNO (SET_DEST (PATTERN (insn
))) >= FIRST_FLOAT_REG
4816 && GET_CODE (SET_SRC (insn
)) != MEM
)
4822 /* Return 1 if the mode of the SET_DEST of INSN is floating point and is
4823 memory and the source is a register. */
4829 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4830 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4831 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4832 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4833 && GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
4834 && GET_CODE (SET_SRC (PATTERN (insn
))) == REG
)
4840 /* Return 1 if DEP_INSN sets a register which INSN uses as a base
4841 or index to reference memory.
4842 otherwise return 0 */
4845 agi_dependent (insn
, dep_insn
)
4848 if (GET_CODE (dep_insn
) == INSN
4849 && GET_CODE (PATTERN (dep_insn
)) == SET
4850 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == REG
)
4851 return reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn
)), insn
);
4853 if (GET_CODE (dep_insn
) == INSN
&& GET_CODE (PATTERN (dep_insn
)) == SET
4854 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == MEM
4855 && push_operand (SET_DEST (PATTERN (dep_insn
)),
4856 GET_MODE (SET_DEST (PATTERN (dep_insn
)))))
4857 return reg_mentioned_in_mem (stack_pointer_rtx
, insn
);
4862 /* Return 1 if reg is used in rtl as a base or index for a memory ref
4863 otherwise return 0. */
/* NOTE(review): garbled extraction -- the function header, a switch over
   terminal rtx codes (original lines 4867-4893) and the return statements
   are missing.  The visible code tests the MEM case directly, then walks
   the operands recursively using the rtx format string ('E' = vector,
   'e' = expression).  Recover the full text before compiling.  */
4866 reg_mentioned_in_mem (reg
, rtl
)
4871 register enum rtx_code code
;
4876 code
= GET_CODE (rtl
);
4894 if (code
== MEM
&& reg_mentioned_p (reg
, rtl
))
4897 fmt
= GET_RTX_FORMAT (code
);
4898 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4902 for (j
= XVECLEN (rtl
, i
) - 1; j
>= 0; j
--)
4903 if (reg_mentioned_in_mem (reg
, XVECEXP (rtl
, i
, j
)))
4907 else if (fmt
[i
] == 'e' && reg_mentioned_in_mem (reg
, XEXP (rtl
, i
)))
4914 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4916 operands[0] = result, initialized with the startaddress
4917 operands[1] = alignment of the address.
4918 operands[2] = scratch register, initialized with the startaddress when
4919 not aligned, otherwise undefined
4921 This is just the body. It needs the initialisations mentioned above and
4922 some address computing at the end. These things are done in i386.md. */
4925 output_strlen_unroll (operands
)
4930 xops
[0] = operands
[0]; /* Result */
4931 /* operands[1]; * Alignment */
4932 xops
[1] = operands
[2]; /* Scratch */
4933 xops
[2] = GEN_INT (0);
4934 xops
[3] = GEN_INT (2);
4935 xops
[4] = GEN_INT (3);
4936 xops
[5] = GEN_INT (4);
4937 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4938 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4939 xops
[8] = gen_label_rtx (); /* label of main loop */
4941 if (TARGET_USE_Q_REG
&& QI_REG_P (xops
[1]))
4942 xops
[9] = gen_label_rtx (); /* pentium optimisation */
4944 xops
[10] = gen_label_rtx (); /* end label 2 */
4945 xops
[11] = gen_label_rtx (); /* end label 1 */
4946 xops
[12] = gen_label_rtx (); /* end label */
4947 /* xops[13] * Temporary used */
4948 xops
[14] = GEN_INT (0xff);
4949 xops
[15] = GEN_INT (0xff00);
4950 xops
[16] = GEN_INT (0xff0000);
4951 xops
[17] = GEN_INT (0xff000000);
4953 /* Loop to check 1..3 bytes for null to get an aligned pointer. */
4955 /* Is there a known alignment and is it less than 4? */
4956 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) < 4)
4958 /* Is there a known alignment and is it not 2? */
4959 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
4961 xops
[6] = gen_label_rtx (); /* Label when aligned to 3-byte */
4962 xops
[7] = gen_label_rtx (); /* Label when aligned to 2-byte */
4964 /* Leave just the 3 lower bits.
4965 If this is a q-register, then the high part is used later
4966 therefore use andl rather than andb. */
4967 output_asm_insn (AS2 (and%L1
,%4,%1), xops
);
4969 /* Is aligned to 4-byte address when zero */
4970 output_asm_insn (AS1 (je
,%l8
), xops
);
4972 /* Side-effect even Parity when %eax == 3 */
4973 output_asm_insn (AS1 (jp
,%6), xops
);
4975 /* Is it aligned to 2 bytes ? */
4976 if (QI_REG_P (xops
[1]))
4977 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
4979 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
4981 output_asm_insn (AS1 (je
,%7), xops
);
4985 /* Since the alignment is 2, we have to check 2 or 0 bytes;
4986 check if is aligned to 4 - byte. */
4987 output_asm_insn (AS2 (and%L1
,%3,%1), xops
);
4989 /* Is aligned to 4-byte address when zero */
4990 output_asm_insn (AS1 (je
,%l8
), xops
);
4993 xops
[13] = gen_rtx_MEM (QImode
, xops
[0]);
4995 /* Now compare the bytes; compare with the high part of a q-reg
4996 gives shorter code. */
4997 if (QI_REG_P (xops
[1]))
4999 /* Compare the first n unaligned byte on a byte per byte basis. */
5000 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
5002 /* When zero we reached the end. */
5003 output_asm_insn (AS1 (je
,%l12
), xops
);
5005 /* Increment the address. */
5006 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5008 /* Not needed with an alignment of 2 */
5009 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
5011 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5012 CODE_LABEL_NUMBER (xops
[7]));
5013 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
5014 output_asm_insn (AS1 (je
,%l12
), xops
);
5015 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5017 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5018 CODE_LABEL_NUMBER (xops
[6]));
5021 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
5025 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
5026 output_asm_insn (AS1 (je
,%l12
), xops
);
5027 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5029 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5030 CODE_LABEL_NUMBER (xops
[7]));
5031 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
5032 output_asm_insn (AS1 (je
,%l12
), xops
);
5033 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5035 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5036 CODE_LABEL_NUMBER (xops
[6]));
5037 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
5040 output_asm_insn (AS1 (je
,%l12
), xops
);
5041 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5044 /* Generate loop to check 4 bytes at a time. It is not a good idea to
5045 align this loop. It gives only huge programs, but does not help to
5047 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[8]));
5049 xops
[13] = gen_rtx_MEM (SImode
, xops
[0]);
5050 output_asm_insn (AS2 (mov
%L1
,%13,%1), xops
);
5052 if (QI_REG_P (xops
[1]))
5054 /* On i586 it is faster to combine the hi- and lo- part as
5055 a kind of lookahead. If anding both yields zero, then one
5056 of both *could* be zero, otherwise none of both is zero;
5057 this saves one instruction, on i486 this is slower
5058 tested with P-90, i486DX2-66, AMD486DX2-66 */
5061 output_asm_insn (AS2 (test
%B1
,%h1
,%b1
), xops
);
5062 output_asm_insn (AS1 (jne
,%l9
), xops
);
5065 /* Check first byte. */
5066 output_asm_insn (AS2 (test
%B1
,%b1
,%b1
), xops
);
5067 output_asm_insn (AS1 (je
,%l12
), xops
);
5069 /* Check second byte. */
5070 output_asm_insn (AS2 (test
%B1
,%h1
,%h1
), xops
);
5071 output_asm_insn (AS1 (je
,%l11
), xops
);
5074 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5075 CODE_LABEL_NUMBER (xops
[9]));
5080 /* Check first byte. */
5081 output_asm_insn (AS2 (test
%L1
,%14,%1), xops
);
5082 output_asm_insn (AS1 (je
,%l12
), xops
);
5084 /* Check second byte. */
5085 output_asm_insn (AS2 (test
%L1
,%15,%1), xops
);
5086 output_asm_insn (AS1 (je
,%l11
), xops
);
5089 /* Check third byte. */
5090 output_asm_insn (AS2 (test
%L1
,%16,%1), xops
);
5091 output_asm_insn (AS1 (je
,%l10
), xops
);
5093 /* Check fourth byte and increment address. */
5094 output_asm_insn (AS2 (add
%L0
,%5,%0), xops
);
5095 output_asm_insn (AS2 (test
%L1
,%17,%1), xops
);
5096 output_asm_insn (AS1 (jne
,%l8
), xops
);
5098 /* Now generate fixups when the compare stops within a 4-byte word. */
5099 output_asm_insn (AS2 (sub
%L0
,%4,%0), xops
);
5101 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[10]));
5102 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5104 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[11]));
5105 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5107 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[12]));