1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 92, 94, 95, 96, 1997 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
40 #ifdef EXTRA_CONSTRAINT
41 /* If EXTRA_CONSTRAINT is defined, then the 'S'
42 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
43 asm statements that need 'S' for class SIREG will break. */
44 error EXTRA_CONSTRAINT conflicts with S constraint letter
45 /* The previous line used to be #error, but some compilers barf
46 even if the conditional was untrue. */
49 #ifndef CHECK_STACK_LIMIT
50 #define CHECK_STACK_LIMIT -1
53 /* Type of an operand for ix86_{binary,unary}_operator_ok */
61 /* Processor costs (relative to an add) */
62 struct processor_costs i386_cost
= { /* 386 specific costs */
63 1, /* cost of an add instruction */
64 1, /* cost of a lea instruction */
65 3, /* variable shift costs */
66 2, /* constant shift costs */
67 6, /* cost of starting a multiply */
68 1, /* cost of multiply per each bit set */
69 23 /* cost of a divide/mod */
72 struct processor_costs i486_cost
= { /* 486 specific costs */
73 1, /* cost of an add instruction */
74 1, /* cost of a lea instruction */
75 3, /* variable shift costs */
76 2, /* constant shift costs */
77 12, /* cost of starting a multiply */
78 1, /* cost of multiply per each bit set */
79 40 /* cost of a divide/mod */
82 struct processor_costs pentium_cost
= {
83 1, /* cost of an add instruction */
84 1, /* cost of a lea instruction */
85 4, /* variable shift costs */
86 1, /* constant shift costs */
87 11, /* cost of starting a multiply */
88 0, /* cost of multiply per each bit set */
89 25 /* cost of a divide/mod */
92 struct processor_costs pentiumpro_cost
= {
93 1, /* cost of an add instruction */
94 1, /* cost of a lea instruction */
95 3, /* variable shift costs */
96 1, /* constant shift costs */
97 4, /* cost of starting a multiply */
98 0, /* cost of multiply per each bit set */
99 17 /* cost of a divide/mod */
/* Cost table for the processor we are scheduling/costing for.
   Initialized to the Pentium table; reassigned from
   processor_target_table once the -mcpu= option is processed.  */
102 struct processor_costs
*ix86_cost
= &pentium_cost
;
/* Build a MEM rtx of MODE whose address is the frame pointer.  */
104 #define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))
106 extern FILE *asm_out_file
;
107 extern char *strcat ();
/* Forward declarations.  PROTO expands to a real prototype only when
   the host compiler accepts them (pre-ANSI compatibility); the
   char * functions below use old-style empty parameter lists.  */
109 static void ix86_epilogue
PROTO((int));
110 static void ix86_prologue
PROTO((int));
112 char *singlemove_string ();
113 char *output_move_const_single ();
114 char *output_fp_cc0_set ();
/* Assembler register-name tables, indexed by hard register number.
   The initializer macros come from i386.h; qi_* hold the 8-bit
   (QImode) spellings, qi_high_* the high-byte variants.  */
116 char *hi_reg_name
[] = HI_REGISTER_NAMES
;
117 char *qi_reg_name
[] = QI_REGISTER_NAMES
;
118 char *qi_high_reg_name
[] = QI_HIGH_REGISTER_NAMES
;
120 /* Array of the smallest class containing reg number REGNO, indexed by
121 REGNO. Used by REGNO_REG_CLASS in i386.h. */
123 enum reg_class regclass_map
[FIRST_PSEUDO_REGISTER
] =
126 AREG
, DREG
, CREG
, BREG
,
128 SIREG
, DIREG
, INDEX_REGS
, GENERAL_REGS
,
130 FP_TOP_REG
, FP_SECOND_REG
, FLOAT_REGS
, FLOAT_REGS
,
131 FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
,
136 /* Test and compare insns in i386.md store the information needed to
137 generate branch and scc insns here. */
/* The two operands of the most recent compare/test insn; consumed
   later when the branch or scc insn is generated.  */
139 struct rtx_def
*i386_compare_op0
= NULL_RTX
;
140 struct rtx_def
*i386_compare_op1
= NULL_RTX
;
/* rtl generator functions for the pending branch/scc insn.
   NOTE(review): the _eq variant presumably handles the EQ/NE case --
   confirm against the users in i386.md.  */
141 struct rtx_def
*(*i386_compare_gen
)(), *(*i386_compare_gen_eq
)();
143 /* which cpu are we scheduling for */
144 enum processor_type ix86_cpu
;
146 /* which instruction set architecture to use. */
149 /* Strings to hold which cpu and instruction set architecture to use. */
150 char *ix86_cpu_string
; /* for -mcpu=<xxx> */
151 char *ix86_arch_string
; /* for -march=<xxx> */
153 /* Register allocation order */
154 char *i386_reg_alloc_order
;
155 static char regs_allocated
[FIRST_PSEUDO_REGISTER
];
157 /* # of registers to use to pass arguments. */
158 char *i386_regparm_string
;
160 /* i386_regparm_string as a number */
163 /* Alignment to use for loops and jumps: */
165 /* Power of two alignment for loops. */
166 char *i386_align_loops_string
;
168 /* Power of two alignment for non-loop jumps. */
169 char *i386_align_jumps_string
;
171 /* Values 1-5: see jump.c */
172 int i386_branch_cost
;
173 char *i386_branch_cost_string
;
175 /* Power of two alignment for functions. */
176 int i386_align_funcs
;
177 char *i386_align_funcs_string
;
179 /* Power of two alignment for loops. */
180 int i386_align_loops
;
182 /* Power of two alignment for non-loop jumps. */
183 int i386_align_jumps
;
185 /* Sometimes certain combinations of command options do not make
186 sense on a particular target machine. You can define a macro
187 `OVERRIDE_OPTIONS' to take account of this. This macro, if
188 defined, is executed once just after all the command options have
191 Don't use this macro to turn on various extra optimizations for
192 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
203 char *name
; /* Canonical processor name. */
204 enum processor_type processor
; /* Processor type enum value. */
205 struct processor_costs
*cost
; /* Processor costs */
206 int target_enable
; /* Target flags to enable. */
207 int target_disable
; /* Target flags to disable. */
208 } processor_target_table
[]
209 = {{PROCESSOR_I386_STRING
, PROCESSOR_I386
, &i386_cost
, 0, 0},
210 {PROCESSOR_I486_STRING
, PROCESSOR_I486
, &i486_cost
, 0, 0},
211 {PROCESSOR_I586_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
212 {PROCESSOR_PENTIUM_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
213 {PROCESSOR_I686_STRING
, PROCESSOR_PENTIUMPRO
, &pentiumpro_cost
,
215 {PROCESSOR_PENTIUMPRO_STRING
, PROCESSOR_PENTIUMPRO
,
216 &pentiumpro_cost
, 0, 0}};
/* Number of entries in processor_target_table, used as the loop
   bound when matching -march=/-mcpu= strings.  */
218 int ptt_size
= sizeof (processor_target_table
) / sizeof (struct ptt
);
220 #ifdef SUBTARGET_OVERRIDE_OPTIONS
221 SUBTARGET_OVERRIDE_OPTIONS
;
224 /* Validate registers in register allocation order. */
225 if (i386_reg_alloc_order
)
227 for (i
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
231 case 'a': regno
= 0; break;
232 case 'd': regno
= 1; break;
233 case 'c': regno
= 2; break;
234 case 'b': regno
= 3; break;
235 case 'S': regno
= 4; break;
236 case 'D': regno
= 5; break;
237 case 'B': regno
= 6; break;
239 default: fatal ("Register '%c' is unknown", ch
);
242 if (regs_allocated
[regno
])
243 fatal ("Register '%c' already specified in allocation order", ch
);
245 regs_allocated
[regno
] = 1;
249 if (ix86_arch_string
== 0)
251 ix86_arch_string
= PROCESSOR_PENTIUM_STRING
;
252 if (ix86_cpu_string
== 0)
253 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
256 for (i
= 0; i
< ptt_size
; i
++)
257 if (! strcmp (ix86_arch_string
, processor_target_table
[i
].name
))
259 ix86_arch
= processor_target_table
[i
].processor
;
260 if (ix86_cpu_string
== 0)
261 ix86_cpu_string
= processor_target_table
[i
].name
;
267 error ("bad value (%s) for -march= switch", ix86_arch_string
);
268 ix86_arch_string
= PROCESSOR_PENTIUM_STRING
;
269 ix86_arch
= PROCESSOR_DEFAULT
;
272 if (ix86_cpu_string
== 0)
273 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
275 for (j
= 0; j
< ptt_size
; j
++)
276 if (! strcmp (ix86_cpu_string
, processor_target_table
[j
].name
))
278 ix86_cpu
= processor_target_table
[j
].processor
;
279 ix86_cost
= processor_target_table
[j
].cost
;
280 if (i
> j
&& (int) ix86_arch
>= (int) PROCESSOR_PENTIUMPRO
)
281 error ("-mcpu=%s does not support -march=%s",
282 ix86_cpu_string
, ix86_arch_string
);
284 target_flags
|= processor_target_table
[j
].target_enable
;
285 target_flags
&= ~processor_target_table
[j
].target_disable
;
291 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string
);
292 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
293 ix86_cpu
= PROCESSOR_DEFAULT
;
296 /* Validate -mregparm= value. */
297 if (i386_regparm_string
)
299 i386_regparm
= atoi (i386_regparm_string
);
300 if (i386_regparm
< 0 || i386_regparm
> REGPARM_MAX
)
301 fatal ("-mregparm=%d is not between 0 and %d",
302 i386_regparm
, REGPARM_MAX
);
305 /* The 486 suffers more from non-aligned cache line fills, and the
306 larger code size results in a larger cache foot-print and more misses.
307 The 486 has a 16 byte cache line, pentium and pentiumpro have a 32 byte
309 def_align
= (TARGET_486
) ? 4 : 2;
311 /* Validate -malign-loops= value, or provide default. */
312 if (i386_align_loops_string
)
314 i386_align_loops
= atoi (i386_align_loops_string
);
315 if (i386_align_loops
< 0 || i386_align_loops
> MAX_CODE_ALIGN
)
316 fatal ("-malign-loops=%d is not between 0 and %d",
317 i386_align_loops
, MAX_CODE_ALIGN
);
320 i386_align_loops
= 2;
322 /* Validate -malign-jumps= value, or provide default. */
323 if (i386_align_jumps_string
)
325 i386_align_jumps
= atoi (i386_align_jumps_string
);
326 if (i386_align_jumps
< 0 || i386_align_jumps
> MAX_CODE_ALIGN
)
327 fatal ("-malign-jumps=%d is not between 0 and %d",
328 i386_align_jumps
, MAX_CODE_ALIGN
);
331 i386_align_jumps
= def_align
;
333 /* Validate -malign-functions= value, or provide default. */
334 if (i386_align_funcs_string
)
336 i386_align_funcs
= atoi (i386_align_funcs_string
);
337 if (i386_align_funcs
< 0 || i386_align_funcs
> MAX_CODE_ALIGN
)
338 fatal ("-malign-functions=%d is not between 0 and %d",
339 i386_align_funcs
, MAX_CODE_ALIGN
);
342 i386_align_funcs
= def_align
;
344 /* Validate -mbranch-cost= value, or provide default. */
345 if (i386_branch_cost_string
)
347 i386_branch_cost
= atoi (i386_branch_cost_string
);
348 if (i386_branch_cost
< 0 || i386_branch_cost
> 5)
349 fatal ("-mbranch-cost=%d is not between 0 and 5",
353 i386_branch_cost
= 1;
355 /* Keep nonleaf frame pointers. */
356 if (TARGET_OMIT_LEAF_FRAME_POINTER
)
357 flag_omit_frame_pointer
= 1;
360 /* A C statement (sans semicolon) to choose the order in which to
361 allocate hard registers for pseudo-registers local to a basic
364 Store the desired register order in the array `reg_alloc_order'.
365 Element 0 should be the register to allocate first; element 1, the
366 next register; and so on.
368 The macro body should not assume anything about the contents of
369 `reg_alloc_order' before execution of the macro.
371 On most machines, it is not necessary to define this macro. */
374 order_regs_for_local_alloc ()
376 int i
, ch
, order
, regno
;
378 /* User specified the register allocation order. */
380 if (i386_reg_alloc_order
)
382 for (i
= order
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
386 case 'a': regno
= 0; break;
387 case 'd': regno
= 1; break;
388 case 'c': regno
= 2; break;
389 case 'b': regno
= 3; break;
390 case 'S': regno
= 4; break;
391 case 'D': regno
= 5; break;
392 case 'B': regno
= 6; break;
395 reg_alloc_order
[order
++] = regno
;
398 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
400 if (! regs_allocated
[i
])
401 reg_alloc_order
[order
++] = i
;
405 /* If user did not specify a register allocation order, use natural order. */
408 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
409 reg_alloc_order
[i
] = i
;
414 optimization_options (level
)
417 /* For -O2 and beyond, turn off -fschedule-insns by default. It tends to
418 make the problem with not enough registers even worse. */
419 #ifdef INSN_SCHEDULING
421 flag_schedule_insns
= 0;
425 /* Sign-extend a 16-bit constant */
428 i386_sext16_if_const (op
)
431 if (GET_CODE (op
) == CONST_INT
)
433 HOST_WIDE_INT val
= INTVAL (op
);
434 HOST_WIDE_INT sext_val
;
436 sext_val
= val
| ~0xffff;
438 sext_val
= val
& 0xffff;
440 op
= GEN_INT (sext_val
);
445 /* Return nonzero if the rtx is aligned */
448 i386_aligned_reg_p (regno
)
451 return (regno
== STACK_POINTER_REGNUM
452 || (! flag_omit_frame_pointer
&& regno
== FRAME_POINTER_REGNUM
));
459 /* Registers and immediate operands are always "aligned". */
460 if (GET_CODE (op
) != MEM
)
463 /* Don't even try to do any aligned optimizations with volatiles. */
464 if (MEM_VOLATILE_P (op
))
467 /* Get address of memory operand. */
470 switch (GET_CODE (op
))
477 /* Match "reg + offset" */
479 if (GET_CODE (XEXP (op
, 1)) != CONST_INT
)
481 if (INTVAL (XEXP (op
, 1)) & 3)
485 if (GET_CODE (op
) != REG
)
488 /* ... fall through ... */
491 return i386_aligned_reg_p (REGNO (op
));
497 /* Return nonzero if INSN looks like it won't compute useful cc bits
498 as a side effect. This information is only a hint. */
501 i386_cc_probably_useless_p (insn
)
504 return ! next_cc0_user (insn
);
507 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
508 attribute for DECL. The attributes in ATTRIBUTES have previously been
512 i386_valid_decl_attribute_p (decl
, attributes
, identifier
, args
)
521 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
522 attribute for TYPE. The attributes in ATTRIBUTES have previously been
526 i386_valid_type_attribute_p (type
, attributes
, identifier
, args
)
532 if (TREE_CODE (type
) != FUNCTION_TYPE
533 && TREE_CODE (type
) != FIELD_DECL
534 && TREE_CODE (type
) != TYPE_DECL
)
537 /* Stdcall attribute says callee is responsible for popping arguments
538 if they are not variable. */
539 if (is_attribute_p ("stdcall", identifier
))
540 return (args
== NULL_TREE
);
542 /* Cdecl attribute says the callee is a normal C declaration. */
543 if (is_attribute_p ("cdecl", identifier
))
544 return (args
== NULL_TREE
);
546 /* Regparm attribute specifies how many integer arguments are to be
547 passed in registers. */
548 if (is_attribute_p ("regparm", identifier
))
552 if (! args
|| TREE_CODE (args
) != TREE_LIST
553 || TREE_CHAIN (args
) != NULL_TREE
554 || TREE_VALUE (args
) == NULL_TREE
)
557 cst
= TREE_VALUE (args
);
558 if (TREE_CODE (cst
) != INTEGER_CST
)
561 if (TREE_INT_CST_HIGH (cst
) != 0
562 || TREE_INT_CST_LOW (cst
) < 0
563 || TREE_INT_CST_LOW (cst
) > REGPARM_MAX
)
572 /* Return 0 if the attributes for two types are incompatible, 1 if they
573 are compatible, and 2 if they are nearly compatible (which causes a
574 warning to be generated). */
577 i386_comp_type_attributes (type1
, type2
)
585 /* Value is the number of bytes of arguments automatically
586 popped when returning from a subroutine call.
587 FUNDECL is the declaration node of the function (as a tree),
588 FUNTYPE is the data type of the function (as a tree),
589 or for a library call it is an identifier node for the subroutine name.
590 SIZE is the number of bytes of arguments passed on the stack.
592 On the 80386, the RTD insn may be used to pop them if the number
593 of args is fixed, but if the number is variable then the caller
594 must pop them all. RTD can't be used for library calls now
595 because the library is compiled with the Unix compiler.
596 Use of RTD is a selectable option, since it is incompatible with
597 standard Unix calling sequences. If the option is not selected,
598 the caller must always pop the args.
600 The attribute stdcall is equivalent to RTD on a per module basis. */
603 i386_return_pops_args (fundecl
, funtype
, size
)
608 int rtd
= TARGET_RTD
&& (!fundecl
|| TREE_CODE (fundecl
) != IDENTIFIER_NODE
);
610 /* Cdecl functions override -mrtd, and never pop the stack. */
611 if (! lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype
))) {
613 /* Stdcall functions will pop the stack if not variable args. */
614 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype
)))
618 && (TYPE_ARG_TYPES (funtype
) == NULL_TREE
619 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype
)))
624 /* Lose any fake structure return argument. */
625 if (aggregate_value_p (TREE_TYPE (funtype
)))
626 return GET_MODE_SIZE (Pmode
);
632 /* Argument support functions. */
634 /* Initialize a variable CUM of type CUMULATIVE_ARGS
635 for a call to a function whose data type is FNTYPE.
636 For a library call, FNTYPE is 0. */
639 init_cumulative_args (cum
, fntype
, libname
)
640 CUMULATIVE_ARGS
*cum
; /* Argument info to initialize */
641 tree fntype
; /* tree ptr for function decl */
642 rtx libname
; /* SYMBOL_REF of library name or 0 */
644 static CUMULATIVE_ARGS zero_cum
;
645 tree param
, next_param
;
647 if (TARGET_DEBUG_ARG
)
649 fprintf (stderr
, "\ninit_cumulative_args (");
651 fprintf (stderr
, "fntype code = %s, ret code = %s",
652 tree_code_name
[(int) TREE_CODE (fntype
)],
653 tree_code_name
[(int) TREE_CODE (TREE_TYPE (fntype
))]);
655 fprintf (stderr
, "no fntype");
658 fprintf (stderr
, ", libname = %s", XSTR (libname
, 0));
663 /* Set up the number of registers to use for passing arguments. */
664 cum
->nregs
= i386_regparm
;
667 tree attr
= lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype
));
670 cum
->nregs
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr
)));
673 /* Determine if this function has variable arguments. This is
674 indicated by the last argument being 'void_type_mode' if there
675 are no variable arguments. If there are variable arguments, then
676 we won't pass anything in registers */
680 for (param
= (fntype
) ? TYPE_ARG_TYPES (fntype
) : 0;
681 param
!= 0; param
= next_param
)
683 next_param
= TREE_CHAIN (param
);
684 if (next_param
== 0 && TREE_VALUE (param
) != void_type_node
)
689 if (TARGET_DEBUG_ARG
)
690 fprintf (stderr
, ", nregs=%d )\n", cum
->nregs
);
695 /* Update the data in CUM to advance over an argument
696 of mode MODE and data type TYPE.
697 (TYPE is null for libcalls where that information may not be available.) */
700 function_arg_advance (cum
, mode
, type
, named
)
701 CUMULATIVE_ARGS
*cum
; /* current arg information */
702 enum machine_mode mode
; /* current arg mode */
703 tree type
; /* type of the argument or 0 if lib support */
704 int named
; /* whether or not the argument was named */
707 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
708 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
710 if (TARGET_DEBUG_ARG
)
712 "function_adv (sz=%d, wds=%2d, nregs=%d, mode=%s, named=%d)\n\n",
713 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
728 /* Define where to put the arguments to a function.
729 Value is zero to push the argument on the stack,
730 or a hard register in which to store the argument.
732 MODE is the argument's machine mode.
733 TYPE is the data type of the argument (as a tree).
734 This is null for libcalls where that information may
736 CUM is a variable of type CUMULATIVE_ARGS which gives info about
737 the preceding args and about the function being called.
738 NAMED is nonzero if this argument is a named parameter
739 (otherwise it is an extra parameter matching an ellipsis). */
742 function_arg (cum
, mode
, type
, named
)
743 CUMULATIVE_ARGS
*cum
; /* current arg information */
744 enum machine_mode mode
; /* current arg mode */
745 tree type
; /* type of the argument or 0 if lib support */
746 int named
; /* != 0 for normal args, == 0 for ... args */
750 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
751 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
755 /* For now, pass fp/complex values on the stack. */
764 if (words
<= cum
->nregs
)
765 ret
= gen_rtx (REG
, mode
, cum
->regno
);
769 if (TARGET_DEBUG_ARG
)
772 "function_arg (size=%d, wds=%2d, nregs=%d, mode=%4s, named=%d",
773 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
776 fprintf (stderr
, ", reg=%%e%s", reg_names
[ REGNO(ret
) ]);
778 fprintf (stderr
, ", stack");
780 fprintf (stderr
, " )\n");
786 /* For an arg passed partly in registers and partly in memory,
787 this is the number of registers used.
788 For args passed entirely in registers or entirely in memory, zero. */
791 function_arg_partial_nregs (cum
, mode
, type
, named
)
792 CUMULATIVE_ARGS
*cum
; /* current arg information */
793 enum machine_mode mode
; /* current arg mode */
794 tree type
; /* type of the argument or 0 if lib support */
795 int named
; /* != 0 for normal args, == 0 for ... args */
800 /* Output an insn whose source is a 386 integer register. SRC is the
801 rtx for the register, and TEMPLATE is the op-code template. SRC may
802 be either SImode or DImode.
804 The template will be output with operands[0] as SRC, and operands[1]
805 as a pointer to the top of the 386 stack. So a call from floatsidf2
806 would look like this:
808 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
810 where %z0 corresponds to the caller's operands[1], and is used to
811 emit the proper size suffix.
813 ??? Extend this to handle HImode - a 387 can load and store HImode
817 output_op_from_reg (src
, template)
822 int size
= GET_MODE_SIZE (GET_MODE (src
));
825 xops
[1] = AT_SP (Pmode
);
826 xops
[2] = GEN_INT (size
);
827 xops
[3] = stack_pointer_rtx
;
829 if (size
> UNITS_PER_WORD
)
833 if (size
> 2 * UNITS_PER_WORD
)
835 high
= gen_rtx (REG
, SImode
, REGNO (src
) + 2);
836 output_asm_insn (AS1 (push
%L0
,%0), &high
);
839 high
= gen_rtx (REG
, SImode
, REGNO (src
) + 1);
840 output_asm_insn (AS1 (push
%L0
,%0), &high
);
843 output_asm_insn (AS1 (push
%L0
,%0), &src
);
844 output_asm_insn (template, xops
);
845 output_asm_insn (AS2 (add
%L3
,%2,%3), xops
);
848 /* Output an insn to pop an value from the 387 top-of-stack to 386
849 register DEST. The 387 register stack is popped if DIES is true. If
850 the mode of DEST is an integer mode, a `fist' integer store is done,
851 otherwise a `fst' float store is done. */
854 output_to_reg (dest
, dies
, scratch_mem
)
860 int size
= GET_MODE_SIZE (GET_MODE (dest
));
863 xops
[0] = AT_SP (Pmode
);
865 xops
[0] = scratch_mem
;
867 xops
[1] = stack_pointer_rtx
;
868 xops
[2] = GEN_INT (size
);
872 output_asm_insn (AS2 (sub
%L1
,%2,%1), xops
);
874 if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_INT
)
877 output_asm_insn (AS1 (fistp
%z3
,%y0
), xops
);
879 output_asm_insn (AS1 (fist
%z3
,%y0
), xops
);
882 else if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_FLOAT
)
885 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
888 if (GET_MODE (dest
) == XFmode
)
890 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
891 output_asm_insn (AS1 (fld
%z3
,%y0
), xops
);
894 output_asm_insn (AS1 (fst
%z3
,%y0
), xops
);
902 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
904 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
907 if (size
> UNITS_PER_WORD
)
909 dest
= gen_rtx (REG
, SImode
, REGNO (dest
) + 1);
911 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
914 xops
[0] = adj_offsettable_operand (xops
[0], 4);
916 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
919 if (size
> 2 * UNITS_PER_WORD
)
921 dest
= gen_rtx (REG
, SImode
, REGNO (dest
) + 1);
923 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
926 xops
[0] = adj_offsettable_operand (xops
[0], 4);
927 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
934 singlemove_string (operands
)
938 if (GET_CODE (operands
[0]) == MEM
939 && GET_CODE (x
= XEXP (operands
[0], 0)) == PRE_DEC
)
941 if (XEXP (x
, 0) != stack_pointer_rtx
)
945 else if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
946 return output_move_const_single (operands
);
947 else if (GET_CODE (operands
[0]) == REG
|| GET_CODE (operands
[1]) == REG
)
948 return AS2 (mov
%L0
,%1,%0);
949 else if (CONSTANT_P (operands
[1]))
950 return AS2 (mov
%L0
,%1,%0);
953 output_asm_insn ("push%L1 %1", operands
);
958 /* Return a REG that occurs in ADDR with coefficient 1.
959 ADDR can be effectively incremented by incrementing REG. */
965 while (GET_CODE (addr
) == PLUS
)
967 if (GET_CODE (XEXP (addr
, 0)) == REG
)
968 addr
= XEXP (addr
, 0);
969 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
970 addr
= XEXP (addr
, 1);
971 else if (CONSTANT_P (XEXP (addr
, 0)))
972 addr
= XEXP (addr
, 1);
973 else if (CONSTANT_P (XEXP (addr
, 1)))
974 addr
= XEXP (addr
, 0);
979 if (GET_CODE (addr
) == REG
)
984 /* Output an insn to add the constant N to the register X. */
995 output_asm_insn (AS1 (dec
%L0
,%0), xops
);
997 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
998 else if (n
< 0 || n
== 128)
1000 xops
[1] = GEN_INT (-n
);
1001 output_asm_insn (AS2 (sub
%L0
,%1,%0), xops
);
1005 xops
[1] = GEN_INT (n
);
1006 output_asm_insn (AS2 (add
%L0
,%1,%0), xops
);
1010 /* Output assembler code to perform a doubleword move insn
1011 with operands OPERANDS. */
1014 output_move_double (operands
)
1017 enum {REGOP
, OFFSOP
, MEMOP
, PUSHOP
, POPOP
, CNSTOP
, RNDOP
} optype0
, optype1
;
1021 rtx addreg0
= 0, addreg1
= 0;
1022 int dest_overlapped_low
= 0;
1023 int size
= GET_MODE_SIZE (GET_MODE (operands
[0]));
1028 /* First classify both operands. */
1030 if (REG_P (operands
[0]))
1032 else if (offsettable_memref_p (operands
[0]))
1034 else if (GET_CODE (XEXP (operands
[0], 0)) == POST_INC
)
1036 else if (GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
)
1038 else if (GET_CODE (operands
[0]) == MEM
)
1043 if (REG_P (operands
[1]))
1045 else if (CONSTANT_P (operands
[1]))
1047 else if (offsettable_memref_p (operands
[1]))
1049 else if (GET_CODE (XEXP (operands
[1], 0)) == POST_INC
)
1051 else if (GET_CODE (XEXP (operands
[1], 0)) == PRE_DEC
)
1053 else if (GET_CODE (operands
[1]) == MEM
)
1058 /* Check for the cases that the operand constraints are not
1059 supposed to allow to happen. Abort if we get one,
1060 because generating code for these cases is painful. */
1062 if (optype0
== RNDOP
|| optype1
== RNDOP
)
1065 /* If one operand is decrementing and one is incrementing
1066 decrement the former register explicitly
1067 and change that operand into ordinary indexing. */
1069 if (optype0
== PUSHOP
&& optype1
== POPOP
)
1071 /* ??? Can this ever happen on i386? */
1072 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
1073 asm_add (-size
, operands
[0]);
1074 if (GET_MODE (operands
[1]) == XFmode
)
1075 operands
[0] = gen_rtx (MEM
, XFmode
, operands
[0]);
1076 else if (GET_MODE (operands
[0]) == DFmode
)
1077 operands
[0] = gen_rtx (MEM
, DFmode
, operands
[0]);
1079 operands
[0] = gen_rtx (MEM
, DImode
, operands
[0]);
1083 if (optype0
== POPOP
&& optype1
== PUSHOP
)
1085 /* ??? Can this ever happen on i386? */
1086 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
1087 asm_add (-size
, operands
[1]);
1088 if (GET_MODE (operands
[1]) == XFmode
)
1089 operands
[1] = gen_rtx (MEM
, XFmode
, operands
[1]);
1090 else if (GET_MODE (operands
[1]) == DFmode
)
1091 operands
[1] = gen_rtx (MEM
, DFmode
, operands
[1]);
1093 operands
[1] = gen_rtx (MEM
, DImode
, operands
[1]);
1097 /* If an operand is an unoffsettable memory ref, find a register
1098 we can increment temporarily to make it refer to the second word. */
1100 if (optype0
== MEMOP
)
1101 addreg0
= find_addr_reg (XEXP (operands
[0], 0));
1103 if (optype1
== MEMOP
)
1104 addreg1
= find_addr_reg (XEXP (operands
[1], 0));
1106 /* Ok, we can do one word at a time.
1107 Normally we do the low-numbered word first,
1108 but if either operand is autodecrementing then we
1109 do the high-numbered word first.
1111 In either case, set up in LATEHALF the operands to use
1112 for the high-numbered word and in some cases alter the
1113 operands in OPERANDS to be suitable for the low-numbered word. */
1117 if (optype0
== REGOP
)
1119 middlehalf
[0] = gen_rtx (REG
, SImode
, REGNO (operands
[0]) + 1);
1120 latehalf
[0] = gen_rtx (REG
, SImode
, REGNO (operands
[0]) + 2);
1122 else if (optype0
== OFFSOP
)
1124 middlehalf
[0] = adj_offsettable_operand (operands
[0], 4);
1125 latehalf
[0] = adj_offsettable_operand (operands
[0], 8);
1129 middlehalf
[0] = operands
[0];
1130 latehalf
[0] = operands
[0];
1133 if (optype1
== REGOP
)
1135 middlehalf
[1] = gen_rtx (REG
, SImode
, REGNO (operands
[1]) + 1);
1136 latehalf
[1] = gen_rtx (REG
, SImode
, REGNO (operands
[1]) + 2);
1138 else if (optype1
== OFFSOP
)
1140 middlehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1141 latehalf
[1] = adj_offsettable_operand (operands
[1], 8);
1143 else if (optype1
== CNSTOP
)
1145 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1147 REAL_VALUE_TYPE r
; long l
[3];
1149 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1150 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r
, l
);
1151 operands
[1] = GEN_INT (l
[0]);
1152 middlehalf
[1] = GEN_INT (l
[1]);
1153 latehalf
[1] = GEN_INT (l
[2]);
1155 else if (CONSTANT_P (operands
[1]))
1156 /* No non-CONST_DOUBLE constant should ever appear here. */
1161 middlehalf
[1] = operands
[1];
1162 latehalf
[1] = operands
[1];
1168 /* Size is not 12. */
1170 if (optype0
== REGOP
)
1171 latehalf
[0] = gen_rtx (REG
, SImode
, REGNO (operands
[0]) + 1);
1172 else if (optype0
== OFFSOP
)
1173 latehalf
[0] = adj_offsettable_operand (operands
[0], 4);
1175 latehalf
[0] = operands
[0];
1177 if (optype1
== REGOP
)
1178 latehalf
[1] = gen_rtx (REG
, SImode
, REGNO (operands
[1]) + 1);
1179 else if (optype1
== OFFSOP
)
1180 latehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1181 else if (optype1
== CNSTOP
)
1182 split_double (operands
[1], &operands
[1], &latehalf
[1]);
1184 latehalf
[1] = operands
[1];
1187 /* If insn is effectively movd N (sp),-(sp) then we will do the
1188 high word first. We should use the adjusted operand 1
1189 (which is N+4 (sp) or N+8 (sp))
1190 for the low word and middle word as well,
1191 to compensate for the first decrement of sp. */
1192 if (optype0
== PUSHOP
1193 && REGNO (XEXP (XEXP (operands
[0], 0), 0)) == STACK_POINTER_REGNUM
1194 && reg_overlap_mentioned_p (stack_pointer_rtx
, operands
[1]))
1195 middlehalf
[1] = operands
[1] = latehalf
[1];
1197 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1198 if the upper part of reg N does not appear in the MEM, arrange to
1199 emit the move late-half first. Otherwise, compute the MEM address
1200 into the upper part of N and use that as a pointer to the memory
1202 if (optype0
== REGOP
1203 && (optype1
== OFFSOP
|| optype1
== MEMOP
))
1205 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1206 && reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1208 /* If both halves of dest are used in the src memory address,
1209 compute the address into latehalf of dest. */
1211 xops
[0] = latehalf
[0];
1212 xops
[1] = XEXP (operands
[1], 0);
1213 output_asm_insn (AS2 (lea
%L0
,%a1
,%0), xops
);
1214 if (GET_MODE (operands
[1]) == XFmode
)
1216 operands
[1] = gen_rtx (MEM
, XFmode
, latehalf
[0]);
1217 middlehalf
[1] = adj_offsettable_operand (operands
[1], size
-8);
1218 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1222 operands
[1] = gen_rtx (MEM
, DImode
, latehalf
[0]);
1223 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1228 && reg_mentioned_p (middlehalf
[0], XEXP (operands
[1], 0)))
1230 /* Check for two regs used by both source and dest. */
1231 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1232 || reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1235 /* JRV says this can't happen: */
1236 if (addreg0
|| addreg1
)
1239 /* Only the middle reg conflicts; simply put it last. */
1240 output_asm_insn (singlemove_string (operands
), operands
);
1241 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1242 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1246 else if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0)))
1247 /* If the low half of dest is mentioned in the source memory
1248 address, the arrange to emit the move late half first. */
1249 dest_overlapped_low
= 1;
1252 /* If one or both operands autodecrementing,
1253 do the two words, high-numbered first. */
1255 /* Likewise, the first move would clobber the source of the second one,
1256 do them in the other order. This happens only for registers;
1257 such overlap can't happen in memory unless the user explicitly
1258 sets it up, and that is an undefined circumstance. */
1261 if (optype0
== PUSHOP
|| optype1
== PUSHOP
1262 || (optype0
== REGOP
&& optype1
== REGOP
1263 && REGNO (operands
[0]) == REGNO (latehalf
[1]))
1264 || dest_overlapped_low
)
1267 if (optype0
== PUSHOP
|| optype1
== PUSHOP
1268 || (optype0
== REGOP
&& optype1
== REGOP
1269 && ((middlehalf
[1] && REGNO (operands
[0]) == REGNO (middlehalf
[1]))
1270 || REGNO (operands
[0]) == REGNO (latehalf
[1])))
1271 || dest_overlapped_low
)
1273 /* Make any unoffsettable addresses point at high-numbered word. */
1275 asm_add (size
-4, addreg0
);
1277 asm_add (size
-4, addreg1
);
1280 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1282 /* Undo the adds we just did. */
1284 asm_add (-4, addreg0
);
1286 asm_add (-4, addreg1
);
1290 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1292 asm_add (-4, addreg0
);
1294 asm_add (-4, addreg1
);
1297 /* Do low-numbered word. */
1298 return singlemove_string (operands
);
1301 /* Normal case: do the two words, low-numbered first. */
1303 output_asm_insn (singlemove_string (operands
), operands
);
1305 /* Do the middle one of the three words for long double */
1309 asm_add (4, addreg0
);
1311 asm_add (4, addreg1
);
1313 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1316 /* Make any unoffsettable addresses point at high-numbered word. */
1318 asm_add (4, addreg0
);
1320 asm_add (4, addreg1
);
1323 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1325 /* Undo the adds we just did. */
1327 asm_add (4-size
, addreg0
);
1329 asm_add (4-size
, addreg1
);
1334 #define MAX_TMPS 2 /* max temporary registers used */
1336 /* Output the appropriate code to move push memory on the stack */
1339 output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
)
1351 } tmp_info
[MAX_TMPS
];
1353 rtx src
= operands
[1];
1356 int stack_p
= reg_overlap_mentioned_p (stack_pointer_rtx
, src
);
1357 int stack_offset
= 0;
1361 if (! offsettable_memref_p (src
))
1362 fatal_insn ("Source is not offsettable", insn
);
1364 if ((length
& 3) != 0)
1365 fatal_insn ("Pushing non-word aligned size", insn
);
1367 /* Figure out which temporary registers we have available */
1368 for (i
= tmp_start
; i
< n_operands
; i
++)
1370 if (GET_CODE (operands
[i
]) == REG
)
1372 if (reg_overlap_mentioned_p (operands
[i
], src
))
1375 tmp_info
[ max_tmps
++ ].xops
[1] = operands
[i
];
1376 if (max_tmps
== MAX_TMPS
)
1382 for (offset
= length
- 4; offset
>= 0; offset
-= 4)
1384 xops
[0] = adj_offsettable_operand (src
, offset
+ stack_offset
);
1385 output_asm_insn (AS1(push
%L0
,%0), xops
);
1391 for (offset
= length
- 4; offset
>= 0; )
1393 for (num_tmps
= 0; num_tmps
< max_tmps
&& offset
>= 0; num_tmps
++)
1395 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%0,%1);
1396 tmp_info
[num_tmps
].push
= AS1(push
%L0
,%1);
1397 tmp_info
[num_tmps
].xops
[0]
1398 = adj_offsettable_operand (src
, offset
+ stack_offset
);
1402 for (i
= 0; i
< num_tmps
; i
++)
1403 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1405 for (i
= 0; i
< num_tmps
; i
++)
1406 output_asm_insn (tmp_info
[i
].push
, tmp_info
[i
].xops
);
1409 stack_offset
+= 4*num_tmps
;
1415 /* Output the appropriate code to move data between two memory locations */
1418 output_move_memory (operands
, insn
, length
, tmp_start
, n_operands
)
1430 } tmp_info
[MAX_TMPS
];
1432 rtx dest
= operands
[0];
1433 rtx src
= operands
[1];
1434 rtx qi_tmp
= NULL_RTX
;
1440 if (GET_CODE (dest
) == MEM
1441 && GET_CODE (XEXP (dest
, 0)) == PRE_INC
1442 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
)
1443 return output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
);
1445 if (! offsettable_memref_p (src
))
1446 fatal_insn ("Source is not offsettable", insn
);
1448 if (! offsettable_memref_p (dest
))
1449 fatal_insn ("Destination is not offsettable", insn
);
1451 /* Figure out which temporary registers we have available */
1452 for (i
= tmp_start
; i
< n_operands
; i
++)
1454 if (GET_CODE (operands
[i
]) == REG
)
1456 if ((length
& 1) != 0 && qi_tmp
== 0 && QI_REG_P (operands
[i
]))
1457 qi_tmp
= operands
[i
];
1459 if (reg_overlap_mentioned_p (operands
[i
], dest
))
1460 fatal_insn ("Temporary register overlaps the destination", insn
);
1462 if (reg_overlap_mentioned_p (operands
[i
], src
))
1463 fatal_insn ("Temporary register overlaps the source", insn
);
1465 tmp_info
[max_tmps
++].xops
[2] = operands
[i
];
1466 if (max_tmps
== MAX_TMPS
)
1472 fatal_insn ("No scratch registers were found to do memory->memory moves",
1475 if ((length
& 1) != 0)
1478 fatal_insn ("No byte register found when moving odd # of bytes.",
1484 for (num_tmps
= 0; num_tmps
< max_tmps
; num_tmps
++)
1488 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%1,%2);
1489 tmp_info
[num_tmps
].store
= AS2(mov
%L0
,%2,%0);
1490 tmp_info
[num_tmps
].xops
[0]
1491 = adj_offsettable_operand (dest
, offset
);
1492 tmp_info
[num_tmps
].xops
[1]
1493 = adj_offsettable_operand (src
, offset
);
1499 else if (length
>= 2)
1501 tmp_info
[num_tmps
].load
= AS2(mov
%W0
,%1,%2);
1502 tmp_info
[num_tmps
].store
= AS2(mov
%W0
,%2,%0);
1503 tmp_info
[num_tmps
].xops
[0]
1504 = adj_offsettable_operand (dest
, offset
);
1505 tmp_info
[num_tmps
].xops
[1]
1506 = adj_offsettable_operand (src
, offset
);
1515 for (i
= 0; i
< num_tmps
; i
++)
1516 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1518 for (i
= 0; i
< num_tmps
; i
++)
1519 output_asm_insn (tmp_info
[i
].store
, tmp_info
[i
].xops
);
1524 xops
[0] = adj_offsettable_operand (dest
, offset
);
1525 xops
[1] = adj_offsettable_operand (src
, offset
);
1527 output_asm_insn (AS2(mov
%B0
,%1,%2), xops
);
1528 output_asm_insn (AS2(mov
%B0
,%2,%0), xops
);
1535 standard_80387_constant_p (x
)
1538 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1543 if (setjmp (handler
))
1546 set_float_handler (handler
);
1547 REAL_VALUE_FROM_CONST_DOUBLE (d
, x
);
1548 is0
= REAL_VALUES_EQUAL (d
, dconst0
) && !REAL_VALUE_MINUS_ZERO (d
);
1549 is1
= REAL_VALUES_EQUAL (d
, dconst1
);
1550 set_float_handler (NULL_PTR
);
1558 /* Note that on the 80387, other constants, such as pi,
1559 are much slower to load as standard constants
1560 than to load from doubles in memory! */
1567 output_move_const_single (operands
)
1570 if (FP_REG_P (operands
[0]))
1572 int conval
= standard_80387_constant_p (operands
[1]);
1581 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1583 REAL_VALUE_TYPE r
; long l
;
1585 if (GET_MODE (operands
[1]) == XFmode
)
1588 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1589 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
1590 operands
[1] = GEN_INT (l
);
1593 return singlemove_string (operands
);
1596 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1597 reference and a constant. */
1600 symbolic_operand (op
, mode
)
1602 enum machine_mode mode
;
1604 switch (GET_CODE (op
))
1612 return ((GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
1613 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
1614 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
1621 /* Test for a valid operand for a call instruction.
1622 Don't allow the arg pointer register or virtual regs
1623 since they may change into reg + const, which the patterns
1624 can't handle yet. */
1627 call_insn_operand (op
, mode
)
1629 enum machine_mode mode
;
1631 if (GET_CODE (op
) == MEM
1632 && ((CONSTANT_ADDRESS_P (XEXP (op
, 0))
1633 /* This makes a difference for PIC. */
1634 && general_operand (XEXP (op
, 0), Pmode
))
1635 || (GET_CODE (XEXP (op
, 0)) == REG
1636 && XEXP (op
, 0) != arg_pointer_rtx
1637 && ! (REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1638 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
1644 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1648 expander_call_insn_operand (op
, mode
)
1650 enum machine_mode mode
;
1652 if (GET_CODE (op
) == MEM
1653 && (CONSTANT_ADDRESS_P (XEXP (op
, 0))
1654 || (GET_CODE (XEXP (op
, 0)) == REG
1655 && XEXP (op
, 0) != arg_pointer_rtx
1656 && ! (REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1657 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
1663 /* Return 1 if OP is a comparison operator that can use the condition code
1664 generated by an arithmetic operation. */
1667 arithmetic_comparison_operator (op
, mode
)
1669 enum machine_mode mode
;
1673 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
1676 code
= GET_CODE (op
);
1677 if (GET_RTX_CLASS (code
) != '<')
1680 return (code
!= GT
&& code
!= LE
);
1683 /* Returns 1 if OP contains a symbol reference */
1686 symbolic_reference_mentioned_p (op
)
1692 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1695 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1696 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1702 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1703 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1707 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1714 /* Attempt to expand a binary operator. Make the expansion closer to the
1715 actual machine, then just general_operand, which will allow 3 separate
1716 memory references (one output, two input) in a single insn. Return
1717 whether the insn fails, or succeeds. */
1720 ix86_expand_binary_operator (code
, mode
, operands
)
1722 enum machine_mode mode
;
1729 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1730 if (GET_RTX_CLASS (code
) == 'c'
1731 && (rtx_equal_p (operands
[0], operands
[2])
1732 || immediate_operand (operands
[1], mode
)))
1734 rtx temp
= operands
[1];
1735 operands
[1] = operands
[2];
1739 /* If optimizing, copy to regs to improve CSE */
1740 if (TARGET_PSEUDO
&& optimize
1741 && ((reload_in_progress
| reload_completed
) == 0))
1743 if (GET_CODE (operands
[1]) == MEM
1744 && ! rtx_equal_p (operands
[0], operands
[1]))
1745 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1747 if (GET_CODE (operands
[2]) == MEM
)
1748 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1750 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1752 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1754 emit_move_insn (temp
, operands
[1]);
1760 if (!ix86_binary_operator_ok (code
, mode
, operands
))
1762 /* If not optimizing, try to make a valid insn (optimize code
1763 previously did this above to improve chances of CSE) */
1765 if ((! TARGET_PSEUDO
|| !optimize
)
1766 && ((reload_in_progress
| reload_completed
) == 0)
1767 && (GET_CODE (operands
[1]) == MEM
|| GET_CODE (operands
[2]) == MEM
))
1770 if (GET_CODE (operands
[1]) == MEM
1771 && ! rtx_equal_p (operands
[0], operands
[1]))
1773 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1777 if (GET_CODE (operands
[2]) == MEM
)
1779 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1783 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1785 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1787 emit_move_insn (temp
, operands
[1]);
1792 if (modified
&& ! ix86_binary_operator_ok (code
, mode
, operands
))
1802 /* Return TRUE or FALSE depending on whether the binary operator meets the
1803 appropriate constraints. */
1806 ix86_binary_operator_ok (code
, mode
, operands
)
1808 enum machine_mode mode
;
1811 return (GET_CODE (operands
[1]) != MEM
|| GET_CODE (operands
[2]) != MEM
)
1812 && (GET_CODE (operands
[1]) != CONST_INT
|| GET_RTX_CLASS (code
) == 'c');
1815 /* Attempt to expand a unary operator. Make the expansion closer to the
1816 actual machine, then just general_operand, which will allow 2 separate
1817 memory references (one output, one input) in a single insn. Return
1818 whether the insn fails, or succeeds. */
1821 ix86_expand_unary_operator (code
, mode
, operands
)
1823 enum machine_mode mode
;
1828 /* If optimizing, copy to regs to improve CSE */
1831 && ((reload_in_progress
| reload_completed
) == 0)
1832 && GET_CODE (operands
[1]) == MEM
)
1833 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1835 if (! ix86_unary_operator_ok (code
, mode
, operands
))
1837 if ((! TARGET_PSEUDO
|| optimize
== 0)
1838 && ((reload_in_progress
| reload_completed
) == 0)
1839 && GET_CODE (operands
[1]) == MEM
)
1841 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1842 if (! ix86_unary_operator_ok (code
, mode
, operands
))
1852 /* Return TRUE or FALSE depending on whether the unary operator meets the
1853 appropriate constraints. */
1856 ix86_unary_operator_ok (code
, mode
, operands
)
1858 enum machine_mode mode
;
1864 static rtx pic_label_rtx
;
1865 static char pic_label_name
[256];
1866 static int pic_label_no
= 0;
1868 /* This function generates code for -fpic that loads %ebx with
1869 with the return address of the caller and then returns. */
1872 asm_output_function_prefix (file
, name
)
1877 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1878 || current_function_uses_const_pool
);
1879 xops
[0] = pic_offset_table_rtx
;
1880 xops
[1] = stack_pointer_rtx
;
1882 /* Deep branch prediction favors having a return for every call. */
1883 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
1887 if (pic_label_rtx
== 0)
1889 pic_label_rtx
= gen_label_rtx ();
1890 sprintf (pic_label_name
, "LPR%d", pic_label_no
++);
1891 LABEL_NAME (pic_label_rtx
) = pic_label_name
;
1894 prologue_node
= make_node (FUNCTION_DECL
);
1895 DECL_RESULT (prologue_node
) = 0;
1896 #ifdef ASM_DECLARE_FUNCTION_NAME
1897 ASM_DECLARE_FUNCTION_NAME (file
, pic_label_name
, prologue_node
);
1899 output_asm_insn ("movl (%1),%0", xops
);
1900 output_asm_insn ("ret", xops
);
1904 /* Generate the assembly code for function entry.
1905 FILE is an stdio stream to output the code to.
1906 SIZE is an int: how many units of temporary storage to allocate. */
1909 function_prologue (file
, size
)
1913 if (TARGET_SCHEDULE_PROLOGUE
)
1922 /* Expand the prologue into a bunch of separate insns. */
1925 ix86_expand_prologue ()
1927 if (! TARGET_SCHEDULE_PROLOGUE
)
1934 load_pic_register (do_rtl
)
1939 if (TARGET_DEEP_BRANCH_PREDICTION
)
1941 xops
[0] = pic_offset_table_rtx
;
1942 if (pic_label_rtx
== 0)
1944 pic_label_rtx
= gen_label_rtx ();
1945 sprintf (pic_label_name
, "LPR%d", pic_label_no
++);
1946 LABEL_NAME (pic_label_rtx
) = pic_label_name
;
1949 xops
[1] = gen_rtx (MEM
, QImode
,
1950 gen_rtx (SYMBOL_REF
, Pmode
,
1951 LABEL_NAME (pic_label_rtx
)));
1955 emit_insn (gen_prologue_get_pc (xops
[0], xops
[1]));
1956 emit_insn (gen_prologue_set_got (xops
[0],
1957 gen_rtx (SYMBOL_REF
, Pmode
,
1958 "$_GLOBAL_OFFSET_TABLE_"),
1963 output_asm_insn (AS1 (call
,%P1
), xops
);
1964 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops
);
1971 xops
[0] = pic_offset_table_rtx
;
1972 xops
[1] = gen_label_rtx ();
1976 /* We can't put a raw CODE_LABEL into the RTL, and we can't emit
1977 a new CODE_LABEL after reload, so we need a single pattern to
1978 emit the 3 necessary instructions. */
1979 emit_insn (gen_prologue_get_pc_and_set_got (xops
[0]));
1983 output_asm_insn (AS1 (call
,%P1
), xops
);
1984 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
1985 CODE_LABEL_NUMBER (xops
[1]));
1986 output_asm_insn (AS1 (pop
%L0
,%0), xops
);
1987 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops
);
1991 /* When -fpic, we must emit a scheduling barrier, so that the instruction
1992 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
1993 moved before any instruction which implicitly uses the got. */
1996 emit_insn (gen_blockage ());
2000 ix86_prologue (do_rtl
)
2006 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2007 || current_function_uses_const_pool
);
2008 long tsize
= get_frame_size ();
2010 int cfa_offset
= INCOMING_FRAME_SP_OFFSET
, cfa_store_offset
= cfa_offset
;
2012 xops
[0] = stack_pointer_rtx
;
2013 xops
[1] = frame_pointer_rtx
;
2014 xops
[2] = GEN_INT (tsize
);
2016 if (frame_pointer_needed
)
2020 insn
= emit_insn (gen_rtx (SET
, VOIDmode
,
2021 gen_rtx (MEM
, SImode
,
2022 gen_rtx (PRE_DEC
, SImode
,
2023 stack_pointer_rtx
)),
2024 frame_pointer_rtx
));
2026 RTX_FRAME_RELATED_P (insn
) = 1;
2027 insn
= emit_move_insn (xops
[1], xops
[0]);
2028 RTX_FRAME_RELATED_P (insn
) = 1;
2033 output_asm_insn ("push%L1 %1", xops
);
2034 #ifdef INCOMING_RETURN_ADDR_RTX
2035 if (dwarf2out_do_frame ())
2037 char *l
= dwarf2out_cfi_label ();
2039 cfa_store_offset
+= 4;
2040 cfa_offset
= cfa_store_offset
;
2041 dwarf2out_def_cfa (l
, STACK_POINTER_REGNUM
, cfa_offset
);
2042 dwarf2out_reg_save (l
, FRAME_POINTER_REGNUM
, - cfa_store_offset
);
2046 output_asm_insn (AS2 (mov
%L0
,%0,%1), xops
);
2047 #ifdef INCOMING_RETURN_ADDR_RTX
2048 if (dwarf2out_do_frame ())
2049 dwarf2out_def_cfa ("", FRAME_POINTER_REGNUM
, cfa_offset
);
2056 else if (! TARGET_STACK_PROBE
|| tsize
< CHECK_STACK_LIMIT
)
2060 insn
= emit_insn (gen_prologue_set_stack_ptr (xops
[2]));
2061 RTX_FRAME_RELATED_P (insn
) = 1;
2065 output_asm_insn (AS2 (sub
%L0
,%2,%0), xops
);
2066 #ifdef INCOMING_RETURN_ADDR_RTX
2067 if (dwarf2out_do_frame ())
2069 cfa_store_offset
+= tsize
;
2070 if (! frame_pointer_needed
)
2072 cfa_offset
= cfa_store_offset
;
2073 dwarf2out_def_cfa ("", STACK_POINTER_REGNUM
, cfa_offset
);
2081 xops
[3] = gen_rtx (REG
, SImode
, 0);
2083 emit_move_insn (xops
[3], xops
[2]);
2085 output_asm_insn (AS2 (mov
%L0
,%2,%3), xops
);
2087 xops
[3] = gen_rtx (MEM
, FUNCTION_MODE
,
2088 gen_rtx (SYMBOL_REF
, Pmode
, "_alloca"));
2091 emit_call_insn (gen_rtx (CALL
, VOIDmode
, xops
[3], const0_rtx
));
2093 output_asm_insn (AS1 (call
,%P3
), xops
);
2096 /* Note If use enter it is NOT reversed args.
2097 This one is not reversed from intel!!
2098 I think enter is slower. Also sdb doesn't like it.
2099 But if you want it the code is:
2101 xops[3] = const0_rtx;
2102 output_asm_insn ("enter %2,%3", xops);
2106 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
2107 for (regno
= limit
- 1; regno
>= 0; regno
--)
2108 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2109 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2111 xops
[0] = gen_rtx (REG
, SImode
, regno
);
2114 insn
= emit_insn (gen_rtx (SET
, VOIDmode
,
2115 gen_rtx (MEM
, SImode
,
2116 gen_rtx (PRE_DEC
, SImode
,
2117 stack_pointer_rtx
)),
2120 RTX_FRAME_RELATED_P (insn
) = 1;
2124 output_asm_insn ("push%L0 %0", xops
);
2125 #ifdef INCOMING_RETURN_ADDR_RTX
2126 if (dwarf2out_do_frame ())
2128 char *l
= dwarf2out_cfi_label ();
2130 cfa_store_offset
+= 4;
2131 if (! frame_pointer_needed
)
2133 cfa_offset
= cfa_store_offset
;
2134 dwarf2out_def_cfa (l
, STACK_POINTER_REGNUM
, cfa_offset
);
2137 dwarf2out_reg_save (l
, regno
, - cfa_store_offset
);
2144 load_pic_register (do_rtl
);
2146 /* If we are profiling, make sure no instructions are scheduled before
2147 the call to mcount. However, if -fpic, the above call will have
2149 if ((profile_flag
|| profile_block_flag
)
2150 && ! pic_reg_used
&& do_rtl
)
2151 emit_insn (gen_blockage ());
2154 /* Return 1 if it is appropriate to emit `ret' instructions in the
2155 body of a function. Do this only if the epilogue is simple, needing a
2156 couple of insns. Prior to reloading, we can't tell how many registers
2157 must be saved, so return 0 then. Return 0 if there is no frame
2158 marker to de-allocate.
2160 If NON_SAVING_SETJMP is defined and true, then it is not possible
2161 for the epilogue to be simple, so return 0. This is a special case
2162 since NON_SAVING_SETJMP will not cause regs_ever_live to change
2163 until final, but jump_optimize may need to know sooner if a
2167 ix86_can_use_return_insn_p ()
2171 int reglimit
= (frame_pointer_needed
2172 ? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
2173 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2174 || current_function_uses_const_pool
);
2176 #ifdef NON_SAVING_SETJMP
2177 if (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
2181 if (! reload_completed
)
2184 for (regno
= reglimit
- 1; regno
>= 0; regno
--)
2185 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2186 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2189 return nregs
== 0 || ! frame_pointer_needed
;
2192 /* This function generates the assembly code for function exit.
2193 FILE is an stdio stream to output the code to.
2194 SIZE is an int: how many units of temporary storage to deallocate. */
/* NOTE(review): the body of this function was dropped by the extraction
   that produced this dump; only the header line survives below.
   Recover the body from the original i386.c before relying on it.  */
2197 function_epilogue (file
, size
)
/* Restore function stack, frame, and registers, as RTL insns (the
   schedulable-epilogue counterpart of the text path; see ix86_epilogue).
   NOTE(review): body reconstructed by symmetry with ix86_expand_prologue
   — verify against the original i386.c.  */

void
ix86_expand_epilogue ()
{
  ix86_epilogue (1);
}
2213 ix86_epilogue (do_rtl
)
2217 register int nregs
, limit
;
2220 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2221 || current_function_uses_const_pool
);
2222 long tsize
= get_frame_size ();
2224 /* Compute the number of registers to pop */
2226 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
2230 for (regno
= limit
- 1; regno
>= 0; regno
--)
2231 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2232 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2235 /* sp is often unreliable so we must go off the frame pointer.
2237 In reality, we may not care if sp is unreliable, because we can restore
2238 the register relative to the frame pointer. In theory, since each move
2239 is the same speed as a pop, and we don't need the leal, this is faster.
2240 For now restore multiple registers the old way. */
2242 offset
= - tsize
- (nregs
* UNITS_PER_WORD
);
2244 xops
[2] = stack_pointer_rtx
;
2246 /* When -fpic, we must emit a scheduling barrier, so that the instruction
2247 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
2248 moved before any instruction which implicitly uses the got. This
2249 includes any instruction which uses a SYMBOL_REF or a LABEL_REF.
2251 Alternatively, this could be fixed by making the dependence on the
2252 PIC_OFFSET_TABLE_REGNUM explicit in the RTL. */
2254 if (flag_pic
|| profile_flag
|| profile_block_flag
)
2255 emit_insn (gen_blockage ());
2257 if (nregs
> 1 || ! frame_pointer_needed
)
2259 if (frame_pointer_needed
)
2261 xops
[0] = adj_offsettable_operand (AT_BP (QImode
), offset
);
2263 emit_insn (gen_movsi_lea (xops
[2], XEXP (xops
[0], 0)));
2265 output_asm_insn (AS2 (lea
%L2
,%0,%2), xops
);
2268 for (regno
= 0; regno
< limit
; regno
++)
2269 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2270 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2272 xops
[0] = gen_rtx (REG
, SImode
, regno
);
2275 emit_insn (gen_pop (xops
[0]));
2277 output_asm_insn ("pop%L0 %0", xops
);
2282 for (regno
= 0; regno
< limit
; regno
++)
2283 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2284 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2286 xops
[0] = gen_rtx (REG
, SImode
, regno
);
2287 xops
[1] = adj_offsettable_operand (AT_BP (Pmode
), offset
);
2290 emit_move_insn (xops
[0], xops
[1]);
2292 output_asm_insn (AS2 (mov
%L0
,%1,%0), xops
);
2297 if (frame_pointer_needed
)
2299 /* If not an i386, mov & pop is faster than "leave". */
2301 if (TARGET_USE_LEAVE
)
2304 emit_insn (gen_leave());
2306 output_asm_insn ("leave", xops
);
2310 xops
[0] = frame_pointer_rtx
;
2311 xops
[1] = stack_pointer_rtx
;
2315 emit_insn (gen_epilogue_set_stack_ptr());
2316 emit_insn (gen_pop (xops
[0]));
2320 output_asm_insn (AS2 (mov
%L2
,%0,%2), xops
);
2321 output_asm_insn ("pop%L0 %0", xops
);
2328 /* If there is no frame pointer, we must still release the frame. */
2329 xops
[0] = GEN_INT (tsize
);
2332 emit_insn (gen_rtx (SET
, VOIDmode
, xops
[2],
2333 gen_rtx (PLUS
, SImode
, xops
[2], xops
[0])));
2335 output_asm_insn (AS2 (add
%L2
,%0,%2), xops
);
2338 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2339 if (profile_block_flag
== 2)
2341 FUNCTION_BLOCK_PROFILER_EXIT(file
);
2345 if (current_function_pops_args
&& current_function_args_size
)
2347 xops
[1] = GEN_INT (current_function_pops_args
);
2349 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2350 asked to pop more, pop return address, do explicit add, and jump
2351 indirectly to the caller. */
2353 if (current_function_pops_args
>= 32768)
2355 /* ??? Which register to use here? */
2356 xops
[0] = gen_rtx (REG
, SImode
, 2);
2360 emit_insn (gen_pop (xops
[0]));
2361 emit_insn (gen_rtx (SET
, VOIDmode
, xops
[2],
2362 gen_rtx (PLUS
, SImode
, xops
[1], xops
[2])));
2363 emit_jump_insn (xops
[0]);
2367 output_asm_insn ("pop%L0 %0", xops
);
2368 output_asm_insn (AS2 (add
%L2
,%1,%2), xops
);
2369 output_asm_insn ("jmp %*%0", xops
);
2375 emit_jump_insn (gen_return_pop_internal (xops
[1]));
2377 output_asm_insn ("ret %1", xops
);
2383 emit_jump_insn (gen_return_internal ());
2385 output_asm_insn ("ret", xops
);
2389 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2390 that is a valid memory address for an instruction.
2391 The MODE argument is the machine mode for the MEM expression
2392 that wants to use this address.
2394 On x86, legitimate addresses are:
2395 base movl (base),reg
2396 displacement movl disp,reg
2397 base + displacement movl disp(base),reg
2398 index + base movl (base,index),reg
2399 (index + base) + displacement movl disp(base,index),reg
2400 index*scale movl (,index,scale),reg
2401 index*scale + disp movl disp(,index,scale),reg
2402 index*scale + base movl (base,index,scale),reg
2403 (index*scale + base) + disp movl disp(base,index,scale),reg
2405 In each case, scale can be 1, 2, 4, 8. */
2407 /* This is exactly the same as print_operand_addr, except that
2408 it recognizes addresses instead of printing them.
2410 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2411 convert common non-canonical forms to canonical form so that they will
2414 #define ADDR_INVALID(msg,insn) \
2416 if (TARGET_DEBUG_ADDR) \
2418 fprintf (stderr, msg); \
2424 legitimate_address_p (mode
, addr
, strict
)
2425 enum machine_mode mode
;
2429 rtx base
= NULL_RTX
;
2430 rtx indx
= NULL_RTX
;
2431 rtx scale
= NULL_RTX
;
2432 rtx disp
= NULL_RTX
;
2434 if (TARGET_DEBUG_ADDR
)
2437 "\n======\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2438 GET_MODE_NAME (mode
), strict
);
2443 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == SUBREG
)
2446 else if (GET_CODE (addr
) == PLUS
)
2448 rtx op0
= XEXP (addr
, 0);
2449 rtx op1
= XEXP (addr
, 1);
2450 enum rtx_code code0
= GET_CODE (op0
);
2451 enum rtx_code code1
= GET_CODE (op1
);
2453 if (code0
== REG
|| code0
== SUBREG
)
2455 if (code1
== REG
|| code1
== SUBREG
)
2457 indx
= op0
; /* index + base */
2463 base
= op0
; /* base + displacement */
2468 else if (code0
== MULT
)
2470 indx
= XEXP (op0
, 0);
2471 scale
= XEXP (op0
, 1);
2473 if (code1
== REG
|| code1
== SUBREG
)
2474 base
= op1
; /* index*scale + base */
2477 disp
= op1
; /* index*scale + disp */
2480 else if (code0
== PLUS
&& GET_CODE (XEXP (op0
, 0)) == MULT
)
2482 indx
= XEXP (XEXP (op0
, 0), 0); /* index*scale + base + disp */
2483 scale
= XEXP (XEXP (op0
, 0), 1);
2484 base
= XEXP (op0
, 1);
2488 else if (code0
== PLUS
)
2490 indx
= XEXP (op0
, 0); /* index + base + disp */
2491 base
= XEXP (op0
, 1);
2497 ADDR_INVALID ("PLUS subcode is not valid.\n", op0
);
2502 else if (GET_CODE (addr
) == MULT
)
2504 indx
= XEXP (addr
, 0); /* index*scale */
2505 scale
= XEXP (addr
, 1);
2509 disp
= addr
; /* displacement */
2511 /* Allow arg pointer and stack pointer as index if there is not scaling */
2512 if (base
&& indx
&& !scale
2513 && (indx
== arg_pointer_rtx
|| indx
== stack_pointer_rtx
))
2520 /* Validate base register:
2522 Don't allow SUBREG's here, it can lead to spill failures when the base
2523 is one word out of a two word structure, which is represented internally
2528 if (GET_CODE (base
) != REG
)
2530 ADDR_INVALID ("Base is not a register.\n", base
);
2534 if ((strict
&& ! REG_OK_FOR_BASE_STRICT_P (base
))
2535 || (! strict
&& ! REG_OK_FOR_BASE_NONSTRICT_P (base
)))
2537 ADDR_INVALID ("Base is not valid.\n", base
);
2542 /* Validate index register:
2544 Don't allow SUBREG's here, it can lead to spill failures when the index
2545 is one word out of a two word structure, which is represented internally
2549 if (GET_CODE (indx
) != REG
)
2551 ADDR_INVALID ("Index is not a register.\n", indx
);
2555 if ((strict
&& ! REG_OK_FOR_INDEX_STRICT_P (indx
))
2556 || (! strict
&& ! REG_OK_FOR_INDEX_NONSTRICT_P (indx
)))
2558 ADDR_INVALID ("Index is not valid.\n", indx
);
2563 abort (); /* scale w/o index invalid */
2565 /* Validate scale factor: */
2568 HOST_WIDE_INT value
;
2570 if (GET_CODE (scale
) != CONST_INT
)
2572 ADDR_INVALID ("Scale is not valid.\n", scale
);
2576 value
= INTVAL (scale
);
2577 if (value
!= 1 && value
!= 2 && value
!= 4 && value
!= 8)
2579 ADDR_INVALID ("Scale is not a good multiplier.\n", scale
);
2584 /* Validate displacement
2585 Constant pool addresses must be handled special. They are
2586 considered legitimate addresses, but only if not used with regs.
2587 When printed, the output routines know to print the reference with the
2588 PIC reg, even though the PIC reg doesn't appear in the RTL. */
2591 if (GET_CODE (disp
) == SYMBOL_REF
2592 && CONSTANT_POOL_ADDRESS_P (disp
)
2597 else if (!CONSTANT_ADDRESS_P (disp
))
2599 ADDR_INVALID ("Displacement is not valid.\n", disp
);
2603 else if (GET_CODE (disp
) == CONST_DOUBLE
)
2605 ADDR_INVALID ("Displacement is a const_double.\n", disp
);
2609 else if (flag_pic
&& SYMBOLIC_CONST (disp
)
2610 && base
!= pic_offset_table_rtx
2611 && (indx
!= pic_offset_table_rtx
|| scale
!= NULL_RTX
))
2613 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp
);
2617 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp
)
2618 && (base
!= NULL_RTX
|| indx
!= NULL_RTX
))
2620 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n",
2626 if (TARGET_DEBUG_ADDR
)
2627 fprintf (stderr
, "Address is valid.\n");
2629 /* Everything looks valid, return true */
2633 /* Return a legitimate reference for ORIG (an address) using the
2634 register REG. If REG is 0, a new pseudo is generated.
2636 There are three types of references that must be handled:
2638 1. Global data references must load the address from the GOT, via
2639 the PIC reg. An insn is emitted to do this load, and the reg is
2642 2. Static data references must compute the address as an offset
2643 from the GOT, whose base is in the PIC reg. An insn is emitted to
2644 compute the address into a reg, and the reg is returned. Static
2645 data objects have SYMBOL_REF_FLAG set to differentiate them from
2646 global data objects.
2648 3. Constant pool addresses must be handled special. They are
2649 considered legitimate addresses, but only if not used with regs.
2650 When printed, the output routines know to print the reference with the
2651 PIC reg, even though the PIC reg doesn't appear in the RTL.
2653 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2654 reg also appears in the address (except for constant pool references,
2657 "switch" statements also require special handling when generating
2658 PIC code. See comments by the `casesi' insn in i386.md for details. */
2661 legitimize_pic_address (orig
, reg
)
2668 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
2670 if (GET_CODE (addr
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (addr
))
2675 reg
= gen_reg_rtx (Pmode
);
2677 if ((GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_FLAG (addr
))
2678 || GET_CODE (addr
) == LABEL_REF
)
2679 new = gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
, orig
);
2681 new = gen_rtx (MEM
, Pmode
,
2682 gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
, orig
));
2684 emit_move_insn (reg
, new);
2686 current_function_uses_pic_offset_table
= 1;
2690 else if (GET_CODE (addr
) == CONST
|| GET_CODE (addr
) == PLUS
)
2694 if (GET_CODE (addr
) == CONST
)
2696 addr
= XEXP (addr
, 0);
2697 if (GET_CODE (addr
) != PLUS
)
2701 if (XEXP (addr
, 0) == pic_offset_table_rtx
)
2705 reg
= gen_reg_rtx (Pmode
);
2707 base
= legitimize_pic_address (XEXP (addr
, 0), reg
);
2708 addr
= legitimize_pic_address (XEXP (addr
, 1),
2709 base
== reg
? NULL_RTX
: reg
);
2711 if (GET_CODE (addr
) == CONST_INT
)
2712 return plus_constant (base
, INTVAL (addr
));
2714 if (GET_CODE (addr
) == PLUS
&& CONSTANT_P (XEXP (addr
, 1)))
2716 base
= gen_rtx (PLUS
, Pmode
, base
, XEXP (addr
, 0));
2717 addr
= XEXP (addr
, 1);
2720 return gen_rtx (PLUS
, Pmode
, base
, addr
);
2725 /* Emit insns to move operands[1] into operands[0]. */
2728 emit_pic_move (operands
, mode
)
2730 enum machine_mode mode
;
2732 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
2734 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
2735 operands
[1] = force_reg (SImode
, operands
[1]);
2737 operands
[1] = legitimize_pic_address (operands
[1], temp
);
2740 /* Try machine-dependent ways of modifying an illegitimate address
2741 to be legitimate. If we find one, return the new, valid address.
2742 This macro is used in only one place: `memory_address' in explow.c.
2744 OLDX is the address as it was before break_out_memory_refs was called.
2745 In some cases it is useful to look at this to decide what needs to be done.
2747 MODE and WIN are passed so that this macro can use
2748 GO_IF_LEGITIMATE_ADDRESS.
2750 It is always safe for this macro to do nothing. It exists to recognize
2751 opportunities to optimize the output.
2753 For the 80386, we handle X+REG by loading X into a register R and
2754 using R+REG. R will go in a general reg and indexing will be used.
2755 However, if REG is a broken-out memory address or multiplication,
2756 nothing needs to be done because REG can certainly go in a general reg.
2758 When -fpic is used, special handling is needed for symbolic references.
2759 See comments by legitimize_pic_address in i386.c for details. */
2762 legitimize_address (x
, oldx
, mode
)
2765 enum machine_mode mode
;
2770 if (TARGET_DEBUG_ADDR
)
2772 fprintf (stderr
, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n",
2773 GET_MODE_NAME (mode
));
2777 if (flag_pic
&& SYMBOLIC_CONST (x
))
2778 return legitimize_pic_address (x
, 0);
2780 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2781 if (GET_CODE (x
) == ASHIFT
2782 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2783 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (x
, 1)))) < 4)
2786 x
= gen_rtx (MULT
, Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2787 GEN_INT (1 << log
));
2790 if (GET_CODE (x
) == PLUS
)
2792 /* Canonicalize shifts by 0, 1, 2, 3 into multiply. */
2794 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
2795 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2796 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 0), 1)))) < 4)
2799 XEXP (x
, 0) = gen_rtx (MULT
, Pmode
,
2800 force_reg (Pmode
, XEXP (XEXP (x
, 0), 0)),
2801 GEN_INT (1 << log
));
2804 if (GET_CODE (XEXP (x
, 1)) == ASHIFT
2805 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
2806 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 1), 1)))) < 4)
2809 XEXP (x
, 1) = gen_rtx (MULT
, Pmode
,
2810 force_reg (Pmode
, XEXP (XEXP (x
, 1), 0)),
2811 GEN_INT (1 << log
));
2814 /* Put multiply first if it isn't already. */
2815 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2817 rtx tmp
= XEXP (x
, 0);
2818 XEXP (x
, 0) = XEXP (x
, 1);
2823 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2824 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2825 created by virtual register instantiation, register elimination, and
2826 similar optimizations. */
2827 if (GET_CODE (XEXP (x
, 0)) == MULT
&& GET_CODE (XEXP (x
, 1)) == PLUS
)
2830 x
= gen_rtx (PLUS
, Pmode
,
2831 gen_rtx (PLUS
, Pmode
, XEXP (x
, 0),
2832 XEXP (XEXP (x
, 1), 0)),
2833 XEXP (XEXP (x
, 1), 1));
2837 (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2838 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2839 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 0)) == PLUS
2840 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == MULT
2841 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == PLUS
2842 && CONSTANT_P (XEXP (x
, 1)))
2844 rtx constant
, other
;
2846 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2848 constant
= XEXP (x
, 1);
2849 other
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
2851 else if (GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 1)) == CONST_INT
)
2853 constant
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
2854 other
= XEXP (x
, 1);
2862 x
= gen_rtx (PLUS
, Pmode
,
2863 gen_rtx (PLUS
, Pmode
, XEXP (XEXP (x
, 0), 0),
2864 XEXP (XEXP (XEXP (x
, 0), 1), 0)),
2865 plus_constant (other
, INTVAL (constant
)));
2869 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
2872 if (GET_CODE (XEXP (x
, 0)) == MULT
)
2875 XEXP (x
, 0) = force_operand (XEXP (x
, 0), 0);
2878 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2881 XEXP (x
, 1) = force_operand (XEXP (x
, 1), 0);
2885 && GET_CODE (XEXP (x
, 1)) == REG
2886 && GET_CODE (XEXP (x
, 0)) == REG
)
2889 if (flag_pic
&& SYMBOLIC_CONST (XEXP (x
, 1)))
2892 x
= legitimize_pic_address (x
, 0);
2895 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
2898 if (GET_CODE (XEXP (x
, 0)) == REG
)
2900 register rtx temp
= gen_reg_rtx (Pmode
);
2901 register rtx val
= force_operand (XEXP (x
, 1), temp
);
2903 emit_move_insn (temp
, val
);
2909 else if (GET_CODE (XEXP (x
, 1)) == REG
)
2911 register rtx temp
= gen_reg_rtx (Pmode
);
2912 register rtx val
= force_operand (XEXP (x
, 0), temp
);
2914 emit_move_insn (temp
, val
);
2924 /* Print an integer constant expression in assembler syntax. Addition
2925 and subtraction are the only arithmetic that may appear in these
2926 expressions. FILE is the stdio stream to write to, X is the rtx, and
2927 CODE is the operand print code from the output string. */
2930 output_pic_addr_const (file
, x
, code
)
2937 switch (GET_CODE (x
))
2948 if (GET_CODE (x
) == SYMBOL_REF
)
2949 assemble_name (file
, XSTR (x
, 0));
2952 ASM_GENERATE_INTERNAL_LABEL (buf
, "L",
2953 CODE_LABEL_NUMBER (XEXP (x
, 0)));
2954 assemble_name (asm_out_file
, buf
);
2957 if (GET_CODE (x
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x
))
2958 fprintf (file
, "@GOTOFF(%%ebx)");
2959 else if (code
== 'P')
2960 fprintf (file
, "@PLT");
2961 else if (GET_CODE (x
) == LABEL_REF
)
2962 fprintf (file
, "@GOTOFF");
2963 else if (! SYMBOL_REF_FLAG (x
))
2964 fprintf (file
, "@GOT");
2966 fprintf (file
, "@GOTOFF");
2971 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (x
));
2972 assemble_name (asm_out_file
, buf
);
2976 fprintf (file
, "%d", INTVAL (x
));
2980 /* This used to output parentheses around the expression,
2981 but that does not work on the 386 (either ATT or BSD assembler). */
2982 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2986 if (GET_MODE (x
) == VOIDmode
)
2988 /* We can use %d if the number is <32 bits and positive. */
2989 if (CONST_DOUBLE_HIGH (x
) || CONST_DOUBLE_LOW (x
) < 0)
2990 fprintf (file
, "0x%x%08x",
2991 CONST_DOUBLE_HIGH (x
), CONST_DOUBLE_LOW (x
));
2993 fprintf (file
, "%d", CONST_DOUBLE_LOW (x
));
2996 /* We can't handle floating point constants;
2997 PRINT_OPERAND must handle them. */
2998 output_operand_lossage ("floating constant misused");
3002 /* Some assemblers need integer constants to appear first. */
3003 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
3005 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3006 if (INTVAL (XEXP (x
, 1)) >= 0)
3007 fprintf (file
, "+");
3008 output_pic_addr_const (file
, XEXP (x
, 1), code
);
3012 output_pic_addr_const (file
, XEXP (x
, 1), code
);
3013 if (INTVAL (XEXP (x
, 0)) >= 0)
3014 fprintf (file
, "+");
3015 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3020 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3021 fprintf (file
, "-");
3022 output_pic_addr_const (file
, XEXP (x
, 1), code
);
3026 output_operand_lossage ("invalid expression as operand");
3030 /* Append the correct conditional move suffix which corresponds to CODE. */
3033 put_condition_code (code
, reverse_cc
, mode
, file
)
3036 enum mode_class mode
;
3039 int ieee
= (TARGET_IEEE_FP
&& (cc_prev_status
.flags
& CC_IN_80387
)
3040 && ! (cc_prev_status
.flags
& CC_FCOMI
));
3041 if (reverse_cc
&& ! ieee
)
3042 code
= reverse_condition (code
);
3044 if (mode
== MODE_INT
)
3048 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
3055 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
3094 output_operand_lossage ("Invalid %%C operand");
3097 else if (mode
== MODE_FLOAT
)
3101 fputs (ieee
? (reverse_cc
? "ne" : "e") : "ne", file
);
3104 fputs (ieee
? (reverse_cc
? "ne" : "e") : "e", file
);
3107 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nb", file
);
3110 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nbe", file
);
3113 fputs (ieee
? (reverse_cc
? "nb" : "b") : "be", file
);
3116 fputs (ieee
? (reverse_cc
? "ne" : "e") : "b", file
);
3119 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nb", file
);
3122 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nbe", file
);
3125 fputs (ieee
? (reverse_cc
? "nb" : "b") : "be", file
);
3128 fputs (ieee
? (reverse_cc
? "ne" : "e") : "b", file
);
3131 output_operand_lossage ("Invalid %%C operand");
3136 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
3137 C -- print opcode suffix for set/cmov insn.
3138 c -- like C, but print reversed condition
3139 F -- print opcode suffix for fcmov insn.
3140 f -- like C, but print reversed condition
3141 R -- print the prefix for register names.
3142 z -- print the opcode suffix for the size of the current operand.
3143 * -- print a star (in certain assembler syntax)
3144 w -- print the operand as if it's a "word" (HImode) even if it isn't.
3145 c -- don't print special prefixes before constant operands.
3146 J -- print the appropriate jump operand.
3147 s -- print a shift double count, followed by the assemblers argument
3149 b -- print the QImode name of the register for the indicated operand.
3150 %b0 would print %al if operands[0] is reg 0.
3151 w -- likewise, print the HImode name of the register.
3152 k -- likewise, print the SImode name of the register.
3153 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
3154 y -- print "st(0)" instead of "st" as a register.
3155 P -- print as a PIC constant */
3158 print_operand (file
, x
, code
)
3173 PUT_OP_SIZE (code
, 'l', file
);
3177 PUT_OP_SIZE (code
, 'w', file
);
3181 PUT_OP_SIZE (code
, 'b', file
);
3185 PUT_OP_SIZE (code
, 'l', file
);
3189 PUT_OP_SIZE (code
, 's', file
);
3193 PUT_OP_SIZE (code
, 't', file
);
3197 /* 387 opcodes don't get size suffixes if the operands are
3200 if (STACK_REG_P (x
))
3203 /* this is the size of op from size of operand */
3204 switch (GET_MODE_SIZE (GET_MODE (x
)))
3207 PUT_OP_SIZE ('B', 'b', file
);
3211 PUT_OP_SIZE ('W', 'w', file
);
3215 if (GET_MODE (x
) == SFmode
)
3217 PUT_OP_SIZE ('S', 's', file
);
3221 PUT_OP_SIZE ('L', 'l', file
);
3225 PUT_OP_SIZE ('T', 't', file
);
3229 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
)
3231 #ifdef GAS_MNEMONICS
3232 PUT_OP_SIZE ('Q', 'q', file
);
3235 PUT_OP_SIZE ('Q', 'l', file
); /* Fall through */
3239 PUT_OP_SIZE ('Q', 'l', file
);
3252 switch (GET_CODE (x
))
3254 /* These conditions are appropriate for testing the result
3255 of an arithmetic operation, not for a compare operation.
3256 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
3257 CC_Z_IN_NOT_C false and not floating point. */
3258 case NE
: fputs ("jne", file
); return;
3259 case EQ
: fputs ("je", file
); return;
3260 case GE
: fputs ("jns", file
); return;
3261 case LT
: fputs ("js", file
); return;
3262 case GEU
: fputs ("jmp", file
); return;
3263 case GTU
: fputs ("jne", file
); return;
3264 case LEU
: fputs ("je", file
); return;
3265 case LTU
: fputs ("#branch never", file
); return;
3267 /* no matching branches for GT nor LE */
3272 if (GET_CODE (x
) == CONST_INT
|| ! SHIFT_DOUBLE_OMITS_COUNT
)
3274 PRINT_OPERAND (file
, x
, 0);
3275 fputs (AS2C (,) + 1, file
);
3280 /* This is used by the conditional move instructions. */
3282 put_condition_code (GET_CODE (x
), 0, MODE_INT
, file
);
3285 /* Like above, but reverse condition */
3287 put_condition_code (GET_CODE (x
), 1, MODE_INT
, file
); return;
3290 put_condition_code (GET_CODE (x
), 0, MODE_FLOAT
, file
);
3293 /* Like above, but reverse condition */
3295 put_condition_code (GET_CODE (x
), 1, MODE_FLOAT
, file
);
3302 sprintf (str
, "invalid operand code `%c'", code
);
3303 output_operand_lossage (str
);
3308 if (GET_CODE (x
) == REG
)
3310 PRINT_REG (x
, code
, file
);
3313 else if (GET_CODE (x
) == MEM
)
3315 PRINT_PTR (x
, file
);
3316 if (CONSTANT_ADDRESS_P (XEXP (x
, 0)))
3319 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3321 output_addr_const (file
, XEXP (x
, 0));
3324 output_address (XEXP (x
, 0));
3327 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
3332 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3333 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
3334 PRINT_IMMED_PREFIX (file
);
3335 fprintf (file
, "0x%x", l
);
3338 /* These float cases don't actually occur as immediate operands. */
3339 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
3344 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3345 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
3346 fprintf (file
, "%s", dstr
);
3349 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == XFmode
)
3354 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3355 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
3356 fprintf (file
, "%s", dstr
);
3362 if (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
)
3363 PRINT_IMMED_PREFIX (file
);
3364 else if (GET_CODE (x
) == CONST
|| GET_CODE (x
) == SYMBOL_REF
3365 || GET_CODE (x
) == LABEL_REF
)
3366 PRINT_OFFSET_PREFIX (file
);
3369 output_pic_addr_const (file
, x
, code
);
3371 output_addr_const (file
, x
);
3375 /* Print a memory operand whose address is ADDR. */
3378 print_operand_address (file
, addr
)
3382 register rtx reg1
, reg2
, breg
, ireg
;
3385 switch (GET_CODE (addr
))
3389 fprintf (file
, "%se", RP
);
3390 fputs (hi_reg_name
[REGNO (addr
)], file
);
3400 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0)))
3402 offset
= XEXP (addr
, 0);
3403 addr
= XEXP (addr
, 1);
3405 else if (CONSTANT_ADDRESS_P (XEXP (addr
, 1)))
3407 offset
= XEXP (addr
, 1);
3408 addr
= XEXP (addr
, 0);
3411 if (GET_CODE (addr
) != PLUS
)
3413 else if (GET_CODE (XEXP (addr
, 0)) == MULT
)
3414 reg1
= XEXP (addr
, 0), addr
= XEXP (addr
, 1);
3415 else if (GET_CODE (XEXP (addr
, 1)) == MULT
)
3416 reg1
= XEXP (addr
, 1), addr
= XEXP (addr
, 0);
3417 else if (GET_CODE (XEXP (addr
, 0)) == REG
)
3418 reg1
= XEXP (addr
, 0), addr
= XEXP (addr
, 1);
3419 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
3420 reg1
= XEXP (addr
, 1), addr
= XEXP (addr
, 0);
3422 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == MULT
)
3439 if ((reg1
&& GET_CODE (reg1
) == MULT
)
3440 || (reg2
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2
))))
3445 else if (reg1
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1
)))
3451 if (ireg
!= 0 || breg
!= 0)
3458 output_pic_addr_const (file
, addr
, 0);
3459 else if (GET_CODE (addr
) == LABEL_REF
)
3460 output_asm_label (addr
);
3462 output_addr_const (file
, addr
);
3465 if (ireg
!= 0 && GET_CODE (ireg
) == MULT
)
3467 scale
= INTVAL (XEXP (ireg
, 1));
3468 ireg
= XEXP (ireg
, 0);
3471 /* The stack pointer can only appear as a base register,
3472 never an index register, so exchange the regs if it is wrong. */
3474 if (scale
== 1 && ireg
&& REGNO (ireg
) == STACK_POINTER_REGNUM
)
3483 /* output breg+ireg*scale */
3484 PRINT_B_I_S (breg
, ireg
, scale
, file
);
3492 if (GET_CODE (XEXP (addr
, 0)) == CONST_INT
)
3494 scale
= INTVAL (XEXP (addr
, 0));
3495 ireg
= XEXP (addr
, 1);
3499 scale
= INTVAL (XEXP (addr
, 1));
3500 ireg
= XEXP (addr
, 0);
3503 output_addr_const (file
, const0_rtx
);
3504 PRINT_B_I_S (NULL_RTX
, ireg
, scale
, file
);
3509 if (GET_CODE (addr
) == CONST_INT
3510 && INTVAL (addr
) < 0x8000
3511 && INTVAL (addr
) >= -0x8000)
3512 fprintf (file
, "%d", INTVAL (addr
));
3516 output_pic_addr_const (file
, addr
, 0);
3518 output_addr_const (file
, addr
);
3523 /* Set the cc_status for the results of an insn whose pattern is EXP.
3524 On the 80386, we assume that only test and compare insns, as well
3525 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3526 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3527 Also, we assume that jumps, moves and sCOND don't affect the condition
3528 codes. All else clobbers the condition codes, by assumption.
3530 We assume that ALL integer add, minus, etc. instructions effect the
3531 condition codes. This MUST be consistent with i386.md.
3533 We don't record any float test or compare - the redundant test &
3534 compare check in final.c does not handle stack-like regs correctly. */
3537 notice_update_cc (exp
)
3540 if (GET_CODE (exp
) == SET
)
3542 /* Jumps do not alter the cc's. */
3543 if (SET_DEST (exp
) == pc_rtx
)
3546 #ifdef IS_STACK_MODE
3547 /* Moving into a memory of stack_mode may have been moved
3548 in between the use and set of cc0 by loop_spl(). So
3549 old value of cc.status must be retained */
3550 if (GET_CODE(SET_DEST(exp
)) == MEM
3551 && IS_STACK_MODE (GET_MODE (SET_DEST (exp
))))
3555 /* Moving register or memory into a register:
3556 it doesn't alter the cc's, but it might invalidate
3557 the RTX's which we remember the cc's came from.
3558 (Note that moving a constant 0 or 1 MAY set the cc's). */
3559 if (REG_P (SET_DEST (exp
))
3560 && (REG_P (SET_SRC (exp
)) || GET_CODE (SET_SRC (exp
)) == MEM
3561 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3563 if (cc_status
.value1
3564 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3565 cc_status
.value1
= 0;
3567 if (cc_status
.value2
3568 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3569 cc_status
.value2
= 0;
3574 /* Moving register into memory doesn't alter the cc's.
3575 It may invalidate the RTX's which we remember the cc's came from. */
3576 if (GET_CODE (SET_DEST (exp
)) == MEM
3577 && (REG_P (SET_SRC (exp
))
3578 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3580 if (cc_status
.value1
3581 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3582 cc_status
.value1
= 0;
3583 if (cc_status
.value2
3584 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3585 cc_status
.value2
= 0;
3590 /* Function calls clobber the cc's. */
3591 else if (GET_CODE (SET_SRC (exp
)) == CALL
)
3597 /* Tests and compares set the cc's in predictable ways. */
3598 else if (SET_DEST (exp
) == cc0_rtx
)
3601 cc_status
.value1
= SET_SRC (exp
);
3605 /* Certain instructions effect the condition codes. */
3606 else if (GET_MODE (SET_SRC (exp
)) == SImode
3607 || GET_MODE (SET_SRC (exp
)) == HImode
3608 || GET_MODE (SET_SRC (exp
)) == QImode
)
3609 switch (GET_CODE (SET_SRC (exp
)))
3611 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
3612 /* Shifts on the 386 don't set the condition codes if the
3613 shift count is zero. */
3614 if (GET_CODE (XEXP (SET_SRC (exp
), 1)) != CONST_INT
)
3620 /* We assume that the CONST_INT is non-zero (this rtx would
3621 have been deleted if it were zero. */
3623 case PLUS
: case MINUS
: case NEG
:
3624 case AND
: case IOR
: case XOR
:
3625 cc_status
.flags
= CC_NO_OVERFLOW
;
3626 cc_status
.value1
= SET_SRC (exp
);
3627 cc_status
.value2
= SET_DEST (exp
);
3638 else if (GET_CODE (exp
) == PARALLEL
3639 && GET_CODE (XVECEXP (exp
, 0, 0)) == SET
)
3641 if (SET_DEST (XVECEXP (exp
, 0, 0)) == pc_rtx
)
3643 if (SET_DEST (XVECEXP (exp
, 0, 0)) == cc0_rtx
)
3647 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp
, 0, 0))))
3649 cc_status
.flags
|= CC_IN_80387
;
3650 if (TARGET_CMOVE
&& stack_regs_mentioned_p
3651 (XEXP (SET_SRC (XVECEXP (exp
, 0, 0)), 1)))
3652 cc_status
.flags
|= CC_FCOMI
;
3655 cc_status
.value1
= SET_SRC (XVECEXP (exp
, 0, 0));
3667 /* Split one or more DImode RTL references into pairs of SImode
3668 references. The RTL can be REG, offsettable MEM, integer constant, or
3669 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3670 split and "num" is its length. lo_half and hi_half are output arrays
3671 that parallel "operands". */
3674 split_di (operands
, num
, lo_half
, hi_half
)
3677 rtx lo_half
[], hi_half
[];
3681 if (GET_CODE (operands
[num
]) == REG
)
3683 lo_half
[num
] = gen_rtx (REG
, SImode
, REGNO (operands
[num
]));
3684 hi_half
[num
] = gen_rtx (REG
, SImode
, REGNO (operands
[num
]) + 1);
3686 else if (CONSTANT_P (operands
[num
]))
3687 split_double (operands
[num
], &lo_half
[num
], &hi_half
[num
]);
3688 else if (offsettable_memref_p (operands
[num
]))
3690 lo_half
[num
] = operands
[num
];
3691 hi_half
[num
] = adj_offsettable_operand (operands
[num
], 4);
3698 /* Return 1 if this is a valid binary operation on a 387.
3699 OP is the expression matched, and MODE is its mode. */
3702 binary_387_op (op
, mode
)
3704 enum machine_mode mode
;
3706 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3709 switch (GET_CODE (op
))
3715 return GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
;
3722 /* Return 1 if this is a valid shift or rotate operation on a 386.
3723 OP is the expression matched, and MODE is its mode. */
3728 enum machine_mode mode
;
3730 rtx operand
= XEXP (op
, 0);
3732 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3735 if (GET_MODE (operand
) != GET_MODE (op
)
3736 || GET_MODE_CLASS (GET_MODE (op
)) != MODE_INT
)
3739 return (GET_CODE (op
) == ASHIFT
3740 || GET_CODE (op
) == ASHIFTRT
3741 || GET_CODE (op
) == LSHIFTRT
3742 || GET_CODE (op
) == ROTATE
3743 || GET_CODE (op
) == ROTATERT
);
3746 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3747 MODE is not used. */
3750 VOIDmode_compare_op (op
, mode
)
3752 enum machine_mode mode
;
3754 return GET_CODE (op
) == COMPARE
&& GET_MODE (op
) == VOIDmode
;
3757 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3758 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3759 is the expression of the binary operation. The output may either be
3760 emitted here, or returned to the caller, like all output_* functions.
3762 There is no guarantee that the operands are the same mode, as they
3763 might be within FLOAT or FLOAT_EXTEND expressions. */
3766 output_387_binary_op (insn
, operands
)
3772 static char buf
[100];
3774 switch (GET_CODE (operands
[3]))
3777 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3778 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3785 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3786 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3793 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3794 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3801 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3802 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3812 strcpy (buf
, base_op
);
3814 switch (GET_CODE (operands
[3]))
3818 if (REG_P (operands
[2]) && REGNO (operands
[0]) == REGNO (operands
[2]))
3821 operands
[2] = operands
[1];
3825 if (GET_CODE (operands
[2]) == MEM
)
3826 return strcat (buf
, AS1 (%z2
,%2));
3828 if (NON_STACK_REG_P (operands
[1]))
3830 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
3834 else if (NON_STACK_REG_P (operands
[2]))
3836 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3840 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3841 return strcat (buf
, AS2 (p
,%2,%0));
3843 if (STACK_TOP_P (operands
[0]))
3844 return strcat (buf
, AS2C (%y2
,%0));
3846 return strcat (buf
, AS2C (%2,%0));
3850 if (GET_CODE (operands
[1]) == MEM
)
3851 return strcat (buf
, AS1 (r
%z1
,%1));
3853 if (GET_CODE (operands
[2]) == MEM
)
3854 return strcat (buf
, AS1 (%z2
,%2));
3856 if (NON_STACK_REG_P (operands
[1]))
3858 output_op_from_reg (operands
[1], strcat (buf
, AS1 (r
%z0
,%1)));
3862 else if (NON_STACK_REG_P (operands
[2]))
3864 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3868 if (! STACK_REG_P (operands
[1]) || ! STACK_REG_P (operands
[2]))
3871 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3872 return strcat (buf
, AS2 (rp
,%2,%0));
3874 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1])))
3875 return strcat (buf
, AS2 (p
,%1,%0));
3877 if (STACK_TOP_P (operands
[0]))
3879 if (STACK_TOP_P (operands
[1]))
3880 return strcat (buf
, AS2C (%y2
,%0));
3882 return strcat (buf
, AS2 (r
,%y1
,%0));
3884 else if (STACK_TOP_P (operands
[1]))
3885 return strcat (buf
, AS2C (%1,%0));
3887 return strcat (buf
, AS2 (r
,%2,%0));
3894 /* Output code for INSN to convert a float to a signed int. OPERANDS
3895 are the insn operands. The output may be SFmode or DFmode and the
3896 input operand may be SImode or DImode. As a special case, make sure
3897 that the 387 stack top dies if the output mode is DImode, because the
3898 hardware requires this. */
3901 output_fix_trunc (insn
, operands
)
3905 int stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
3908 if (! STACK_TOP_P (operands
[1])
3909 || (GET_MODE (operands
[0]) == DImode
&& ! stack_top_dies
))
3912 xops
[0] = GEN_INT (12);
3913 xops
[1] = operands
[4];
3915 output_asm_insn (AS1 (fnstc
%W2
,%2), operands
);
3916 output_asm_insn (AS2 (mov
%L2
,%2,%4), operands
);
3917 output_asm_insn (AS2 (mov
%B1
,%0,%h1
), xops
);
3918 output_asm_insn (AS2 (mov
%L4
,%4,%3), operands
);
3919 output_asm_insn (AS1 (fldc
%W3
,%3), operands
);
3921 if (NON_STACK_REG_P (operands
[0]))
3922 output_to_reg (operands
[0], stack_top_dies
, operands
[3]);
3924 else if (GET_CODE (operands
[0]) == MEM
)
3927 output_asm_insn (AS1 (fistp
%z0
,%0), operands
);
3929 output_asm_insn (AS1 (fist
%z0
,%0), operands
);
3934 return AS1 (fldc
%W2
,%2);
3937 /* Output code for INSN to compare OPERANDS. The two operands might
3938 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3939 expression. If the compare is in mode CCFPEQmode, use an opcode that
3940 will not fault if a qNaN is present. */
3943 output_float_compare (insn
, operands
)
3948 rtx body
= XVECEXP (PATTERN (insn
), 0, 0);
3949 int unordered_compare
= GET_MODE (SET_SRC (body
)) == CCFPEQmode
;
3952 if (TARGET_CMOVE
&& STACK_REG_P (operands
[1]))
3954 cc_status
.flags
|= CC_FCOMI
;
3955 cc_prev_status
.flags
&= ~CC_TEST_AX
;
3958 if (! STACK_TOP_P (operands
[0]))
3961 operands
[0] = operands
[1];
3963 cc_status
.flags
|= CC_REVERSED
;
3966 if (! STACK_TOP_P (operands
[0]))
3969 stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
3971 if (STACK_REG_P (operands
[1])
3973 && find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1]))
3974 && REGNO (operands
[1]) != FIRST_STACK_REG
)
3976 /* If both the top of the 387 stack dies, and the other operand
3977 is also a stack register that dies, then this must be a
3978 `fcompp' float compare */
3980 if (unordered_compare
)
3982 if (cc_status
.flags
& CC_FCOMI
)
3984 output_asm_insn (AS2 (fucomip
,%y1
,%0), operands
);
3985 output_asm_insn (AS1 (fstp
, %y0
), operands
);
3989 output_asm_insn ("fucompp", operands
);
3993 if (cc_status
.flags
& CC_FCOMI
)
3995 output_asm_insn (AS2 (fcomip
, %y1
,%0), operands
);
3996 output_asm_insn (AS1 (fstp
, %y0
), operands
);
4000 output_asm_insn ("fcompp", operands
);
4005 static char buf
[100];
4007 /* Decide if this is the integer or float compare opcode, or the
4008 unordered float compare. */
4010 if (unordered_compare
)
4011 strcpy (buf
, (cc_status
.flags
& CC_FCOMI
) ? "fucomi" : "fucom");
4012 else if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_FLOAT
)
4013 strcpy (buf
, (cc_status
.flags
& CC_FCOMI
) ? "fcomi" : "fcom");
4015 strcpy (buf
, "ficom");
4017 /* Modify the opcode if the 387 stack is to be popped. */
4022 if (NON_STACK_REG_P (operands
[1]))
4023 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
4024 else if (cc_status
.flags
& CC_FCOMI
)
4026 output_asm_insn (strcat (buf
, AS2 (%z1
,%y1
,%0)), operands
);
4030 output_asm_insn (strcat (buf
, AS1 (%z1
,%y1
)), operands
);
4033 /* Now retrieve the condition code. */
4035 return output_fp_cc0_set (insn
);
4038 /* Output opcodes to transfer the results of FP compare or test INSN
4039 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
4040 result of the compare or test is unordered, no comparison operator
4041 succeeds except NE. Return an output template, if any. */
4044 output_fp_cc0_set (insn
)
4048 rtx unordered_label
;
4052 xops
[0] = gen_rtx (REG
, HImode
, 0);
4053 output_asm_insn (AS1 (fnsts
%W0
,%0), xops
);
4055 if (! TARGET_IEEE_FP
)
4057 if (!(cc_status
.flags
& CC_REVERSED
))
4059 next
= next_cc0_user (insn
);
4061 if (GET_CODE (next
) == JUMP_INSN
4062 && GET_CODE (PATTERN (next
)) == SET
4063 && SET_DEST (PATTERN (next
)) == pc_rtx
4064 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
4065 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
4066 else if (GET_CODE (PATTERN (next
)) == SET
)
4067 code
= GET_CODE (SET_SRC (PATTERN (next
)));
4071 if (code
== GT
|| code
== LT
|| code
== EQ
|| code
== NE
4072 || code
== LE
|| code
== GE
)
4074 /* We will test eax directly. */
4075 cc_status
.flags
|= CC_TEST_AX
;
4083 next
= next_cc0_user (insn
);
4084 if (next
== NULL_RTX
)
4087 if (GET_CODE (next
) == JUMP_INSN
4088 && GET_CODE (PATTERN (next
)) == SET
4089 && SET_DEST (PATTERN (next
)) == pc_rtx
4090 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
4091 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
4092 else if (GET_CODE (PATTERN (next
)) == SET
)
4094 if (GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
4095 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
4097 code
= GET_CODE (SET_SRC (PATTERN (next
)));
4100 else if (GET_CODE (PATTERN (next
)) == PARALLEL
4101 && GET_CODE (XVECEXP (PATTERN (next
), 0, 0)) == SET
)
4103 if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next
), 0, 0))) == IF_THEN_ELSE
)
4104 code
= GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next
), 0, 0)), 0));
4106 code
= GET_CODE (SET_SRC (XVECEXP (PATTERN (next
), 0, 0)));
4111 xops
[0] = gen_rtx (REG
, QImode
, 0);
4116 xops
[1] = GEN_INT (0x45);
4117 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4122 xops
[1] = GEN_INT (0x45);
4123 xops
[2] = GEN_INT (0x01);
4124 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4125 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
4130 xops
[1] = GEN_INT (0x05);
4131 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4136 xops
[1] = GEN_INT (0x45);
4137 xops
[2] = GEN_INT (0x40);
4138 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4139 output_asm_insn (AS1 (dec
%B0
,%h0
), xops
);
4140 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
4145 xops
[1] = GEN_INT (0x45);
4146 xops
[2] = GEN_INT (0x40);
4147 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4148 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
4153 xops
[1] = GEN_INT (0x44);
4154 xops
[2] = GEN_INT (0x40);
4155 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4156 output_asm_insn (AS2 (xor%B0
,%2,%h0
), xops
);
4171 #define MAX_386_STACK_LOCALS 2
4173 static rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
4175 /* Define the structure for the machine field in struct function. */
4176 struct machine_function
4178 rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
4181 /* Functions to save and restore i386_stack_locals.
4182 These will be called, via pointer variables,
4183 from push_function_context and pop_function_context. */
4186 save_386_machine_status (p
)
4189 p
->machine
= (struct machine_function
*) xmalloc (sizeof i386_stack_locals
);
4190 bcopy ((char *) i386_stack_locals
, (char *) p
->machine
->i386_stack_locals
,
4191 sizeof i386_stack_locals
);
4195 restore_386_machine_status (p
)
4198 bcopy ((char *) p
->machine
->i386_stack_locals
, (char *) i386_stack_locals
,
4199 sizeof i386_stack_locals
);
4203 /* Clear stack slot assignments remembered from previous functions.
4204 This is called from INIT_EXPANDERS once before RTL is emitted for each
4208 clear_386_stack_locals ()
4210 enum machine_mode mode
;
4213 for (mode
= VOIDmode
; (int) mode
< (int) MAX_MACHINE_MODE
;
4214 mode
= (enum machine_mode
) ((int) mode
+ 1))
4215 for (n
= 0; n
< MAX_386_STACK_LOCALS
; n
++)
4216 i386_stack_locals
[(int) mode
][n
] = NULL_RTX
;
4218 /* Arrange to save and restore i386_stack_locals around nested functions. */
4219 save_machine_status
= save_386_machine_status
;
4220 restore_machine_status
= restore_386_machine_status
;
4223 /* Return a MEM corresponding to a stack slot with mode MODE.
4224 Allocate a new slot if necessary.
4226 The RTL for a function can have several slots available: N is
4227 which slot to use. */
4230 assign_386_stack_local (mode
, n
)
4231 enum machine_mode mode
;
4234 if (n
< 0 || n
>= MAX_386_STACK_LOCALS
)
4237 if (i386_stack_locals
[(int) mode
][n
] == NULL_RTX
)
4238 i386_stack_locals
[(int) mode
][n
]
4239 = assign_stack_local (mode
, GET_MODE_SIZE (mode
), 0);
4241 return i386_stack_locals
[(int) mode
][n
];
4246 enum machine_mode mode
;
4248 return (GET_CODE (op
) == MULT
);
4253 enum machine_mode mode
;
4255 return (GET_CODE (op
) == DIV
);
4259 /* Create a new copy of an rtx.
4260 Recursively copies the operands of the rtx,
4261 except for those few rtx codes that are sharable.
4262 Doesn't share CONST */
4270 register RTX_CODE code
;
4271 register char *format_ptr
;
4273 code
= GET_CODE (orig
);
4286 /* SCRATCH must be shared because they represent distinct values. */
4291 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4292 a LABEL_REF, it isn't sharable. */
4293 if (GET_CODE (XEXP (orig
, 0)) == PLUS
4294 && GET_CODE (XEXP (XEXP (orig
, 0), 0)) == SYMBOL_REF
4295 && GET_CODE (XEXP (XEXP (orig
, 0), 1)) == CONST_INT
)
4299 /* A MEM with a constant address is not sharable. The problem is that
4300 the constant address may need to be reloaded. If the mem is shared,
4301 then reloading one copy of this mem will cause all copies to appear
4302 to have been reloaded. */
4305 copy
= rtx_alloc (code
);
4306 PUT_MODE (copy
, GET_MODE (orig
));
4307 copy
->in_struct
= orig
->in_struct
;
4308 copy
->volatil
= orig
->volatil
;
4309 copy
->unchanging
= orig
->unchanging
;
4310 copy
->integrated
= orig
->integrated
;
4312 copy
->is_spill_rtx
= orig
->is_spill_rtx
;
4314 format_ptr
= GET_RTX_FORMAT (GET_CODE (copy
));
4316 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (copy
)); i
++)
4318 switch (*format_ptr
++)
4321 XEXP (copy
, i
) = XEXP (orig
, i
);
4322 if (XEXP (orig
, i
) != NULL
)
4323 XEXP (copy
, i
) = copy_rtx (XEXP (orig
, i
));
4328 XEXP (copy
, i
) = XEXP (orig
, i
);
4333 XVEC (copy
, i
) = XVEC (orig
, i
);
4334 if (XVEC (orig
, i
) != NULL
)
4336 XVEC (copy
, i
) = rtvec_alloc (XVECLEN (orig
, i
));
4337 for (j
= 0; j
< XVECLEN (copy
, i
); j
++)
4338 XVECEXP (copy
, i
, j
) = copy_rtx (XVECEXP (orig
, i
, j
));
4343 XWINT (copy
, i
) = XWINT (orig
, i
);
4347 XINT (copy
, i
) = XINT (orig
, i
);
4352 XSTR (copy
, i
) = XSTR (orig
, i
);
4363 /* Try to rewrite a memory address to make it valid */
4366 rewrite_address (mem_rtx
)
4369 rtx index_rtx
, base_rtx
, offset_rtx
, scale_rtx
, ret_rtx
;
4371 int offset_adjust
= 0;
4372 int was_only_offset
= 0;
4373 rtx mem_addr
= XEXP (mem_rtx
, 0);
4374 char *storage
= oballoc (0);
4376 int is_spill_rtx
= 0;
4378 in_struct
= MEM_IN_STRUCT_P (mem_rtx
);
4379 is_spill_rtx
= RTX_IS_SPILL_P (mem_rtx
);
4381 if (GET_CODE (mem_addr
) == PLUS
4382 && GET_CODE (XEXP (mem_addr
, 1)) == PLUS
4383 && GET_CODE (XEXP (XEXP (mem_addr
, 1), 0)) == REG
)
4385 /* This part is utilized by the combiner. */
4387 = gen_rtx (PLUS
, GET_MODE (mem_addr
),
4388 gen_rtx (PLUS
, GET_MODE (XEXP (mem_addr
, 1)),
4389 XEXP (mem_addr
, 0), XEXP (XEXP (mem_addr
, 1), 0)),
4390 XEXP (XEXP (mem_addr
, 1), 1));
4392 if (memory_address_p (GET_MODE (mem_rtx
), ret_rtx
))
4394 XEXP (mem_rtx
, 0) = ret_rtx
;
4395 RTX_IS_SPILL_P (ret_rtx
) = is_spill_rtx
;
4402 /* This part is utilized by loop.c.
4403 If the address contains PLUS (reg,const) and this pattern is invalid
4404 in this case - try to rewrite the address to make it valid. */
4405 storage
= oballoc (0);
4406 index_rtx
= base_rtx
= offset_rtx
= NULL
;
4408 /* Find the base index and offset elements of the memory address. */
4409 if (GET_CODE (mem_addr
) == PLUS
)
4411 if (GET_CODE (XEXP (mem_addr
, 0)) == REG
)
4413 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4414 base_rtx
= XEXP (mem_addr
, 1), index_rtx
= XEXP (mem_addr
, 0);
4416 base_rtx
= XEXP (mem_addr
, 0), offset_rtx
= XEXP (mem_addr
, 1);
4419 else if (GET_CODE (XEXP (mem_addr
, 0)) == MULT
)
4421 index_rtx
= XEXP (mem_addr
, 0);
4422 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4423 base_rtx
= XEXP (mem_addr
, 1);
4425 offset_rtx
= XEXP (mem_addr
, 1);
4428 else if (GET_CODE (XEXP (mem_addr
, 0)) == PLUS
)
4430 if (GET_CODE (XEXP (XEXP (mem_addr
, 0), 0)) == PLUS
4431 && GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0)) == MULT
4432 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 0))
4434 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 1))
4436 && (GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1))
4438 && GET_CODE (XEXP (XEXP (mem_addr
, 0), 1)) == REG
4439 && GET_CODE (XEXP (mem_addr
, 1)) == SYMBOL_REF
)
4441 index_rtx
= XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0);
4442 offset_rtx
= XEXP (mem_addr
, 1);
4443 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4444 offset_adjust
= INTVAL (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1));
4448 offset_rtx
= XEXP (mem_addr
, 1);
4449 index_rtx
= XEXP (XEXP (mem_addr
, 0), 0);
4450 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4454 else if (GET_CODE (XEXP (mem_addr
, 0)) == CONST_INT
)
4456 was_only_offset
= 1;
4459 offset_rtx
= XEXP (mem_addr
, 1);
4460 offset_adjust
= INTVAL (XEXP (mem_addr
, 0));
4461 if (offset_adjust
== 0)
4463 XEXP (mem_rtx
, 0) = offset_rtx
;
4464 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4474 else if (GET_CODE (mem_addr
) == MULT
)
4475 index_rtx
= mem_addr
;
4482 if (index_rtx
!= 0 && GET_CODE (index_rtx
) == MULT
)
4484 if (GET_CODE (XEXP (index_rtx
, 1)) != CONST_INT
)
4490 scale_rtx
= XEXP (index_rtx
, 1);
4491 scale
= INTVAL (scale_rtx
);
4492 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4495 /* Now find which of the elements are invalid and try to fix them. */
4496 if (index_rtx
&& GET_CODE (index_rtx
) == CONST_INT
&& base_rtx
== NULL
)
4498 offset_adjust
= INTVAL (index_rtx
) * scale
;
4500 if (offset_rtx
!= 0 && CONSTANT_P (offset_rtx
))
4501 offset_rtx
= plus_constant (offset_rtx
, offset_adjust
);
4502 else if (offset_rtx
== 0)
4503 offset_rtx
= const0_rtx
;
4505 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4506 XEXP (mem_rtx
, 0) = offset_rtx
;
4510 if (base_rtx
&& GET_CODE (base_rtx
) == PLUS
4511 && GET_CODE (XEXP (base_rtx
, 0)) == REG
4512 && GET_CODE (XEXP (base_rtx
, 1)) == CONST_INT
)
4514 offset_adjust
+= INTVAL (XEXP (base_rtx
, 1));
4515 base_rtx
= copy_all_rtx (XEXP (base_rtx
, 0));
4518 else if (base_rtx
&& GET_CODE (base_rtx
) == CONST_INT
)
4520 offset_adjust
+= INTVAL (base_rtx
);
4524 if (index_rtx
&& GET_CODE (index_rtx
) == PLUS
4525 && GET_CODE (XEXP (index_rtx
, 0)) == REG
4526 && GET_CODE (XEXP (index_rtx
, 1)) == CONST_INT
)
4528 offset_adjust
+= INTVAL (XEXP (index_rtx
, 1)) * scale
;
4529 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4534 if (! LEGITIMATE_INDEX_P (index_rtx
)
4535 && ! (index_rtx
== stack_pointer_rtx
&& scale
== 1
4536 && base_rtx
== NULL
))
4545 if (! LEGITIMATE_INDEX_P (base_rtx
) && GET_CODE (base_rtx
) != REG
)
4552 if (offset_adjust
!= 0)
4554 if (offset_rtx
!= 0 && CONSTANT_P (offset_rtx
))
4555 offset_rtx
= plus_constant (offset_rtx
, offset_adjust
);
4557 offset_rtx
= const0_rtx
;
4565 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
),
4566 gen_rtx (MULT
, GET_MODE (index_rtx
),
4567 index_rtx
, scale_rtx
),
4570 if (GET_CODE (offset_rtx
) != CONST_INT
4571 || INTVAL (offset_rtx
) != 0)
4572 ret_rtx
= gen_rtx (PLUS
, GET_MODE (ret_rtx
),
4573 ret_rtx
, offset_rtx
);
4577 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
),
4578 index_rtx
, base_rtx
);
4580 if (GET_CODE (offset_rtx
) != CONST_INT
4581 || INTVAL (offset_rtx
) != 0)
4582 ret_rtx
= gen_rtx (PLUS
, GET_MODE (ret_rtx
),
4583 ret_rtx
, offset_rtx
);
4590 ret_rtx
= gen_rtx (MULT
, GET_MODE (index_rtx
),
4591 index_rtx
, scale_rtx
);
4593 if (GET_CODE (offset_rtx
) != CONST_INT
4594 || INTVAL (offset_rtx
) != 0)
4595 ret_rtx
= gen_rtx (PLUS
, GET_MODE (ret_rtx
),
4596 ret_rtx
, offset_rtx
);
4600 if (GET_CODE (offset_rtx
) == CONST_INT
4601 && INTVAL (offset_rtx
) == 0)
4602 ret_rtx
= index_rtx
;
4604 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
),
4605 index_rtx
, offset_rtx
);
4613 if (GET_CODE (offset_rtx
) == CONST_INT
4614 && INTVAL (offset_rtx
) == 0)
4617 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
), base_rtx
,
4620 else if (was_only_offset
)
4621 ret_rtx
= offset_rtx
;
4629 XEXP (mem_rtx
, 0) = ret_rtx
;
4630 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4641 /* Return 1 if the first insn to set cc before INSN also sets the register
4642 REG_RTX; otherwise return 0. */
4644 last_to_set_cc (reg_rtx
, insn
)
4647 rtx prev_insn
= PREV_INSN (insn
);
4651 if (GET_CODE (prev_insn
) == NOTE
)
4654 else if (GET_CODE (prev_insn
) == INSN
)
4656 if (GET_CODE (PATTERN (prev_insn
)) != SET
)
4659 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn
)), reg_rtx
))
4661 if (sets_condition_code (SET_SRC (PATTERN (prev_insn
))))
4667 else if (! doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn
))))
4674 prev_insn
= PREV_INSN (prev_insn
);
4681 doesnt_set_condition_code (pat
)
4684 switch (GET_CODE (pat
))
4697 sets_condition_code (pat
)
4700 switch (GET_CODE (pat
))
4722 str_immediate_operand (op
, mode
)
4724 enum machine_mode mode
;
4726 if (GET_CODE (op
) == CONST_INT
&& INTVAL (op
) <= 32 && INTVAL (op
) >= 0)
4736 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4737 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4738 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4739 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
))
4745 /* Return 1 if the mode of the SET_DEST of insn is floating point
4746 and it is not an fld or a move from memory to memory.
4747 Otherwise return 0 */
4753 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4754 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4755 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4756 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4757 && GET_CODE (SET_DEST (PATTERN (insn
))) == REG
4758 && REGNO (SET_DEST (PATTERN (insn
))) >= FIRST_FLOAT_REG
4759 && GET_CODE (SET_SRC (insn
)) != MEM
)
4765 /* Return 1 if the mode of the SET_DEST of INSN is floating point and is
4766 memory and the source is a register. */
4772 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4773 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4774 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4775 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4776 && GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
4777 && GET_CODE (SET_SRC (PATTERN (insn
))) == REG
)
4783 /* Return 1 if DEP_INSN sets a register which INSN uses as a base
4784 or index to reference memory.
4785 otherwise return 0 */
4788 agi_dependent (insn
, dep_insn
)
4791 if (GET_CODE (dep_insn
) == INSN
4792 && GET_CODE (PATTERN (dep_insn
)) == SET
4793 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == REG
)
4794 return reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn
)), insn
);
4796 if (GET_CODE (dep_insn
) == INSN
&& GET_CODE (PATTERN (dep_insn
)) == SET
4797 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == MEM
4798 && push_operand (SET_DEST (PATTERN (dep_insn
)),
4799 GET_MODE (SET_DEST (PATTERN (dep_insn
)))))
4800 return reg_mentioned_in_mem (stack_pointer_rtx
, insn
);
4805 /* Return 1 if reg is used in rtl as a base or index for a memory ref
4806 otherwise return 0. */
4809 reg_mentioned_in_mem (reg
, rtl
)
4814 register enum rtx_code code
;
4819 code
= GET_CODE (rtl
);
4835 if (code
== MEM
&& reg_mentioned_p (reg
, rtl
))
4838 fmt
= GET_RTX_FORMAT (code
);
4839 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4842 for (j
= XVECLEN (rtl
, i
) - 1; j
>= 0; j
--)
4843 if (reg_mentioned_in_mem (reg
, XVECEXP (rtl
, i
, j
)))
4846 else if (fmt
[i
] == 'e' && reg_mentioned_in_mem (reg
, XEXP (rtl
, i
)))
4853 /* Output the approprate insns for doing strlen if not just doing repnz; scasb
4855 operands[0] = result, initialized with the startaddress
4856 operands[1] = alignment of the address.
4857 operands[2] = scratch register, initialized with the startaddress when
4858 not aligned, otherwise undefined
4860 This is just the body. It needs the initialisations mentioned above and
4861 some address computing at the end. These things are done in i386.md. */
4864 output_strlen_unroll (operands
)
4869 xops
[0] = operands
[0]; /* Result */
4870 /* operands[1]; * Alignment */
4871 xops
[1] = operands
[2]; /* Scratch */
4872 xops
[2] = GEN_INT (0);
4873 xops
[3] = GEN_INT (2);
4874 xops
[4] = GEN_INT (3);
4875 xops
[5] = GEN_INT (4);
4876 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4877 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4878 xops
[8] = gen_label_rtx (); /* label of main loop */
4880 if (TARGET_USE_Q_REG
&& QI_REG_P (xops
[1]))
4881 xops
[9] = gen_label_rtx (); /* pentium optimisation */
4883 xops
[10] = gen_label_rtx (); /* end label 2 */
4884 xops
[11] = gen_label_rtx (); /* end label 1 */
4885 xops
[12] = gen_label_rtx (); /* end label */
4886 /* xops[13] * Temporary used */
4887 xops
[14] = GEN_INT (0xff);
4888 xops
[15] = GEN_INT (0xff00);
4889 xops
[16] = GEN_INT (0xff0000);
4890 xops
[17] = GEN_INT (0xff000000);
4892 /* Loop to check 1..3 bytes for null to get an aligned pointer. */
4894 /* Is there a known alignment and is it less than 4? */
4895 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) < 4)
4897 /* Is there a known alignment and is it not 2? */
4898 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
4900 xops
[6] = gen_label_rtx (); /* Label when aligned to 3-byte */
4901 xops
[7] = gen_label_rtx (); /* Label when aligned to 2-byte */
4903 /* Leave just the 3 lower bits.
4904 If this is a q-register, then the high part is used later
4905 therefore use andl rather than andb. */
4906 output_asm_insn (AS2 (and%L1
,%4,%1), xops
);
4908 /* Is aligned to 4-byte adress when zero */
4909 output_asm_insn (AS1 (je
,%l8
), xops
);
4911 /* Side-effect even Parity when %eax == 3 */
4912 output_asm_insn (AS1 (jp
,%6), xops
);
4914 /* Is it aligned to 2 bytes ? */
4915 if (QI_REG_P (xops
[1]))
4916 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
4918 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
4920 output_asm_insn (AS1 (je
,%7), xops
);
4924 /* Since the alignment is 2, we have to check 2 or 0 bytes;
4925 check if is aligned to 4 - byte. */
4926 output_asm_insn (AS2 (and%L1
,%3,%1), xops
);
4928 /* Is aligned to 4-byte adress when zero */
4929 output_asm_insn (AS1 (je
,%l8
), xops
);
4932 xops
[13] = gen_rtx (MEM
, QImode
, xops
[0]);
4934 /* Now compare the bytes; compare with the high part of a q-reg
4935 gives shorter code. */
4936 if (QI_REG_P (xops
[1]))
4938 /* Compare the first n unaligned byte on a byte per byte basis. */
4939 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4941 /* When zero we reached the end. */
4942 output_asm_insn (AS1 (je
,%l12
), xops
);
4944 /* Increment the address. */
4945 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4947 /* Not needed with an alignment of 2 */
4948 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
4950 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
4951 CODE_LABEL_NUMBER (xops
[7]));
4952 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4953 output_asm_insn (AS1 (je
,%l12
), xops
);
4954 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4956 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
4957 CODE_LABEL_NUMBER (xops
[6]));
4960 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4964 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4965 output_asm_insn (AS1 (je
,%l12
), xops
);
4966 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4968 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
4969 CODE_LABEL_NUMBER (xops
[7]));
4970 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4971 output_asm_insn (AS1 (je
,%l12
), xops
);
4972 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4974 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
4975 CODE_LABEL_NUMBER (xops
[6]));
4976 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4979 output_asm_insn (AS1 (je
,%l12
), xops
);
4980 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4983 /* Generate loop to check 4 bytes at a time. It is not a good idea to
4984 align this loop. It gives only huge programs, but does not help to
4986 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[8]));
4988 xops
[13] = gen_rtx (MEM
, SImode
, xops
[0]);
4989 output_asm_insn (AS2 (mov
%L1
,%13,%1), xops
);
4991 if (QI_REG_P (xops
[1]))
4993 /* On i586 it is faster to combine the hi- and lo- part as
4994 a kind of lookahead. If anding both yields zero, then one
4995 of both *could* be zero, otherwise none of both is zero;
4996 this saves one instruction, on i486 this is slower
4997 tested with P-90, i486DX2-66, AMD486DX2-66 */
5000 output_asm_insn (AS2 (test
%B1
,%h1
,%b1
), xops
);
5001 output_asm_insn (AS1 (jne
,%l9
), xops
);
5004 /* Check first byte. */
5005 output_asm_insn (AS2 (test
%B1
,%b1
,%b1
), xops
);
5006 output_asm_insn (AS1 (je
,%l12
), xops
);
5008 /* Check second byte. */
5009 output_asm_insn (AS2 (test
%B1
,%h1
,%h1
), xops
);
5010 output_asm_insn (AS1 (je
,%l11
), xops
);
5013 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5014 CODE_LABEL_NUMBER (xops
[9]));
5019 /* Check first byte. */
5020 output_asm_insn (AS2 (test
%L1
,%14,%1), xops
);
5021 output_asm_insn (AS1 (je
,%l12
), xops
);
5023 /* Check second byte. */
5024 output_asm_insn (AS2 (test
%L1
,%15,%1), xops
);
5025 output_asm_insn (AS1 (je
,%l11
), xops
);
5028 /* Check third byte. */
5029 output_asm_insn (AS2 (test
%L1
,%16,%1), xops
);
5030 output_asm_insn (AS1 (je
,%l10
), xops
);
5032 /* Check fourth byte and increment address. */
5033 output_asm_insn (AS2 (add
%L0
,%5,%0), xops
);
5034 output_asm_insn (AS2 (test
%L1
,%17,%1), xops
);
5035 output_asm_insn (AS1 (jne
,%l8
), xops
);
5037 /* Now generate fixups when the compare stops within a 4-byte word. */
5038 output_asm_insn (AS2 (sub
%L0
,%4,%0), xops
);
5040 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[10]));
5041 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5043 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[11]));
5044 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5046 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[12]));