/* Subroutines for insn-output.c for Intel X86.
   Copyright (C) 1988, 1992, 1994, 1995, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
#ifdef EXTRA_CONSTRAINT
/* If EXTRA_CONSTRAINT is defined, then the 'S'
   constraint in REG_CLASS_FROM_LETTER will no longer work, and various
   asm statements that need 'S' for class SIREG will break.  */
error EXTRA_CONSTRAINT conflicts with S constraint letter
/* The previous line used to be #error, but some compilers barf
   even if the conditional was untrue.  */
#endif
/* Default stack-checking limit: -1 means no limit.  */
#ifndef CHECK_STACK_LIMIT
#define CHECK_STACK_LIMIT -1
#endif
52 enum reg_mem
/* Type of an operand for ix86_{binary,unary}_operator_ok */
59 /* Processor costs (relative to an add) */
60 struct processor_costs i386_cost
= { /* 386 specific costs */
61 1, /* cost of an add instruction (2 cycles) */
62 1, /* cost of a lea instruction */
63 3, /* variable shift costs */
64 2, /* constant shift costs */
65 6, /* cost of starting a multiply */
66 1, /* cost of multiply per each bit set */
67 23 /* cost of a divide/mod */
70 struct processor_costs i486_cost
= { /* 486 specific costs */
71 1, /* cost of an add instruction */
72 1, /* cost of a lea instruction */
73 3, /* variable shift costs */
74 2, /* constant shift costs */
75 12, /* cost of starting a multiply */
76 1, /* cost of multiply per each bit set */
77 40 /* cost of a divide/mod */
80 struct processor_costs pentium_cost
= {
81 1, /* cost of an add instruction */
82 1, /* cost of a lea instruction */
83 3, /* variable shift costs */
84 1, /* constant shift costs */
85 12, /* cost of starting a multiply */
86 1, /* cost of multiply per each bit set */
87 25 /* cost of a divide/mod */
90 struct processor_costs
*ix86_cost
= &pentium_cost
;
/* A memory reference through the frame pointer, in the given mode.  */
#define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))
94 extern FILE *asm_out_file
;
95 extern char *strcat ();
97 char *singlemove_string ();
98 char *output_move_const_single ();
99 char *output_fp_cc0_set ();
101 char *hi_reg_name
[] = HI_REGISTER_NAMES
;
102 char *qi_reg_name
[] = QI_REGISTER_NAMES
;
103 char *qi_high_reg_name
[] = QI_HIGH_REGISTER_NAMES
;
105 /* Array of the smallest class containing reg number REGNO, indexed by
106 REGNO. Used by REGNO_REG_CLASS in i386.h. */
108 enum reg_class regclass_map
[FIRST_PSEUDO_REGISTER
] =
111 AREG
, DREG
, CREG
, BREG
,
113 SIREG
, DIREG
, INDEX_REGS
, GENERAL_REGS
,
115 FP_TOP_REG
, FP_SECOND_REG
, FLOAT_REGS
, FLOAT_REGS
,
116 FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
,
121 /* Test and compare insns in i386.md store the information needed to
122 generate branch and scc insns here. */
124 struct rtx_def
*i386_compare_op0
= NULL_RTX
;
125 struct rtx_def
*i386_compare_op1
= NULL_RTX
;
126 struct rtx_def
*(*i386_compare_gen
)(), *(*i386_compare_gen_eq
)();
128 /* which cpu are we scheduling for */
129 enum processor_type ix86_cpu
;
131 /* which instruction set architecture to use. */
/* Strings to hold which cpu and instruction set architecture to use.  */
char *ix86_cpu_string;		/* for -mcpu=<xxx> */
char *ix86_arch_string;		/* for -march=<xxx> */
138 /* Register allocation order */
139 char *i386_reg_alloc_order
;
140 static char regs_allocated
[FIRST_PSEUDO_REGISTER
];
/* # of registers to use to pass arguments. */
char *i386_regparm_string;	/* # registers to use to pass args */
int i386_regparm;		/* i386_regparm_string as a number */
/* Alignment to use for loops and jumps; the *_string globals hold the
   raw command-line text and the int globals hold the parsed values.  */
char *i386_align_loops_string;	/* power of two alignment for loops */
char *i386_align_jumps_string;	/* power of two alignment for non-loop jumps */
char *i386_align_funcs_string;	/* power of two alignment for functions */
char *i386_branch_cost_string;	/* values 1-5: see jump.c */

int i386_align_loops;		/* power of two alignment for loops */
int i386_align_jumps;		/* power of two alignment for non-loop jumps */
int i386_align_funcs;		/* power of two alignment for functions */
int i386_branch_cost;		/* values 1-5: see jump.c */
157 /* Sometimes certain combinations of command options do not make
158 sense on a particular target machine. You can define a macro
159 `OVERRIDE_OPTIONS' to take account of this. This macro, if
160 defined, is executed once just after all the command options have
163 Don't use this macro to turn on various extra optimizations for
164 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
175 char *name
; /* Canonical processor name. */
176 enum processor_type processor
; /* Processor type enum value. */
177 struct processor_costs
*cost
; /* Processor costs */
178 int target_enable
; /* Target flags to enable. */
179 int target_disable
; /* Target flags to disable. */
180 } processor_target_table
[]
181 = {{PROCESSOR_I386_STRING
, PROCESSOR_I386
, &i386_cost
, 0, 0},
182 {PROCESSOR_I486_STRING
, PROCESSOR_I486
, &i486_cost
, 0, 0},
183 {PROCESSOR_I586_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
184 {PROCESSOR_PENTIUM_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
185 {PROCESSOR_I686_STRING
, PROCESSOR_PENTIUMPRO
, &pentium_cost
, 0, 0},
186 {PROCESSOR_PENTIUMPRO_STRING
, PROCESSOR_PENTIUMPRO
, &pentium_cost
, 0, 0}};
188 int ptt_size
= sizeof (processor_target_table
) / sizeof (struct ptt
);
190 #ifdef SUBTARGET_OVERRIDE_OPTIONS
191 SUBTARGET_OVERRIDE_OPTIONS
;
194 /* Validate registers in register allocation order */
195 if (i386_reg_alloc_order
)
197 for (i
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
201 case 'a': regno
= 0; break;
202 case 'd': regno
= 1; break;
203 case 'c': regno
= 2; break;
204 case 'b': regno
= 3; break;
205 case 'S': regno
= 4; break;
206 case 'D': regno
= 5; break;
207 case 'B': regno
= 6; break;
209 default: fatal ("Register '%c' is unknown", ch
);
212 if (regs_allocated
[regno
])
213 fatal ("Register '%c' was already specified in the allocation order", ch
);
215 regs_allocated
[regno
] = 1;
219 /* Get the architectural level. */
220 if (ix86_cpu_string
== (char *)0 && ix86_arch_string
== (char *)0)
222 ix86_arch_string
= PROCESSOR_PENTIUM_STRING
;
223 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
226 for (i
= 0; i
< ptt_size
; i
++)
227 if (! strcmp (ix86_arch_string
, processor_target_table
[i
].name
))
229 ix86_arch
= processor_target_table
[i
].processor
;
230 if (ix86_cpu_string
== (char *)0)
231 ix86_cpu_string
= processor_target_table
[i
].name
;
237 error ("bad value (%s) for -march= switch", ix86_arch_string
);
238 ix86_arch_string
= PROCESSOR_DEFAULT_STRING
;
239 ix86_arch
= PROCESSOR_DEFAULT
;
242 for (j
= 0; j
< ptt_size
; j
++)
243 if (! strcmp (ix86_cpu_string
, processor_target_table
[j
].name
))
245 ix86_cpu
= processor_target_table
[j
].processor
;
246 if (i
> j
&& (int)ix86_arch
>= (int)PROCESSOR_PENTIUMPRO
)
247 error ("-mcpu=%s does not support -march=%s", ix86_cpu_string
, ix86_arch_string
);
249 target_flags
|= processor_target_table
[j
].target_enable
;
250 target_flags
&= ~processor_target_table
[j
].target_disable
;
256 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string
);
257 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
258 ix86_cpu
= PROCESSOR_DEFAULT
;
261 /* Validate -mregparm= value */
262 if (i386_regparm_string
)
264 i386_regparm
= atoi (i386_regparm_string
);
265 if (i386_regparm
< 0 || i386_regparm
> REGPARM_MAX
)
266 fatal ("-mregparm=%d is not between 0 and %d", i386_regparm
, REGPARM_MAX
);
269 def_align
= (TARGET_386
) ? 2 : 4;
271 /* Validate -malign-loops= value, or provide default */
272 if (i386_align_loops_string
)
274 i386_align_loops
= atoi (i386_align_loops_string
);
275 if (i386_align_loops
< 0 || i386_align_loops
> MAX_CODE_ALIGN
)
276 fatal ("-malign-loops=%d is not between 0 and %d",
277 i386_align_loops
, MAX_CODE_ALIGN
);
280 i386_align_loops
= 2;
282 /* Validate -malign-jumps= value, or provide default */
283 if (i386_align_jumps_string
)
285 i386_align_jumps
= atoi (i386_align_jumps_string
);
286 if (i386_align_jumps
< 0 || i386_align_jumps
> MAX_CODE_ALIGN
)
287 fatal ("-malign-jumps=%d is not between 0 and %d",
288 i386_align_jumps
, MAX_CODE_ALIGN
);
291 i386_align_jumps
= def_align
;
293 /* Validate -malign-functions= value, or provide default */
294 if (i386_align_funcs_string
)
296 i386_align_funcs
= atoi (i386_align_funcs_string
);
297 if (i386_align_funcs
< 0 || i386_align_funcs
> MAX_CODE_ALIGN
)
298 fatal ("-malign-functions=%d is not between 0 and %d",
299 i386_align_funcs
, MAX_CODE_ALIGN
);
302 i386_align_funcs
= def_align
;
304 /* Validate -mbranch-cost= value, or provide default */
305 if (i386_branch_cost_string
)
307 i386_branch_cost
= atoi (i386_branch_cost_string
);
308 if (i386_branch_cost
< 0 || i386_branch_cost
> 5)
309 fatal ("-mbranch-cost=%d is not between 0 and 5",
313 i386_branch_cost
= TARGET_PENTIUMPRO
? 4 : 1;
315 if (TARGET_OMIT_LEAF_FRAME_POINTER
) /* keep nonleaf frame pointers */
316 flag_omit_frame_pointer
= 1;
318 /* pic references don't explicitly mention pic_offset_table_rtx */
319 /* code threaded into the prologue may conflict with profiling */
320 if (flag_pic
|| profile_flag
|| profile_block_flag
)
321 target_flags
&= ~MASK_SCHEDULE_PROLOGUE
;
324 /* A C statement (sans semicolon) to choose the order in which to
325 allocate hard registers for pseudo-registers local to a basic
328 Store the desired register order in the array `reg_alloc_order'.
329 Element 0 should be the register to allocate first; element 1, the
330 next register; and so on.
332 The macro body should not assume anything about the contents of
333 `reg_alloc_order' before execution of the macro.
335 On most machines, it is not necessary to define this macro. */
338 order_regs_for_local_alloc ()
340 int i
, ch
, order
, regno
;
342 /* User specified the register allocation order */
343 if (i386_reg_alloc_order
)
345 for (i
= order
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
349 case 'a': regno
= 0; break;
350 case 'd': regno
= 1; break;
351 case 'c': regno
= 2; break;
352 case 'b': regno
= 3; break;
353 case 'S': regno
= 4; break;
354 case 'D': regno
= 5; break;
355 case 'B': regno
= 6; break;
358 reg_alloc_order
[order
++] = regno
;
361 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
363 if (!regs_allocated
[i
])
364 reg_alloc_order
[order
++] = i
;
368 /* If users did not specify a register allocation order, use natural order */
371 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
372 reg_alloc_order
[i
] = i
;
378 optimization_options (level
)
381 /* For -O2, and beyond, turn off -fschedule-insns by default. It tends to
382 make the problem with not enough registers even worse */
383 #ifdef INSN_SCHEDULING
385 flag_schedule_insns
= 0;
389 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
390 attribute for DECL. The attributes in ATTRIBUTES have previously been
394 i386_valid_decl_attribute_p (decl
, attributes
, identifier
, args
)
403 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
404 attribute for TYPE. The attributes in ATTRIBUTES have previously been
408 i386_valid_type_attribute_p (type
, attributes
, identifier
, args
)
414 if (TREE_CODE (type
) != FUNCTION_TYPE
415 && TREE_CODE (type
) != FIELD_DECL
416 && TREE_CODE (type
) != TYPE_DECL
)
419 /* Stdcall attribute says callee is responsible for popping arguments
420 if they are not variable. */
421 if (is_attribute_p ("stdcall", identifier
))
422 return (args
== NULL_TREE
);
424 /* Cdecl attribute says the callee is a normal C declaration */
425 if (is_attribute_p ("cdecl", identifier
))
426 return (args
== NULL_TREE
);
428 /* Regparm attribute specifies how many integer arguments are to be
429 passed in registers */
430 if (is_attribute_p ("regparm", identifier
))
434 if (!args
|| TREE_CODE (args
) != TREE_LIST
435 || TREE_CHAIN (args
) != NULL_TREE
436 || TREE_VALUE (args
) == NULL_TREE
)
439 cst
= TREE_VALUE (args
);
440 if (TREE_CODE (cst
) != INTEGER_CST
)
443 if (TREE_INT_CST_HIGH (cst
) != 0
444 || TREE_INT_CST_LOW (cst
) < 0
445 || TREE_INT_CST_LOW (cst
) > REGPARM_MAX
)
454 /* Return 0 if the attributes for two types are incompatible, 1 if they
455 are compatible, and 2 if they are nearly compatible (which causes a
456 warning to be generated). */
459 i386_comp_type_attributes (type1
, type2
)
467 /* Value is the number of bytes of arguments automatically
468 popped when returning from a subroutine call.
469 FUNDECL is the declaration node of the function (as a tree),
470 FUNTYPE is the data type of the function (as a tree),
471 or for a library call it is an identifier node for the subroutine name.
472 SIZE is the number of bytes of arguments passed on the stack.
474 On the 80386, the RTD insn may be used to pop them if the number
475 of args is fixed, but if the number is variable then the caller
476 must pop them all. RTD can't be used for library calls now
477 because the library is compiled with the Unix compiler.
478 Use of RTD is a selectable option, since it is incompatible with
479 standard Unix calling sequences. If the option is not selected,
480 the caller must always pop the args.
482 The attribute stdcall is equivalent to RTD on a per module basis. */
485 i386_return_pops_args (fundecl
, funtype
, size
)
490 int rtd
= TARGET_RTD
;
492 if (TREE_CODE (funtype
) == IDENTIFIER_NODE
)
495 /* Cdecl functions override -mrtd, and never pop the stack */
496 if (!lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype
))) {
498 /* Stdcall functions will pop the stack if not variable args */
499 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype
)))
503 && (TYPE_ARG_TYPES (funtype
) == NULL_TREE
504 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype
))) == void_type_node
)))
508 /* Lose any fake structure return argument */
509 if (aggregate_value_p (TREE_TYPE (funtype
)))
510 return GET_MODE_SIZE (Pmode
);
516 /* Argument support functions. */
518 /* Initialize a variable CUM of type CUMULATIVE_ARGS
519 for a call to a function whose data type is FNTYPE.
520 For a library call, FNTYPE is 0. */
523 init_cumulative_args (cum
, fntype
, libname
)
524 CUMULATIVE_ARGS
*cum
; /* argument info to initialize */
525 tree fntype
; /* tree ptr for function decl */
526 rtx libname
; /* SYMBOL_REF of library name or 0 */
528 static CUMULATIVE_ARGS zero_cum
;
529 tree param
, next_param
;
531 if (TARGET_DEBUG_ARG
)
533 fprintf (stderr
, "\ninit_cumulative_args (");
536 tree ret_type
= TREE_TYPE (fntype
);
537 fprintf (stderr
, "fntype code = %s, ret code = %s",
538 tree_code_name
[ (int)TREE_CODE (fntype
) ],
539 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
542 fprintf (stderr
, "no fntype");
545 fprintf (stderr
, ", libname = %s", XSTR (libname
, 0));
550 /* Set up the number of registers to use for passing arguments. */
551 cum
->nregs
= i386_regparm
;
554 tree attr
= lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype
));
556 cum
->nregs
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr
)));
559 /* Determine if this function has variable arguments. This is
560 indicated by the last argument being 'void_type_mode' if there
561 are no variable arguments. If there are variable arguments, then
562 we won't pass anything in registers */
566 for (param
= (fntype
) ? TYPE_ARG_TYPES (fntype
) : 0;
570 next_param
= TREE_CHAIN (param
);
571 if (next_param
== (tree
)0 && TREE_VALUE (param
) != void_type_node
)
576 if (TARGET_DEBUG_ARG
)
577 fprintf (stderr
, ", nregs=%d )\n", cum
->nregs
);
582 /* Update the data in CUM to advance over an argument
583 of mode MODE and data type TYPE.
584 (TYPE is null for libcalls where that information may not be available.) */
587 function_arg_advance (cum
, mode
, type
, named
)
588 CUMULATIVE_ARGS
*cum
; /* current arg information */
589 enum machine_mode mode
; /* current arg mode */
590 tree type
; /* type of the argument or 0 if lib support */
591 int named
; /* whether or not the argument was named */
593 int bytes
= (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
594 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
596 if (TARGET_DEBUG_ARG
)
598 "function_adv( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d )\n\n",
599 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
614 /* Define where to put the arguments to a function.
615 Value is zero to push the argument on the stack,
616 or a hard register in which to store the argument.
618 MODE is the argument's machine mode.
619 TYPE is the data type of the argument (as a tree).
620 This is null for libcalls where that information may
622 CUM is a variable of type CUMULATIVE_ARGS which gives info about
623 the preceding args and about the function being called.
624 NAMED is nonzero if this argument is a named parameter
625 (otherwise it is an extra parameter matching an ellipsis). */
628 function_arg (cum
, mode
, type
, named
)
629 CUMULATIVE_ARGS
*cum
; /* current arg information */
630 enum machine_mode mode
; /* current arg mode */
631 tree type
; /* type of the argument or 0 if lib support */
632 int named
; /* != 0 for normal args, == 0 for ... args */
635 int bytes
= (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
636 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
640 default: /* for now, pass fp/complex values on the stack */
648 if (words
<= cum
->nregs
)
649 ret
= gen_rtx (REG
, mode
, cum
->regno
);
653 if (TARGET_DEBUG_ARG
)
656 "function_arg( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d",
657 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
660 fprintf (stderr
, ", reg=%%e%s", reg_names
[ REGNO(ret
) ]);
662 fprintf (stderr
, ", stack");
664 fprintf (stderr
, " )\n");
670 /* For an arg passed partly in registers and partly in memory,
671 this is the number of registers used.
672 For args passed entirely in registers or entirely in memory, zero. */
675 function_arg_partial_nregs (cum
, mode
, type
, named
)
676 CUMULATIVE_ARGS
*cum
; /* current arg information */
677 enum machine_mode mode
; /* current arg mode */
678 tree type
; /* type of the argument or 0 if lib support */
679 int named
; /* != 0 for normal args, == 0 for ... args */
685 /* Output an insn whose source is a 386 integer register. SRC is the
686 rtx for the register, and TEMPLATE is the op-code template. SRC may
687 be either SImode or DImode.
689 The template will be output with operands[0] as SRC, and operands[1]
690 as a pointer to the top of the 386 stack. So a call from floatsidf2
691 would look like this:
693 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
695 where %z0 corresponds to the caller's operands[1], and is used to
696 emit the proper size suffix.
698 ??? Extend this to handle HImode - a 387 can load and store HImode
702 output_op_from_reg (src
, template)
707 int size
= GET_MODE_SIZE (GET_MODE (src
));
710 xops
[1] = AT_SP (Pmode
);
711 xops
[2] = GEN_INT (size
);
712 xops
[3] = stack_pointer_rtx
;
714 if (size
> UNITS_PER_WORD
)
717 if (size
> 2 * UNITS_PER_WORD
)
719 high
= gen_rtx (REG
, SImode
, REGNO (src
) + 2);
720 output_asm_insn (AS1 (push
%L0
,%0), &high
);
722 high
= gen_rtx (REG
, SImode
, REGNO (src
) + 1);
723 output_asm_insn (AS1 (push
%L0
,%0), &high
);
725 output_asm_insn (AS1 (push
%L0
,%0), &src
);
727 output_asm_insn (template, xops
);
729 output_asm_insn (AS2 (add
%L3
,%2,%3), xops
);
732 /* Output an insn to pop an value from the 387 top-of-stack to 386
733 register DEST. The 387 register stack is popped if DIES is true. If
734 the mode of DEST is an integer mode, a `fist' integer store is done,
735 otherwise a `fst' float store is done. */
738 output_to_reg (dest
, dies
)
743 int size
= GET_MODE_SIZE (GET_MODE (dest
));
745 xops
[0] = AT_SP (Pmode
);
746 xops
[1] = stack_pointer_rtx
;
747 xops
[2] = GEN_INT (size
);
750 output_asm_insn (AS2 (sub
%L1
,%2,%1), xops
);
752 if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_INT
)
755 output_asm_insn (AS1 (fistp
%z3
,%y0
), xops
);
757 output_asm_insn (AS1 (fist
%z3
,%y0
), xops
);
759 else if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_FLOAT
)
762 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
765 if (GET_MODE (dest
) == XFmode
)
767 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
768 output_asm_insn (AS1 (fld
%z3
,%y0
), xops
);
771 output_asm_insn (AS1 (fst
%z3
,%y0
), xops
);
777 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
779 if (size
> UNITS_PER_WORD
)
781 dest
= gen_rtx (REG
, SImode
, REGNO (dest
) + 1);
782 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
783 if (size
> 2 * UNITS_PER_WORD
)
785 dest
= gen_rtx (REG
, SImode
, REGNO (dest
) + 1);
786 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
792 singlemove_string (operands
)
796 if (GET_CODE (operands
[0]) == MEM
797 && GET_CODE (x
= XEXP (operands
[0], 0)) == PRE_DEC
)
799 if (XEXP (x
, 0) != stack_pointer_rtx
)
803 else if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
805 return output_move_const_single (operands
);
807 else if (GET_CODE (operands
[0]) == REG
|| GET_CODE (operands
[1]) == REG
)
808 return AS2 (mov
%L0
,%1,%0);
809 else if (CONSTANT_P (operands
[1]))
810 return AS2 (mov
%L0
,%1,%0);
813 output_asm_insn ("push%L1 %1", operands
);
818 /* Return a REG that occurs in ADDR with coefficient 1.
819 ADDR can be effectively incremented by incrementing REG. */
825 while (GET_CODE (addr
) == PLUS
)
827 if (GET_CODE (XEXP (addr
, 0)) == REG
)
828 addr
= XEXP (addr
, 0);
829 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
830 addr
= XEXP (addr
, 1);
831 else if (CONSTANT_P (XEXP (addr
, 0)))
832 addr
= XEXP (addr
, 1);
833 else if (CONSTANT_P (XEXP (addr
, 1)))
834 addr
= XEXP (addr
, 0);
838 if (GET_CODE (addr
) == REG
)
844 /* Output an insn to add the constant N to the register X. */
855 output_asm_insn (AS1 (dec
%L0
,%0), xops
);
857 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
860 xops
[1] = GEN_INT (-n
);
861 output_asm_insn (AS2 (sub
%L0
,%1,%0), xops
);
865 xops
[1] = GEN_INT (n
);
866 output_asm_insn (AS2 (add
%L0
,%1,%0), xops
);
871 /* Output assembler code to perform a doubleword move insn
872 with operands OPERANDS. */
875 output_move_double (operands
)
878 enum {REGOP
, OFFSOP
, MEMOP
, PUSHOP
, POPOP
, CNSTOP
, RNDOP
} optype0
, optype1
;
882 rtx addreg0
= 0, addreg1
= 0;
883 int dest_overlapped_low
= 0;
884 int size
= GET_MODE_SIZE (GET_MODE (operands
[0]));
889 /* First classify both operands. */
891 if (REG_P (operands
[0]))
893 else if (offsettable_memref_p (operands
[0]))
895 else if (GET_CODE (XEXP (operands
[0], 0)) == POST_INC
)
897 else if (GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
)
899 else if (GET_CODE (operands
[0]) == MEM
)
904 if (REG_P (operands
[1]))
906 else if (CONSTANT_P (operands
[1]))
908 else if (offsettable_memref_p (operands
[1]))
910 else if (GET_CODE (XEXP (operands
[1], 0)) == POST_INC
)
912 else if (GET_CODE (XEXP (operands
[1], 0)) == PRE_DEC
)
914 else if (GET_CODE (operands
[1]) == MEM
)
919 /* Check for the cases that the operand constraints are not
920 supposed to allow to happen. Abort if we get one,
921 because generating code for these cases is painful. */
923 if (optype0
== RNDOP
|| optype1
== RNDOP
)
926 /* If one operand is decrementing and one is incrementing
927 decrement the former register explicitly
928 and change that operand into ordinary indexing. */
930 if (optype0
== PUSHOP
&& optype1
== POPOP
)
932 /* ??? Can this ever happen on i386? */
933 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
934 asm_add (-size
, operands
[0]);
935 if (GET_MODE (operands
[1]) == XFmode
)
936 operands
[0] = gen_rtx (MEM
, XFmode
, operands
[0]);
937 else if (GET_MODE (operands
[0]) == DFmode
)
938 operands
[0] = gen_rtx (MEM
, DFmode
, operands
[0]);
940 operands
[0] = gen_rtx (MEM
, DImode
, operands
[0]);
944 if (optype0
== POPOP
&& optype1
== PUSHOP
)
946 /* ??? Can this ever happen on i386? */
947 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
948 asm_add (-size
, operands
[1]);
949 if (GET_MODE (operands
[1]) == XFmode
)
950 operands
[1] = gen_rtx (MEM
, XFmode
, operands
[1]);
951 else if (GET_MODE (operands
[1]) == DFmode
)
952 operands
[1] = gen_rtx (MEM
, DFmode
, operands
[1]);
954 operands
[1] = gen_rtx (MEM
, DImode
, operands
[1]);
958 /* If an operand is an unoffsettable memory ref, find a register
959 we can increment temporarily to make it refer to the second word. */
961 if (optype0
== MEMOP
)
962 addreg0
= find_addr_reg (XEXP (operands
[0], 0));
964 if (optype1
== MEMOP
)
965 addreg1
= find_addr_reg (XEXP (operands
[1], 0));
967 /* Ok, we can do one word at a time.
968 Normally we do the low-numbered word first,
969 but if either operand is autodecrementing then we
970 do the high-numbered word first.
972 In either case, set up in LATEHALF the operands to use
973 for the high-numbered word and in some cases alter the
974 operands in OPERANDS to be suitable for the low-numbered word. */
978 if (optype0
== REGOP
)
980 middlehalf
[0] = gen_rtx (REG
, SImode
, REGNO (operands
[0]) + 1);
981 latehalf
[0] = gen_rtx (REG
, SImode
, REGNO (operands
[0]) + 2);
983 else if (optype0
== OFFSOP
)
985 middlehalf
[0] = adj_offsettable_operand (operands
[0], 4);
986 latehalf
[0] = adj_offsettable_operand (operands
[0], 8);
990 middlehalf
[0] = operands
[0];
991 latehalf
[0] = operands
[0];
994 if (optype1
== REGOP
)
996 middlehalf
[1] = gen_rtx (REG
, SImode
, REGNO (operands
[1]) + 1);
997 latehalf
[1] = gen_rtx (REG
, SImode
, REGNO (operands
[1]) + 2);
999 else if (optype1
== OFFSOP
)
1001 middlehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1002 latehalf
[1] = adj_offsettable_operand (operands
[1], 8);
1004 else if (optype1
== CNSTOP
)
1006 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1008 REAL_VALUE_TYPE r
; long l
[3];
1010 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1011 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r
, l
);
1012 operands
[1] = GEN_INT (l
[0]);
1013 middlehalf
[1] = GEN_INT (l
[1]);
1014 latehalf
[1] = GEN_INT (l
[2]);
1016 else if (CONSTANT_P (operands
[1]))
1017 /* No non-CONST_DOUBLE constant should ever appear here. */
1022 middlehalf
[1] = operands
[1];
1023 latehalf
[1] = operands
[1];
1026 else /* size is not 12: */
1028 if (optype0
== REGOP
)
1029 latehalf
[0] = gen_rtx (REG
, SImode
, REGNO (operands
[0]) + 1);
1030 else if (optype0
== OFFSOP
)
1031 latehalf
[0] = adj_offsettable_operand (operands
[0], 4);
1033 latehalf
[0] = operands
[0];
1035 if (optype1
== REGOP
)
1036 latehalf
[1] = gen_rtx (REG
, SImode
, REGNO (operands
[1]) + 1);
1037 else if (optype1
== OFFSOP
)
1038 latehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1039 else if (optype1
== CNSTOP
)
1040 split_double (operands
[1], &operands
[1], &latehalf
[1]);
1042 latehalf
[1] = operands
[1];
1045 /* If insn is effectively movd N (sp),-(sp) then we will do the
1046 high word first. We should use the adjusted operand 1
1047 (which is N+4 (sp) or N+8 (sp))
1048 for the low word and middle word as well,
1049 to compensate for the first decrement of sp. */
1050 if (optype0
== PUSHOP
1051 && REGNO (XEXP (XEXP (operands
[0], 0), 0)) == STACK_POINTER_REGNUM
1052 && reg_overlap_mentioned_p (stack_pointer_rtx
, operands
[1]))
1053 middlehalf
[1] = operands
[1] = latehalf
[1];
1055 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1056 if the upper part of reg N does not appear in the MEM, arrange to
1057 emit the move late-half first. Otherwise, compute the MEM address
1058 into the upper part of N and use that as a pointer to the memory
1060 if (optype0
== REGOP
1061 && (optype1
== OFFSOP
|| optype1
== MEMOP
))
1063 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1064 && reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1066 /* If both halves of dest are used in the src memory address,
1067 compute the address into latehalf of dest. */
1069 xops
[0] = latehalf
[0];
1070 xops
[1] = XEXP (operands
[1], 0);
1071 output_asm_insn (AS2 (lea
%L0
,%a1
,%0), xops
);
1072 if( GET_MODE (operands
[1]) == XFmode
)
1075 operands
[1] = gen_rtx (MEM
, XFmode
, latehalf
[0]);
1076 middlehalf
[1] = adj_offsettable_operand (operands
[1], size
-8);
1077 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1081 operands
[1] = gen_rtx (MEM
, DImode
, latehalf
[0]);
1082 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1086 && reg_mentioned_p (middlehalf
[0], XEXP (operands
[1], 0)))
1088 /* Check for two regs used by both source and dest. */
1089 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1090 || reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1093 /* JRV says this can't happen: */
1094 if (addreg0
|| addreg1
)
1097 /* Only the middle reg conflicts; simply put it last. */
1098 output_asm_insn (singlemove_string (operands
), operands
);
1099 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1100 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1103 else if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0)))
1104 /* If the low half of dest is mentioned in the source memory
1105 address, the arrange to emit the move late half first. */
1106 dest_overlapped_low
= 1;
1109 /* If one or both operands autodecrementing,
1110 do the two words, high-numbered first. */
1112 /* Likewise, the first move would clobber the source of the second one,
1113 do them in the other order. This happens only for registers;
1114 such overlap can't happen in memory unless the user explicitly
1115 sets it up, and that is an undefined circumstance. */
1118 if (optype0 == PUSHOP || optype1 == PUSHOP
1119 || (optype0 == REGOP && optype1 == REGOP
1120 && REGNO (operands[0]) == REGNO (latehalf[1]))
1121 || dest_overlapped_low)
1123 if (optype0
== PUSHOP
|| optype1
== PUSHOP
1124 || (optype0
== REGOP
&& optype1
== REGOP
1125 && ((middlehalf
[1] && REGNO (operands
[0]) == REGNO (middlehalf
[1]))
1126 || REGNO (operands
[0]) == REGNO (latehalf
[1])))
1127 || dest_overlapped_low
)
1129 /* Make any unoffsettable addresses point at high-numbered word. */
1131 asm_add (size
-4, addreg0
);
1133 asm_add (size
-4, addreg1
);
1136 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1138 /* Undo the adds we just did. */
1140 asm_add (-4, addreg0
);
1142 asm_add (-4, addreg1
);
1146 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1148 asm_add (-4, addreg0
);
1150 asm_add (-4, addreg1
);
1153 /* Do low-numbered word. */
1154 return singlemove_string (operands
);
1157 /* Normal case: do the two words, low-numbered first. */
1159 output_asm_insn (singlemove_string (operands
), operands
);
1161 /* Do the middle one of the three words for long double */
1165 asm_add (4, addreg0
);
1167 asm_add (4, addreg1
);
1169 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1172 /* Make any unoffsettable addresses point at high-numbered word. */
1174 asm_add (4, addreg0
);
1176 asm_add (4, addreg1
);
1179 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1181 /* Undo the adds we just did. */
1183 asm_add (4-size
, addreg0
);
1185 asm_add (4-size
, addreg1
);
1191 #define MAX_TMPS 2 /* max temporary registers used */
1193 /* Output the appropriate code to move push memory on the stack */
1196 output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
)
1208 } tmp_info
[MAX_TMPS
];
1210 rtx src
= operands
[1];
1213 int stack_p
= reg_overlap_mentioned_p (stack_pointer_rtx
, src
);
1214 int stack_offset
= 0;
1218 if (!offsettable_memref_p (src
))
1219 fatal_insn ("Source is not offsettable", insn
);
1221 if ((length
& 3) != 0)
1222 fatal_insn ("Pushing non-word aligned size", insn
);
1224 /* Figure out which temporary registers we have available */
1225 for (i
= tmp_start
; i
< n_operands
; i
++)
1227 if (GET_CODE (operands
[i
]) == REG
)
1229 if (reg_overlap_mentioned_p (operands
[i
], src
))
1232 tmp_info
[ max_tmps
++ ].xops
[1] = operands
[i
];
1233 if (max_tmps
== MAX_TMPS
)
1239 for (offset
= length
- 4; offset
>= 0; offset
-= 4)
1241 xops
[0] = adj_offsettable_operand (src
, offset
+ stack_offset
);
1242 output_asm_insn (AS1(push
%L0
,%0), xops
);
1248 for (offset
= length
- 4; offset
>= 0; )
1250 for (num_tmps
= 0; num_tmps
< max_tmps
&& offset
>= 0; num_tmps
++)
1252 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%0,%1);
1253 tmp_info
[num_tmps
].push
= AS1(push
%L0
,%1);
1254 tmp_info
[num_tmps
].xops
[0] = adj_offsettable_operand (src
, offset
+ stack_offset
);
1258 for (i
= 0; i
< num_tmps
; i
++)
1259 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1261 for (i
= 0; i
< num_tmps
; i
++)
1262 output_asm_insn (tmp_info
[i
].push
, tmp_info
[i
].xops
);
1265 stack_offset
+= 4*num_tmps
;
1273 /* Output the appropriate code to move data between two memory locations */
1276 output_move_memory (operands
, insn
, length
, tmp_start
, n_operands
)
1287 } tmp_info
[MAX_TMPS
];
1289 rtx dest
= operands
[0];
1290 rtx src
= operands
[1];
1291 rtx qi_tmp
= NULL_RTX
;
1297 if (GET_CODE (dest
) == MEM
1298 && GET_CODE (XEXP (dest
, 0)) == PRE_INC
1299 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
)
1300 return output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
);
1302 if (!offsettable_memref_p (src
))
1303 fatal_insn ("Source is not offsettable", insn
);
1305 if (!offsettable_memref_p (dest
))
1306 fatal_insn ("Destination is not offsettable", insn
);
1308 /* Figure out which temporary registers we have available */
1309 for (i
= tmp_start
; i
< n_operands
; i
++)
1311 if (GET_CODE (operands
[i
]) == REG
)
1313 if ((length
& 1) != 0 && !qi_tmp
&& QI_REG_P (operands
[i
]))
1314 qi_tmp
= operands
[i
];
1316 if (reg_overlap_mentioned_p (operands
[i
], dest
))
1317 fatal_insn ("Temporary register overlaps the destination", insn
);
1319 if (reg_overlap_mentioned_p (operands
[i
], src
))
1320 fatal_insn ("Temporary register overlaps the source", insn
);
1322 tmp_info
[ max_tmps
++ ].xops
[2] = operands
[i
];
1323 if (max_tmps
== MAX_TMPS
)
1329 fatal_insn ("No scratch registers were found to do memory->memory moves", insn
);
1331 if ((length
& 1) != 0)
1334 fatal_insn ("No byte register found when moving odd # of bytes.", insn
);
1339 for (num_tmps
= 0; num_tmps
< max_tmps
; num_tmps
++)
1343 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%1,%2);
1344 tmp_info
[num_tmps
].store
= AS2(mov
%L0
,%2,%0);
1345 tmp_info
[num_tmps
].xops
[0] = adj_offsettable_operand (dest
, offset
);
1346 tmp_info
[num_tmps
].xops
[1] = adj_offsettable_operand (src
, offset
);
1350 else if (length
>= 2)
1352 tmp_info
[num_tmps
].load
= AS2(mov
%W0
,%1,%2);
1353 tmp_info
[num_tmps
].store
= AS2(mov
%W0
,%2,%0);
1354 tmp_info
[num_tmps
].xops
[0] = adj_offsettable_operand (dest
, offset
);
1355 tmp_info
[num_tmps
].xops
[1] = adj_offsettable_operand (src
, offset
);
1363 for (i
= 0; i
< num_tmps
; i
++)
1364 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1366 for (i
= 0; i
< num_tmps
; i
++)
1367 output_asm_insn (tmp_info
[i
].store
, tmp_info
[i
].xops
);
1372 xops
[0] = adj_offsettable_operand (dest
, offset
);
1373 xops
[1] = adj_offsettable_operand (src
, offset
);
1375 output_asm_insn (AS2(mov
%B0
,%1,%2), xops
);
1376 output_asm_insn (AS2(mov
%B0
,%2,%0), xops
);
/* NOTE(review): this region is a line-mangled extraction -- statements are
   split across lines and several original source lines are missing (the
   embedded numbers jump 1384..1399).  Text is preserved byte-for-byte below.

   standard_80387_constant_p (X): the visible logic installs a float trap
   handler via setjmp/set_float_handler, extracts the REAL_VALUE from
   CONST_DOUBLE X, and tests it for equality with dconst0 (excluding -0.0,
   which fldz would get wrong) and with dconst1 -- i.e. it classifies X as
   a candidate for the 80387 fldz/fld1 instructions.  The declarations of
   `handler', `d', `is0', `is1' and all return statements are among the
   deleted lines; presumably it returns nonzero codes for 0.0 and 1.0 and
   zero otherwise -- TODO: restore from pristine i386.c.  */
1384 standard_80387_constant_p (x
)
1387 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1392 if (setjmp (handler
))
1395 set_float_handler (handler
);
1396 REAL_VALUE_FROM_CONST_DOUBLE (d
, x
);
1397 is0
= REAL_VALUES_EQUAL (d
, dconst0
) && !REAL_VALUE_MINUS_ZERO (d
);
1398 is1
= REAL_VALUES_EQUAL (d
, dconst1
);
1399 set_float_handler (NULL_PTR
);
/* NOTE(review): line-mangled extraction fragment, preserved byte-for-byte.

   output_move_const_single (OPERANDS): emits a move of a single-word
   constant.  Visible logic: when the destination is an FP register it
   consults standard_80387_constant_p on operands[1] (the deleted lines
   presumably return "fld1"/"fldz" strings for the recognized constants --
   TODO confirm); when operands[1] is a CONST_DOUBLE (other than XFmode)
   it is converted in place to a CONST_INT holding the target SFmode bit
   pattern via REAL_VALUE_TO_TARGET_SINGLE, then the move is delegated to
   singlemove_string.  */
1407 /* Note that on the 80387, other constants, such as pi,
1408 are much slower to load as standard constants
1409 than to load from doubles in memory! */
1416 output_move_const_single (operands
)
1419 if (FP_REG_P (operands
[0]))
1421 int conval
= standard_80387_constant_p (operands
[1]);
1429 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1431 REAL_VALUE_TYPE r
; long l
;
1433 if (GET_MODE (operands
[1]) == XFmode
)
1436 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1437 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
1438 operands
[1] = GEN_INT (l
);
1440 return singlemove_string (operands
);
1443 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1444 reference and a constant. */
1447 symbolic_operand (op
, mode
)
1449 enum machine_mode mode
;
1451 switch (GET_CODE (op
))
1458 return ((GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
1459 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
1460 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
1466 /* Test for a valid operand for a call instruction.
1467 Don't allow the arg pointer register or virtual regs
1468 since they may change into reg + const, which the patterns
1469 can't handle yet. */
1472 call_insn_operand (op
, mode
)
1474 enum machine_mode mode
;
1476 if (GET_CODE (op
) == MEM
1477 && ((CONSTANT_ADDRESS_P (XEXP (op
, 0))
1478 /* This makes a difference for PIC. */
1479 && general_operand (XEXP (op
, 0), Pmode
))
1480 || (GET_CODE (XEXP (op
, 0)) == REG
1481 && XEXP (op
, 0) != arg_pointer_rtx
1482 && !(REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1483 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
1488 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1492 expander_call_insn_operand (op
, mode
)
1494 enum machine_mode mode
;
1496 if (GET_CODE (op
) == MEM
1497 && (CONSTANT_ADDRESS_P (XEXP (op
, 0))
1498 || (GET_CODE (XEXP (op
, 0)) == REG
1499 && XEXP (op
, 0) != arg_pointer_rtx
1500 && !(REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1501 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
1506 /* Return 1 if OP is a comparison operator that can use the condition code
1507 generated by an arithmetic operation. */
1510 arithmetic_comparison_operator (op
, mode
)
1512 enum machine_mode mode
;
1516 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
1518 code
= GET_CODE (op
);
1519 if (GET_RTX_CLASS (code
) != '<')
1522 return (code
!= GT
&& code
!= LE
);
1525 /* Returns 1 if OP contains a symbol reference */
1528 symbolic_reference_mentioned_p (op
)
1534 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1537 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1538 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1544 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1545 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1548 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1555 /* Attempt to expand a binary operator. Make the expansion closer to the
1556 actual machine, then just general_operand, which will allow 3 separate
1557 memory references (one output, two input) in a single insn. Return
1558 whether the insn fails, or succeeds. */
1561 ix86_expand_binary_operator (code
, mode
, operands
)
1563 enum machine_mode mode
;
1570 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1571 if (GET_RTX_CLASS (code
) == 'c'
1572 && (rtx_equal_p (operands
[0], operands
[2])
1573 || immediate_operand (operands
[1], mode
)))
1575 rtx temp
= operands
[1];
1576 operands
[1] = operands
[2];
1580 /* If optimizing, copy to regs to improve CSE */
1581 if (TARGET_PSEUDO
&& optimize
&& ((reload_in_progress
| reload_completed
) == 0))
1583 if (GET_CODE (operands
[1]) == MEM
&& !rtx_equal_p (operands
[0], operands
[1]))
1584 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1586 if (GET_CODE (operands
[2]) == MEM
)
1587 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1589 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1591 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1592 emit_move_insn (temp
, operands
[1]);
1598 if (!ix86_binary_operator_ok (code
, mode
, operands
))
1600 /* If not optimizing, try to make a valid insn (optimize code previously did
1601 this above to improve chances of CSE) */
1603 if ((!TARGET_PSEUDO
|| !optimize
)
1604 && ((reload_in_progress
| reload_completed
) == 0)
1605 && (GET_CODE (operands
[1]) == MEM
|| GET_CODE (operands
[2]) == MEM
))
1608 if (GET_CODE (operands
[1]) == MEM
&& !rtx_equal_p (operands
[0], operands
[1]))
1610 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1614 if (GET_CODE (operands
[2]) == MEM
)
1616 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1620 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1622 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1623 emit_move_insn (temp
, operands
[1]);
1628 if (modified
&& !ix86_binary_operator_ok (code
, mode
, operands
))
1638 /* Return TRUE or FALSE depending on whether the binary operator meets the
1639 appropriate constraints. */
1642 ix86_binary_operator_ok (code
, mode
, operands
)
1644 enum machine_mode mode
;
1647 return (GET_CODE (operands
[1]) != MEM
|| GET_CODE (operands
[2]) != MEM
)
1648 && (GET_CODE (operands
[1]) != CONST_INT
|| GET_RTX_CLASS (code
) == 'c');
/* NOTE(review): line-mangled extraction fragment, preserved byte-for-byte.
   Interior lines were deleted (return type, operand declarations, the
   FAIL/DONE emission paths and braces), so the fragment is documented
   rather than reconstructed.

   ix86_expand_unary_operator (CODE, MODE, OPERANDS): visible logic forces
   a MEM source operand into a register before reload (to improve CSE when
   optimizing, and again on the fallback path when not optimizing) and
   validates the result with ix86_unary_operator_ok.  What happens when
   validation fails is among the deleted lines -- TODO: restore from
   pristine i386.c.  */
1651 /* Attempt to expand a unary operator. Make the expansion closer to the
1652 actual machine, then just general_operand, which will allow 2 separate
1653 memory references (one output, one input) in a single insn. Return
1654 whether the insn fails, or succeeds. */
1657 ix86_expand_unary_operator (code
, mode
, operands
)
1659 enum machine_mode mode
;
1664 /* If optimizing, copy to regs to improve CSE */
1667 && ((reload_in_progress
| reload_completed
) == 0)
1668 && GET_CODE (operands
[1]) == MEM
)
1670 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1673 if (!ix86_unary_operator_ok (code
, mode
, operands
))
1675 if ((!TARGET_PSEUDO
|| !optimize
)
1676 && ((reload_in_progress
| reload_completed
) == 0)
1677 && GET_CODE (operands
[1]) == MEM
)
1679 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1680 if (!ix86_unary_operator_ok (code
, mode
, operands
))
/* NOTE(review): extraction fragment -- only the signature survives; the
   entire function body (original lines after 1696) was deleted.
   Presumably it unconditionally accepts the operands (callers in
   ix86_expand_unary_operator treat it as a validity predicate) -- TODO:
   restore from pristine i386.c.  */
1690 /* Return TRUE or FALSE depending on whether the unary operator meets the
1691 appropriate constraints. */
1694 ix86_unary_operator_ok (code
, mode
, operands
)
1696 enum machine_mode mode
;
/* File-scope state for the -fpic "get PC" thunk emitted by
   asm_output_function_prefix / ix86_expand_prologue:
   pic_label_rtx   -- label rtx of the thunk (lazily created),
   pic_label_name  -- its assembler name, "LPR<n>",
   pic_label_no    -- counter used to generate unique names.  */
1704 static rtx pic_label_rtx
;
1705 static char pic_label_name
[256];
1706 static int pic_label_no
= 0;
/* NOTE(review): line-mangled extraction fragment, preserved byte-for-byte.
   Interior lines were deleted (return type, declarations of xops and
   prologue_node, braces, the #endif), so the fragment is documented
   rather than reconstructed.

   asm_output_function_prefix (FILE, NAME): when PIC is in use and
   TARGET_DEEP_BRANCH_PREDICTION is set, emits before the function a tiny
   thunk labelled via pic_label_rtx/pic_label_name ("LPR<n>") whose body is
   "movl (%esp),%ebx; ret" -- it loads the caller's return address into the
   PIC register and returns, so every call has a matching ret (favoured by
   deep branch predictors).  */
1708 /* This function generates code for -fpic that loads %ebx with
1709 with the return address of the caller and then returns. */
1711 asm_output_function_prefix (file
, name
)
1716 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1717 || current_function_uses_const_pool
);
1718 xops
[0] = pic_offset_table_rtx
;
1719 xops
[1] = stack_pointer_rtx
;
1721 /* deep branch prediction favors having a return for every call */
1722 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
1726 if (pic_label_rtx
== 0)
1728 pic_label_rtx
= (rtx
) gen_label_rtx ();
1729 sprintf (pic_label_name
, "LPR%d", pic_label_no
++);
1730 LABEL_NAME (pic_label_rtx
) = pic_label_name
;
1732 prologue_node
= make_node (FUNCTION_DECL
);
1733 DECL_RESULT (prologue_node
) = 0;
1734 #ifdef ASM_DECLARE_FUNCTION_NAME
1735 ASM_DECLARE_FUNCTION_NAME (file
, pic_label_name
, prologue_node
);
1737 output_asm_insn ("movl (%1),%0", xops
);
1738 output_asm_insn ("ret", xops
);
1742 /* Set up the stack and frame (if desired) for the function. */
1745 function_prologue (file
, size
)
1752 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1753 || current_function_uses_const_pool
);
1754 long tsize
= get_frame_size ();
1756 /* pic references don't explicitly mention pic_offset_table_rtx */
1757 if (TARGET_SCHEDULE_PROLOGUE
)
1763 xops
[0] = stack_pointer_rtx
;
1764 xops
[1] = frame_pointer_rtx
;
1765 xops
[2] = GEN_INT (tsize
);
1767 if (frame_pointer_needed
)
1769 output_asm_insn ("push%L1 %1", xops
);
1770 output_asm_insn (AS2 (mov
%L0
,%0,%1), xops
);
1775 else if (! TARGET_STACK_PROBE
|| tsize
< CHECK_STACK_LIMIT
)
1776 output_asm_insn (AS2 (sub
%L0
,%2,%0), xops
);
1779 xops
[3] = gen_rtx (REG
, SImode
, 0);
1780 output_asm_insn (AS2 (mov
%L0
,%2,%3), xops
);
1782 xops
[3] = gen_rtx (SYMBOL_REF
, Pmode
, "_alloca");
1783 output_asm_insn (AS1 (call
,%P3
), xops
);
1786 /* Note If use enter it is NOT reversed args.
1787 This one is not reversed from intel!!
1788 I think enter is slower. Also sdb doesn't like it.
1789 But if you want it the code is:
1791 xops[3] = const0_rtx;
1792 output_asm_insn ("enter %2,%3", xops);
1795 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
1796 for (regno
= limit
- 1; regno
>= 0; regno
--)
1797 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1798 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
1800 xops
[0] = gen_rtx (REG
, SImode
, regno
);
1801 output_asm_insn ("push%L0 %0", xops
);
1804 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
1806 xops
[0] = pic_offset_table_rtx
;
1807 xops
[1] = gen_rtx (SYMBOL_REF
, Pmode
, LABEL_NAME (pic_label_rtx
));
1809 output_asm_insn (AS1 (call
,%P1
), xops
);
1810 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops
);
1813 else if (pic_reg_used
)
1815 xops
[0] = pic_offset_table_rtx
;
1816 xops
[1] = (rtx
) gen_label_rtx ();
1818 output_asm_insn (AS1 (call
,%P1
), xops
);
1819 ASM_OUTPUT_INTERNAL_LABEL (file
, "L", CODE_LABEL_NUMBER (xops
[1]));
1820 output_asm_insn (AS1 (pop
%L0
,%0), xops
);
1821 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops
);
1825 /* This function generates the assembly code for function entry.
1826 FILE is an stdio stream to output the code to.
1827 SIZE is an int: how many units of temporary storage to allocate. */
1830 ix86_expand_prologue ()
1835 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1836 || current_function_uses_const_pool
);
1837 long tsize
= get_frame_size ();
1839 if (!TARGET_SCHEDULE_PROLOGUE
)
1842 xops
[0] = stack_pointer_rtx
;
1843 xops
[1] = frame_pointer_rtx
;
1844 xops
[2] = GEN_INT (tsize
);
1845 if (frame_pointer_needed
)
1847 emit_insn (gen_rtx (SET
, 0,
1848 gen_rtx (MEM
, SImode
,
1849 gen_rtx (PRE_DEC
, SImode
, stack_pointer_rtx
)),
1850 frame_pointer_rtx
));
1851 emit_move_insn (xops
[1], xops
[0]);
1856 else if (! TARGET_STACK_PROBE
|| tsize
< CHECK_STACK_LIMIT
)
1857 emit_insn (gen_subsi3 (xops
[0], xops
[0], xops
[2]));
1860 xops
[3] = gen_rtx (REG
, SImode
, 0);
1861 emit_move_insn (xops
[3], xops
[2]);
1862 xops
[3] = gen_rtx (MEM
, FUNCTION_MODE
,
1863 gen_rtx (SYMBOL_REF
, Pmode
, "_alloca"));
1864 emit_call_insn (gen_rtx (CALL
, VOIDmode
,
1865 xops
[3], const0_rtx
));
1868 /* Note If use enter it is NOT reversed args.
1869 This one is not reversed from intel!!
1870 I think enter is slower. Also sdb doesn't like it.
1871 But if you want it the code is:
1873 xops[3] = const0_rtx;
1874 output_asm_insn ("enter %2,%3", xops);
1877 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
1878 for (regno
= limit
- 1; regno
>= 0; regno
--)
1879 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1880 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
1882 xops
[0] = gen_rtx (REG
, SImode
, regno
);
1883 emit_insn (gen_rtx (SET
, 0,
1884 gen_rtx (MEM
, SImode
,
1885 gen_rtx (PRE_DEC
, SImode
, stack_pointer_rtx
)),
1889 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
1891 xops
[0] = pic_offset_table_rtx
;
1892 if (pic_label_rtx
== 0)
1894 pic_label_rtx
= (rtx
) gen_label_rtx ();
1895 sprintf (pic_label_name
, "LPR%d", pic_label_no
++);
1896 LABEL_NAME (pic_label_rtx
) = pic_label_name
;
1898 xops
[1] = gen_rtx (MEM
, QImode
, gen_rtx (SYMBOL_REF
, Pmode
, LABEL_NAME (pic_label_rtx
)));
1900 emit_insn (gen_prologue_get_pc (xops
[0], xops
[1]));
1901 emit_insn (gen_prologue_set_got (xops
[0],
1902 gen_rtx (SYMBOL_REF
, Pmode
, "$_GLOBAL_OFFSET_TABLE_"),
1903 gen_rtx (CONST_INT
, Pmode
, CODE_LABEL_NUMBER(xops
[1]))));
1905 else if (pic_reg_used
)
1907 xops
[0] = pic_offset_table_rtx
;
1908 xops
[1] = (rtx
) gen_label_rtx ();
1910 emit_insn (gen_prologue_get_pc (xops
[0], gen_rtx (CONST_INT
, Pmode
, CODE_LABEL_NUMBER(xops
[1]))));
1911 emit_insn (gen_pop (xops
[0]));
1912 emit_insn (gen_prologue_set_got (xops
[0],
1913 gen_rtx (SYMBOL_REF
, Pmode
, "$_GLOBAL_OFFSET_TABLE_"),
1914 gen_rtx (CONST_INT
, Pmode
, CODE_LABEL_NUMBER (xops
[1]))));
/* NOTE(review): extraction fragment -- only the signature survives; the
   function body was deleted (original lines after 1921).  The actual
   epilogue emission lives in ix86_expand_epilogue below, so this is
   presumably an empty hook -- TODO confirm against pristine i386.c.  */
1918 /* Restore function stack, frame, and registers. */
1921 function_epilogue (file
, size
)
/* NOTE(review): line-mangled extraction fragment, preserved byte-for-byte.
   Interior lines were deleted (the close of the block comment, the return
   type, the declaration/initialization and increment of `nregs`, braces
   and the early `return 0;` bodies), so the fragment is documented rather
   than reconstructed.

   ix86_can_use_return_insn_p (): visible logic -- bail out under
   NON_SAVING_SETJMP when the function calls setjmp, bail out before
   reload_completed, then count (into the missing `nregs`) the callee-saved
   registers below reglimit that must be restored, including the PIC
   register when it is in use; a bare `ret` is allowed only when nothing
   needs popping or no frame pointer was set up.  */
1927 /* Return 1 if it is appropriate to emit `ret' instructions in the
1928 body of a function. Do this only if the epilogue is simple, needing a
1929 couple of insns. Prior to reloading, we can't tell how many registers
1930 must be saved, so return 0 then. Return 0 if there is no frame
1931 marker to de-allocate.
1933 If NON_SAVING_SETJMP is defined and true, then it is not possible
1934 for the epilogue to be simple, so return 0. This is a special case
1935 since NON_SAVING_SETJMP will not cause regs_ever_live to change
1936 until final, but jump_optimize may need to know sooner if a
1940 ix86_can_use_return_insn_p ()
1944 int reglimit
= (frame_pointer_needed
1945 ? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
1946 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1947 || current_function_uses_const_pool
);
1949 #ifdef NON_SAVING_SETJMP
1950 if (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
1954 if (! reload_completed
)
1957 for (regno
= reglimit
- 1; regno
>= 0; regno
--)
1958 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1959 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
1962 return nregs
== 0 || ! frame_pointer_needed
;
1966 /* This function generates the assembly code for function exit.
1967 FILE is an stdio stream to output the code to.
1968 SIZE is an int: how many units of temporary storage to deallocate. */
1971 ix86_expand_epilogue ()
1974 register int nregs
, limit
;
1977 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1978 || current_function_uses_const_pool
);
1979 long tsize
= get_frame_size ();
1981 /* Compute the number of registers to pop */
1983 limit
= (frame_pointer_needed
1984 ? FRAME_POINTER_REGNUM
1985 : STACK_POINTER_REGNUM
);
1989 for (regno
= limit
- 1; regno
>= 0; regno
--)
1990 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1991 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
1994 /* sp is often unreliable so we must go off the frame pointer,
1997 /* In reality, we may not care if sp is unreliable, because we can
1998 restore the register relative to the frame pointer. In theory,
1999 since each move is the same speed as a pop, and we don't need the
2000 leal, this is faster. For now restore multiple registers the old
2003 offset
= -tsize
- (nregs
* UNITS_PER_WORD
);
2005 xops
[2] = stack_pointer_rtx
;
2007 if (nregs
> 1 || ! frame_pointer_needed
)
2009 if (frame_pointer_needed
)
2011 xops
[0] = adj_offsettable_operand (AT_BP (QImode
), offset
);
2012 emit_insn (gen_movsi_lea (xops
[2], XEXP (xops
[0], 0)));
2013 /* output_asm_insn (AS2 (lea%L2,%0,%2), xops);*/
2016 for (regno
= 0; regno
< limit
; regno
++)
2017 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2018 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2020 xops
[0] = gen_rtx (REG
, SImode
, regno
);
2021 emit_insn (gen_pop (xops
[0]));
2022 /* output_asm_insn ("pop%L0 %0", xops);*/
2026 for (regno
= 0; regno
< limit
; regno
++)
2027 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2028 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2030 xops
[0] = gen_rtx (REG
, SImode
, regno
);
2031 xops
[1] = adj_offsettable_operand (AT_BP (Pmode
), offset
);
2032 emit_move_insn (xops
[0], xops
[1]);
2033 /* output_asm_insn (AS2 (mov%L0,%1,%0), xops);*/
2037 if (frame_pointer_needed
)
2039 /* If not an i386, mov & pop is faster than "leave". */
2041 if (TARGET_USE_LEAVE
)
2042 emit_insn (gen_leave());
2043 /* output_asm_insn ("leave", xops);*/
2046 xops
[0] = frame_pointer_rtx
;
2047 xops
[1] = stack_pointer_rtx
;
2048 emit_insn (gen_epilogue_set_stack_ptr());
2049 /* output_asm_insn (AS2 (mov%L2,%0,%2), xops);*/
2050 emit_insn (gen_pop (xops
[0]));
2051 /* output_asm_insn ("pop%L0 %0", xops);*/
2056 /* If there is no frame pointer, we must still release the frame. */
2058 xops
[0] = GEN_INT (tsize
);
2059 emit_insn (gen_rtx (SET
, SImode
,
2061 gen_rtx (PLUS
, SImode
,
2064 /* output_asm_insn (AS2 (add%L2,%0,%2), xops);*/
2067 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2068 if (profile_block_flag
== 2)
2070 FUNCTION_BLOCK_PROFILER_EXIT(file
);
2074 if (current_function_pops_args
&& current_function_args_size
)
2076 xops
[1] = GEN_INT (current_function_pops_args
);
2078 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2079 asked to pop more, pop return address, do explicit add, and jump
2080 indirectly to the caller. */
2082 if (current_function_pops_args
>= 32768)
2084 /* ??? Which register to use here? */
2085 xops
[0] = gen_rtx (REG
, SImode
, 2);
2086 emit_insn (gen_pop (xops
[0]));
2087 /* output_asm_insn ("pop%L0 %0", xops);*/
2088 emit_insn (gen_rtx (SET
, SImode
,
2090 gen_rtx (PLUS
, SImode
,
2093 /* output_asm_insn (AS2 (add%L2,%1,%2), xops);*/
2094 emit_jump_insn (xops
[0]);
2095 /* output_asm_insn ("jmp %*%0", xops);*/
2098 emit_jump_insn (gen_return_pop_internal (xops
[1]));
2099 /* output_asm_insn ("ret %1", xops);*/
2102 /* output_asm_insn ("ret", xops);*/
2103 emit_jump_insn (gen_return_internal ());
2107 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2108 that is a valid memory address for an instruction.
2109 The MODE argument is the machine mode for the MEM expression
2110 that wants to use this address.
2112 On x86, legitimate addresses are:
2113 base movl (base),reg
2114 displacement movl disp,reg
2115 base + displacement movl disp(base),reg
2116 index + base movl (base,index),reg
2117 (index + base) + displacement movl disp(base,index),reg
2118 index*scale movl (,index,scale),reg
2119 index*scale + disp movl disp(,index,scale),reg
2120 index*scale + base movl (base,index,scale),reg
2121 (index*scale + base) + disp movl disp(base,index,scale),reg
2123 In each case, scale can be 1, 2, 4, 8. */
2125 /* This is exactly the same as print_operand_addr, except that
2126 it recognizes addresses instead of printing them.
2128 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2129 convert common non-canonical forms to canonical form so that they will
2132 #define ADDR_INVALID(msg,insn) \
2134 if (TARGET_DEBUG_ADDR) \
2136 fprintf (stderr, msg); \
2142 legitimate_address_p (mode
, addr
, strict
)
2143 enum machine_mode mode
;
2147 rtx base
= NULL_RTX
;
2148 rtx indx
= NULL_RTX
;
2149 rtx scale
= NULL_RTX
;
2150 rtx disp
= NULL_RTX
;
2152 if (TARGET_DEBUG_ADDR
)
2155 "\n==========\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2156 GET_MODE_NAME (mode
), strict
);
2161 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == SUBREG
)
2162 base
= addr
; /* base reg */
2164 else if (GET_CODE (addr
) == PLUS
)
2166 rtx op0
= XEXP (addr
, 0);
2167 rtx op1
= XEXP (addr
, 1);
2168 enum rtx_code code0
= GET_CODE (op0
);
2169 enum rtx_code code1
= GET_CODE (op1
);
2171 if (code0
== REG
|| code0
== SUBREG
)
2173 if (code1
== REG
|| code1
== SUBREG
)
2175 indx
= op0
; /* index + base */
2181 base
= op0
; /* base + displacement */
2186 else if (code0
== MULT
)
2188 indx
= XEXP (op0
, 0);
2189 scale
= XEXP (op0
, 1);
2191 if (code1
== REG
|| code1
== SUBREG
)
2192 base
= op1
; /* index*scale + base */
2195 disp
= op1
; /* index*scale + disp */
2198 else if (code0
== PLUS
&& GET_CODE (XEXP (op0
, 0)) == MULT
)
2200 indx
= XEXP (XEXP (op0
, 0), 0); /* index*scale + base + disp */
2201 scale
= XEXP (XEXP (op0
, 0), 1);
2202 base
= XEXP (op0
, 1);
2206 else if (code0
== PLUS
)
2208 indx
= XEXP (op0
, 0); /* index + base + disp */
2209 base
= XEXP (op0
, 1);
2215 ADDR_INVALID ("PLUS subcode is not valid.\n", op0
);
2220 else if (GET_CODE (addr
) == MULT
)
2222 indx
= XEXP (addr
, 0); /* index*scale */
2223 scale
= XEXP (addr
, 1);
2227 disp
= addr
; /* displacement */
2229 /* Allow arg pointer and stack pointer as index if there is not scaling */
2230 if (base
&& indx
&& !scale
2231 && (indx
== arg_pointer_rtx
|| indx
== stack_pointer_rtx
))
2238 /* Validate base register */
2239 /* Don't allow SUBREG's here, it can lead to spill failures when the base
2240 is one word out of a two word structure, which is represented internally
2244 if (GET_CODE (base
) != REG
)
2246 ADDR_INVALID ("Base is not a register.\n", base
);
2250 if ((strict
&& !REG_OK_FOR_BASE_STRICT_P (base
))
2251 || (!strict
&& !REG_OK_FOR_BASE_NONSTRICT_P (base
)))
2253 ADDR_INVALID ("Base is not valid.\n", base
);
2258 /* Validate index register */
2259 /* Don't allow SUBREG's here, it can lead to spill failures when the index
2260 is one word out of a two word structure, which is represented internally
2264 if (GET_CODE (indx
) != REG
)
2266 ADDR_INVALID ("Index is not a register.\n", indx
);
2270 if ((strict
&& !REG_OK_FOR_INDEX_STRICT_P (indx
))
2271 || (!strict
&& !REG_OK_FOR_INDEX_NONSTRICT_P (indx
)))
2273 ADDR_INVALID ("Index is not valid.\n", indx
);
2278 abort (); /* scale w/o index invalid */
2280 /* Validate scale factor */
2283 HOST_WIDE_INT value
;
2285 if (GET_CODE (scale
) != CONST_INT
)
2287 ADDR_INVALID ("Scale is not valid.\n", scale
);
2291 value
= INTVAL (scale
);
2292 if (value
!= 1 && value
!= 2 && value
!= 4 && value
!= 8)
2294 ADDR_INVALID ("Scale is not a good multiplier.\n", scale
);
2299 /* Validate displacement
2300 Constant pool addresses must be handled special. They are
2301 considered legitimate addresses, but only if not used with regs.
2302 When printed, the output routines know to print the reference with the
2303 PIC reg, even though the PIC reg doesn't appear in the RTL. */
2306 if (GET_CODE (disp
) == SYMBOL_REF
2307 && CONSTANT_POOL_ADDRESS_P (disp
)
2312 else if (!CONSTANT_ADDRESS_P (disp
))
2314 ADDR_INVALID ("Displacement is not valid.\n", disp
);
2318 else if (GET_CODE (disp
) == CONST_DOUBLE
)
2320 ADDR_INVALID ("Displacement is a const_double.\n", disp
);
2324 else if (flag_pic
&& SYMBOLIC_CONST (disp
)
2325 && base
!= pic_offset_table_rtx
2326 && (indx
!= pic_offset_table_rtx
|| scale
!= NULL_RTX
))
2328 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp
);
2332 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp
)
2333 && (base
!= NULL_RTX
|| indx
!= NULL_RTX
))
2335 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n", disp
);
2340 if (TARGET_DEBUG_ADDR
)
2341 fprintf (stderr
, "Address is valid.\n");
2343 /* Everything looks valid, return true */
2348 /* Return a legitimate reference for ORIG (an address) using the
2349 register REG. If REG is 0, a new pseudo is generated.
2351 There are three types of references that must be handled:
2353 1. Global data references must load the address from the GOT, via
2354 the PIC reg. An insn is emitted to do this load, and the reg is
2357 2. Static data references must compute the address as an offset
2358 from the GOT, whose base is in the PIC reg. An insn is emitted to
2359 compute the address into a reg, and the reg is returned. Static
2360 data objects have SYMBOL_REF_FLAG set to differentiate them from
2361 global data objects.
2363 3. Constant pool addresses must be handled special. They are
2364 considered legitimate addresses, but only if not used with regs.
2365 When printed, the output routines know to print the reference with the
2366 PIC reg, even though the PIC reg doesn't appear in the RTL.
2368 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2369 reg also appears in the address (except for constant pool references,
2372 "switch" statements also require special handling when generating
2373 PIC code. See comments by the `casesi' insn in i386.md for details. */
2376 legitimize_pic_address (orig
, reg
)
2383 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
2385 if (GET_CODE (addr
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (addr
))
2390 reg
= gen_reg_rtx (Pmode
);
2392 if ((GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_FLAG (addr
))
2393 || GET_CODE (addr
) == LABEL_REF
)
2394 new = gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
, orig
);
2396 new = gen_rtx (MEM
, Pmode
,
2397 gen_rtx (PLUS
, Pmode
,
2398 pic_offset_table_rtx
, orig
));
2400 emit_move_insn (reg
, new);
2402 current_function_uses_pic_offset_table
= 1;
2405 else if (GET_CODE (addr
) == CONST
|| GET_CODE (addr
) == PLUS
)
2409 if (GET_CODE (addr
) == CONST
)
2411 addr
= XEXP (addr
, 0);
2412 if (GET_CODE (addr
) != PLUS
)
2416 if (XEXP (addr
, 0) == pic_offset_table_rtx
)
2420 reg
= gen_reg_rtx (Pmode
);
2422 base
= legitimize_pic_address (XEXP (addr
, 0), reg
);
2423 addr
= legitimize_pic_address (XEXP (addr
, 1),
2424 base
== reg
? NULL_RTX
: reg
);
2426 if (GET_CODE (addr
) == CONST_INT
)
2427 return plus_constant (base
, INTVAL (addr
));
2429 if (GET_CODE (addr
) == PLUS
&& CONSTANT_P (XEXP (addr
, 1)))
2431 base
= gen_rtx (PLUS
, Pmode
, base
, XEXP (addr
, 0));
2432 addr
= XEXP (addr
, 1);
2434 return gen_rtx (PLUS
, Pmode
, base
, addr
);
2440 /* Emit insns to move operands[1] into operands[0]. */
2443 emit_pic_move (operands
, mode
)
2445 enum machine_mode mode
;
2447 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
2449 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
2450 operands
[1] = (rtx
) force_reg (SImode
, operands
[1]);
2452 operands
[1] = legitimize_pic_address (operands
[1], temp
);
2456 /* Try machine-dependent ways of modifying an illegitimate address
2457 to be legitimate. If we find one, return the new, valid address.
2458 This macro is used in only one place: `memory_address' in explow.c.
2460 OLDX is the address as it was before break_out_memory_refs was called.
2461 In some cases it is useful to look at this to decide what needs to be done.
2463 MODE and WIN are passed so that this macro can use
2464 GO_IF_LEGITIMATE_ADDRESS.
2466 It is always safe for this macro to do nothing. It exists to recognize
2467 opportunities to optimize the output.
2469 For the 80386, we handle X+REG by loading X into a register R and
2470 using R+REG. R will go in a general reg and indexing will be used.
2471 However, if REG is a broken-out memory address or multiplication,
2472 nothing needs to be done because REG can certainly go in a general reg.
2474 When -fpic is used, special handling is needed for symbolic references.
2475 See comments by legitimize_pic_address in i386.c for details. */
2478 legitimize_address (x
, oldx
, mode
)
2481 enum machine_mode mode
;
2486 if (TARGET_DEBUG_ADDR
)
2488 fprintf (stderr
, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n", GET_MODE_NAME (mode
));
2492 if (flag_pic
&& SYMBOLIC_CONST (x
))
2493 return legitimize_pic_address (x
, 0);
2495 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2496 if (GET_CODE (x
) == ASHIFT
2497 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2498 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (x
, 1)))) < 4)
2501 x
= gen_rtx (MULT
, Pmode
,
2502 force_reg (Pmode
, XEXP (x
, 0)),
2503 GEN_INT (1 << log
));
2506 if (GET_CODE (x
) == PLUS
)
2508 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2509 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
2510 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2511 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 0), 1)))) < 4)
2514 XEXP (x
, 0) = gen_rtx (MULT
, Pmode
,
2515 force_reg (Pmode
, XEXP (XEXP (x
, 0), 0)),
2516 GEN_INT (1 << log
));
2519 if (GET_CODE (XEXP (x
, 1)) == ASHIFT
2520 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
2521 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 1), 1)))) < 4)
2524 XEXP (x
, 1) = gen_rtx (MULT
, Pmode
,
2525 force_reg (Pmode
, XEXP (XEXP (x
, 1), 0)),
2526 GEN_INT (1 << log
));
2529 /* Put multiply first if it isn't already */
2530 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2532 rtx tmp
= XEXP (x
, 0);
2533 XEXP (x
, 0) = XEXP (x
, 1);
2538 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2539 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2540 created by virtual register instantiation, register elimination, and
2541 similar optimizations. */
2542 if (GET_CODE (XEXP (x
, 0)) == MULT
&& GET_CODE (XEXP (x
, 1)) == PLUS
)
2545 x
= gen_rtx (PLUS
, Pmode
,
2546 gen_rtx (PLUS
, Pmode
, XEXP (x
, 0), XEXP (XEXP (x
, 1), 0)),
2547 XEXP (XEXP (x
, 1), 1));
2550 /* Canonicalize (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2551 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2552 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 0)) == PLUS
2553 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == MULT
2554 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == PLUS
2555 && CONSTANT_P (XEXP (x
, 1)))
2557 rtx constant
, other
;
2559 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2561 constant
= XEXP (x
, 1);
2562 other
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
2564 else if (GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 1)) == CONST_INT
)
2566 constant
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
2567 other
= XEXP (x
, 1);
2575 x
= gen_rtx (PLUS
, Pmode
,
2576 gen_rtx (PLUS
, Pmode
, XEXP (XEXP (x
, 0), 0),
2577 XEXP (XEXP (XEXP (x
, 0), 1), 0)),
2578 plus_constant (other
, INTVAL (constant
)));
2582 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
2585 if (GET_CODE (XEXP (x
, 0)) == MULT
)
2588 XEXP (x
, 0) = force_operand (XEXP (x
, 0), 0);
2591 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2594 XEXP (x
, 1) = force_operand (XEXP (x
, 1), 0);
2598 && GET_CODE (XEXP (x
, 1)) == REG
2599 && GET_CODE (XEXP (x
, 0)) == REG
)
2602 if (flag_pic
&& SYMBOLIC_CONST (XEXP (x
, 1)))
2605 x
= legitimize_pic_address (x
, 0);
2608 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
2611 if (GET_CODE (XEXP (x
, 0)) == REG
)
2613 register rtx temp
= gen_reg_rtx (Pmode
);
2614 register rtx val
= force_operand (XEXP (x
, 1), temp
);
2616 emit_move_insn (temp
, val
);
2622 else if (GET_CODE (XEXP (x
, 1)) == REG
)
2624 register rtx temp
= gen_reg_rtx (Pmode
);
2625 register rtx val
= force_operand (XEXP (x
, 0), temp
);
2627 emit_move_insn (temp
, val
);
2638 /* Print an integer constant expression in assembler syntax. Addition
2639 and subtraction are the only arithmetic that may appear in these
2640 expressions. FILE is the stdio stream to write to, X is the rtx, and
2641 CODE is the operand print code from the output string. */
2644 output_pic_addr_const (file
, x
, code
)
2651 switch (GET_CODE (x
))
2662 if (GET_CODE (x
) == SYMBOL_REF
)
2663 assemble_name (file
, XSTR (x
, 0));
2666 ASM_GENERATE_INTERNAL_LABEL (buf
, "L",
2667 CODE_LABEL_NUMBER (XEXP (x
, 0)));
2668 assemble_name (asm_out_file
, buf
);
2671 if (GET_CODE (x
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x
))
2672 fprintf (file
, "@GOTOFF(%%ebx)");
2673 else if (code
== 'P')
2674 fprintf (file
, "@PLT");
2675 else if (GET_CODE (x
) == LABEL_REF
)
2676 fprintf (file
, "@GOTOFF");
2677 else if (! SYMBOL_REF_FLAG (x
))
2678 fprintf (file
, "@GOT");
2680 fprintf (file
, "@GOTOFF");
2685 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (x
));
2686 assemble_name (asm_out_file
, buf
);
2690 fprintf (file
, "%d", INTVAL (x
));
2694 /* This used to output parentheses around the expression,
2695 but that does not work on the 386 (either ATT or BSD assembler). */
2696 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2700 if (GET_MODE (x
) == VOIDmode
)
2702 /* We can use %d if the number is <32 bits and positive. */
2703 if (CONST_DOUBLE_HIGH (x
) || CONST_DOUBLE_LOW (x
) < 0)
2704 fprintf (file
, "0x%x%08x",
2705 CONST_DOUBLE_HIGH (x
), CONST_DOUBLE_LOW (x
));
2707 fprintf (file
, "%d", CONST_DOUBLE_LOW (x
));
2710 /* We can't handle floating point constants;
2711 PRINT_OPERAND must handle them. */
2712 output_operand_lossage ("floating constant misused");
2716 /* Some assemblers need integer constants to appear last (eg masm). */
2717 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
2719 output_pic_addr_const (file
, XEXP (x
, 1), code
);
2720 if (INTVAL (XEXP (x
, 0)) >= 0)
2721 fprintf (file
, "+");
2722 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2726 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2727 if (INTVAL (XEXP (x
, 1)) >= 0)
2728 fprintf (file
, "+");
2729 output_pic_addr_const (file
, XEXP (x
, 1), code
);
2734 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2735 fprintf (file
, "-");
2736 output_pic_addr_const (file
, XEXP (x
, 1), code
);
2740 output_operand_lossage ("invalid expression as operand");
2744 /* Append the correct conditional move suffix which corresponds to CODE */
2747 put_condition_code (code
, mode
, file
)
2749 enum mode_class mode
;
2752 if (mode
== MODE_INT
)
2756 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
2762 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
2768 fputs ("ge", file
); return;
2770 fputs ("g", file
); return;
2772 fputs ("le", file
); return;
2774 fputs ("l", file
); return;
2776 fputs ("ae", file
); return;
2778 fputs ("a", file
); return;
2780 fputs ("be", file
); return;
2782 fputs ("b", file
); return;
2783 default: output_operand_lossage ("Invalid %%C operand");
2785 else if (mode
== MODE_FLOAT
)
2789 fputs ("ne", file
); return;
2791 fputs ("e", file
); return;
2793 fputs ("nb", file
); return;
2795 fputs ("nbe", file
); return;
2797 fputs ("be", file
); return;
2799 fputs ("b", file
); return;
2801 fputs ("nb", file
); return;
2803 fputs ("nbe", file
); return;
2805 fputs ("be", file
); return;
2807 fputs ("b", file
); return;
2808 default: output_operand_lossage ("Invalid %%C operand");
2813 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
2814 C -- print opcode suffix for set/cmov insn.
2815 c -- like C, but print reversed condition
2816 F -- print opcode suffix for fcmov insn.
2817 f -- like C, but print reversed condition
2818 R -- print the prefix for register names.
2819 z -- print the opcode suffix for the size of the current operand.
2820 * -- print a star (in certain assembler syntax)
2821 w -- print the operand as if it's a "word" (HImode) even if it isn't.
2822 c -- don't print special prefixes before constant operands.
2823 J -- print the appropriate jump operand.
2824 s -- print a shift double count, followed by the assemblers argument
2826 b -- print the QImode name of the register for the indicated operand.
2827 %b0 would print %al if operands[0] is reg 0.
2828 w -- likewise, print the HImode name of the register.
2829 k -- likewise, print the SImode name of the register.
2830 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
2831 y -- print "st(0)" instead of "st" as a register.
2832 P -- print as a PIC constant
2836 print_operand (file
, x
, code
)
2851 PUT_OP_SIZE (code
, 'l', file
);
2855 PUT_OP_SIZE (code
, 'w', file
);
2859 PUT_OP_SIZE (code
, 'b', file
);
2863 PUT_OP_SIZE (code
, 'l', file
);
2867 PUT_OP_SIZE (code
, 's', file
);
2871 PUT_OP_SIZE (code
, 't', file
);
2875 /* 387 opcodes don't get size suffixes if the operands are
2878 if (STACK_REG_P (x
))
2881 /* this is the size of op from size of operand */
2882 switch (GET_MODE_SIZE (GET_MODE (x
)))
2885 PUT_OP_SIZE ('B', 'b', file
);
2889 PUT_OP_SIZE ('W', 'w', file
);
2893 if (GET_MODE (x
) == SFmode
)
2895 PUT_OP_SIZE ('S', 's', file
);
2899 PUT_OP_SIZE ('L', 'l', file
);
2903 PUT_OP_SIZE ('T', 't', file
);
2907 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
)
2909 #ifdef GAS_MNEMONICS
2910 PUT_OP_SIZE ('Q', 'q', file
);
2913 PUT_OP_SIZE ('Q', 'l', file
); /* Fall through */
2917 PUT_OP_SIZE ('Q', 'l', file
);
2930 switch (GET_CODE (x
))
2932 /* These conditions are appropriate for testing the result
2933 of an arithmetic operation, not for a compare operation.
2934 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
2935 CC_Z_IN_NOT_C false and not floating point. */
2936 case NE
: fputs ("jne", file
); return;
2937 case EQ
: fputs ("je", file
); return;
2938 case GE
: fputs ("jns", file
); return;
2939 case LT
: fputs ("js", file
); return;
2940 case GEU
: fputs ("jmp", file
); return;
2941 case GTU
: fputs ("jne", file
); return;
2942 case LEU
: fputs ("je", file
); return;
2943 case LTU
: fputs ("#branch never", file
); return;
2945 /* no matching branches for GT nor LE */
2950 if (GET_CODE (x
) == CONST_INT
|| ! SHIFT_DOUBLE_OMITS_COUNT
)
2952 PRINT_OPERAND (file
, x
, 0);
2953 fputs (AS2C (,) + 1, file
);
2957 /* This is used by the conditional move instructions. */
2959 put_condition_code (GET_CODE (x
), MODE_INT
, file
);
2962 /* like above, but reverse condition */
2964 put_condition_code (reverse_condition (GET_CODE (x
)), MODE_INT
, file
);
2968 put_condition_code (GET_CODE (x
), MODE_FLOAT
, file
);
2971 /* like above, but reverse condition */
2973 put_condition_code (reverse_condition (GET_CODE (x
)),
2981 sprintf (str
, "invalid operand code `%c'", code
);
2982 output_operand_lossage (str
);
2986 if (GET_CODE (x
) == REG
)
2988 PRINT_REG (x
, code
, file
);
2990 else if (GET_CODE (x
) == MEM
)
2992 PRINT_PTR (x
, file
);
2993 if (CONSTANT_ADDRESS_P (XEXP (x
, 0)))
2996 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2998 output_addr_const (file
, XEXP (x
, 0));
3001 output_address (XEXP (x
, 0));
3003 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
3005 REAL_VALUE_TYPE r
; long l
;
3006 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3007 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
3008 PRINT_IMMED_PREFIX (file
);
3009 fprintf (file
, "0x%x", l
);
3011 /* These float cases don't actually occur as immediate operands. */
3012 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
3014 REAL_VALUE_TYPE r
; char dstr
[30];
3015 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3016 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
3017 fprintf (file
, "%s", dstr
);
3019 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == XFmode
)
3021 REAL_VALUE_TYPE r
; char dstr
[30];
3022 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3023 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
3024 fprintf (file
, "%s", dstr
);
3030 if (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
)
3031 PRINT_IMMED_PREFIX (file
);
3032 else if (GET_CODE (x
) == CONST
|| GET_CODE (x
) == SYMBOL_REF
3033 || GET_CODE (x
) == LABEL_REF
)
3034 PRINT_OFFSET_PREFIX (file
);
3037 output_pic_addr_const (file
, x
, code
);
3039 output_addr_const (file
, x
);
3043 /* Print a memory operand whose address is ADDR. */
3046 print_operand_address (file
, addr
)
3050 register rtx reg1
, reg2
, breg
, ireg
;
3053 switch (GET_CODE (addr
))
3057 fprintf (file
, "%se", RP
);
3058 fputs (hi_reg_name
[REGNO (addr
)], file
);
3068 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0)))
3070 offset
= XEXP (addr
, 0);
3071 addr
= XEXP (addr
, 1);
3073 else if (CONSTANT_ADDRESS_P (XEXP (addr
, 1)))
3075 offset
= XEXP (addr
, 1);
3076 addr
= XEXP (addr
, 0);
3078 if (GET_CODE (addr
) != PLUS
) ;
3079 else if (GET_CODE (XEXP (addr
, 0)) == MULT
)
3081 reg1
= XEXP (addr
, 0);
3082 addr
= XEXP (addr
, 1);
3084 else if (GET_CODE (XEXP (addr
, 1)) == MULT
)
3086 reg1
= XEXP (addr
, 1);
3087 addr
= XEXP (addr
, 0);
3089 else if (GET_CODE (XEXP (addr
, 0)) == REG
)
3091 reg1
= XEXP (addr
, 0);
3092 addr
= XEXP (addr
, 1);
3094 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
3096 reg1
= XEXP (addr
, 1);
3097 addr
= XEXP (addr
, 0);
3099 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == MULT
)
3101 if (reg1
== 0) reg1
= addr
;
3107 if (addr
!= 0) abort ();
3110 if ((reg1
&& GET_CODE (reg1
) == MULT
)
3111 || (reg2
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2
))))
3116 else if (reg1
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1
)))
3122 if (ireg
!= 0 || breg
!= 0)
3129 output_pic_addr_const (file
, addr
, 0);
3131 else if (GET_CODE (addr
) == LABEL_REF
)
3132 output_asm_label (addr
);
3135 output_addr_const (file
, addr
);
3138 if (ireg
!= 0 && GET_CODE (ireg
) == MULT
)
3140 scale
= INTVAL (XEXP (ireg
, 1));
3141 ireg
= XEXP (ireg
, 0);
3144 /* The stack pointer can only appear as a base register,
3145 never an index register, so exchange the regs if it is wrong. */
3147 if (scale
== 1 && ireg
&& REGNO (ireg
) == STACK_POINTER_REGNUM
)
3156 /* output breg+ireg*scale */
3157 PRINT_B_I_S (breg
, ireg
, scale
, file
);
3164 if (GET_CODE (XEXP (addr
, 0)) == CONST_INT
)
3166 scale
= INTVAL (XEXP (addr
, 0));
3167 ireg
= XEXP (addr
, 1);
3171 scale
= INTVAL (XEXP (addr
, 1));
3172 ireg
= XEXP (addr
, 0);
3174 output_addr_const (file
, const0_rtx
);
3175 PRINT_B_I_S ((rtx
) 0, ireg
, scale
, file
);
3180 if (GET_CODE (addr
) == CONST_INT
3181 && INTVAL (addr
) < 0x8000
3182 && INTVAL (addr
) >= -0x8000)
3183 fprintf (file
, "%d", INTVAL (addr
));
3187 output_pic_addr_const (file
, addr
, 0);
3189 output_addr_const (file
, addr
);
3194 /* Set the cc_status for the results of an insn whose pattern is EXP.
3195 On the 80386, we assume that only test and compare insns, as well
3196 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3197 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3198 Also, we assume that jumps, moves and sCOND don't affect the condition
3199 codes. All else clobbers the condition codes, by assumption.
3201 We assume that ALL integer add, minus, etc. instructions effect the
3202 condition codes. This MUST be consistent with i386.md.
3204 We don't record any float test or compare - the redundant test &
3205 compare check in final.c does not handle stack-like regs correctly. */
3208 notice_update_cc (exp
)
3211 if (GET_CODE (exp
) == SET
)
3213 /* Jumps do not alter the cc's. */
3214 if (SET_DEST (exp
) == pc_rtx
)
3216 #ifdef IS_STACK_MODE
3217 /* Moving into a memory of stack_mode may have been moved
3218 in between the use and set of cc0 by loop_spl(). So
3219 old value of cc.status must be retained */
3220 if(GET_CODE(SET_DEST(exp
))==MEM
3221 && IS_STACK_MODE(GET_MODE(SET_DEST(exp
))))
3226 /* Moving register or memory into a register:
3227 it doesn't alter the cc's, but it might invalidate
3228 the RTX's which we remember the cc's came from.
3229 (Note that moving a constant 0 or 1 MAY set the cc's). */
3230 if (REG_P (SET_DEST (exp
))
3231 && (REG_P (SET_SRC (exp
)) || GET_CODE (SET_SRC (exp
)) == MEM
3232 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3234 if (cc_status
.value1
3235 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3236 cc_status
.value1
= 0;
3237 if (cc_status
.value2
3238 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3239 cc_status
.value2
= 0;
3242 /* Moving register into memory doesn't alter the cc's.
3243 It may invalidate the RTX's which we remember the cc's came from. */
3244 if (GET_CODE (SET_DEST (exp
)) == MEM
3245 && (REG_P (SET_SRC (exp
))
3246 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3248 if (cc_status
.value1
&& GET_CODE (cc_status
.value1
) == MEM
3249 || reg_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3250 cc_status
.value1
= 0;
3251 if (cc_status
.value2
&& GET_CODE (cc_status
.value2
) == MEM
3252 || reg_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3253 cc_status
.value2
= 0;
3256 /* Function calls clobber the cc's. */
3257 else if (GET_CODE (SET_SRC (exp
)) == CALL
)
3262 /* Tests and compares set the cc's in predictable ways. */
3263 else if (SET_DEST (exp
) == cc0_rtx
)
3266 cc_status
.value1
= SET_SRC (exp
);
3269 /* Certain instructions effect the condition codes. */
3270 else if (GET_MODE (SET_SRC (exp
)) == SImode
3271 || GET_MODE (SET_SRC (exp
)) == HImode
3272 || GET_MODE (SET_SRC (exp
)) == QImode
)
3273 switch (GET_CODE (SET_SRC (exp
)))
3275 case ASHIFTRT
: case LSHIFTRT
:
3277 /* Shifts on the 386 don't set the condition codes if the
3278 shift count is zero. */
3279 if (GET_CODE (XEXP (SET_SRC (exp
), 1)) != CONST_INT
)
3284 /* We assume that the CONST_INT is non-zero (this rtx would
3285 have been deleted if it were zero. */
3287 case PLUS
: case MINUS
: case NEG
:
3288 case AND
: case IOR
: case XOR
:
3289 cc_status
.flags
= CC_NO_OVERFLOW
;
3290 cc_status
.value1
= SET_SRC (exp
);
3291 cc_status
.value2
= SET_DEST (exp
);
3302 else if (GET_CODE (exp
) == PARALLEL
3303 && GET_CODE (XVECEXP (exp
, 0, 0)) == SET
)
3305 if (SET_DEST (XVECEXP (exp
, 0, 0)) == pc_rtx
)
3307 if (SET_DEST (XVECEXP (exp
, 0, 0)) == cc0_rtx
)
3310 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp
, 0, 0))))
3311 cc_status
.flags
|= CC_IN_80387
;
3313 cc_status
.value1
= SET_SRC (XVECEXP (exp
, 0, 0));
3324 /* Split one or more DImode RTL references into pairs of SImode
3325 references. The RTL can be REG, offsettable MEM, integer constant, or
3326 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3327 split and "num" is its length. lo_half and hi_half are output arrays
3328 that parallel "operands". */
3331 split_di (operands
, num
, lo_half
, hi_half
)
3334 rtx lo_half
[], hi_half
[];
3338 if (GET_CODE (operands
[num
]) == REG
)
3340 lo_half
[num
] = gen_rtx (REG
, SImode
, REGNO (operands
[num
]));
3341 hi_half
[num
] = gen_rtx (REG
, SImode
, REGNO (operands
[num
]) + 1);
3343 else if (CONSTANT_P (operands
[num
]))
3345 split_double (operands
[num
], &lo_half
[num
], &hi_half
[num
]);
3347 else if (offsettable_memref_p (operands
[num
]))
3349 lo_half
[num
] = operands
[num
];
3350 hi_half
[num
] = adj_offsettable_operand (operands
[num
], 4);
3357 /* Return 1 if this is a valid binary operation on a 387.
3358 OP is the expression matched, and MODE is its mode. */
3361 binary_387_op (op
, mode
)
3363 enum machine_mode mode
;
3365 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3368 switch (GET_CODE (op
))
3374 return GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
;
3382 /* Return 1 if this is a valid shift or rotate operation on a 386.
3383 OP is the expression matched, and MODE is its mode. */
3388 enum machine_mode mode
;
3390 rtx operand
= XEXP (op
, 0);
3392 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3395 if (GET_MODE (operand
) != GET_MODE (op
)
3396 || GET_MODE_CLASS (GET_MODE (op
)) != MODE_INT
)
3399 return (GET_CODE (op
) == ASHIFT
3400 || GET_CODE (op
) == ASHIFTRT
3401 || GET_CODE (op
) == LSHIFTRT
3402 || GET_CODE (op
) == ROTATE
3403 || GET_CODE (op
) == ROTATERT
);
3406 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3407 MODE is not used. */
3410 VOIDmode_compare_op (op
, mode
)
3412 enum machine_mode mode
;
3414 return GET_CODE (op
) == COMPARE
&& GET_MODE (op
) == VOIDmode
;
3417 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3418 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3419 is the expression of the binary operation. The output may either be
3420 emitted here, or returned to the caller, like all output_* functions.
3422 There is no guarantee that the operands are the same mode, as they
3423 might be within FLOAT or FLOAT_EXTEND expressions. */
3426 output_387_binary_op (insn
, operands
)
3432 static char buf
[100];
3434 switch (GET_CODE (operands
[3]))
3437 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3438 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3445 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3446 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3453 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3454 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3461 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3462 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3472 strcpy (buf
, base_op
);
3474 switch (GET_CODE (operands
[3]))
3478 if (REG_P (operands
[2]) && REGNO (operands
[0]) == REGNO (operands
[2]))
3481 operands
[2] = operands
[1];
3485 if (GET_CODE (operands
[2]) == MEM
)
3486 return strcat (buf
, AS1 (%z2
,%2));
3488 if (NON_STACK_REG_P (operands
[1]))
3490 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
3493 else if (NON_STACK_REG_P (operands
[2]))
3495 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3499 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3500 return strcat (buf
, AS2 (p
,%2,%0));
3502 if (STACK_TOP_P (operands
[0]))
3503 return strcat (buf
, AS2C (%y2
,%0));
3505 return strcat (buf
, AS2C (%2,%0));
3509 if (GET_CODE (operands
[1]) == MEM
)
3510 return strcat (buf
, AS1 (r
%z1
,%1));
3512 if (GET_CODE (operands
[2]) == MEM
)
3513 return strcat (buf
, AS1 (%z2
,%2));
3515 if (NON_STACK_REG_P (operands
[1]))
3517 output_op_from_reg (operands
[1], strcat (buf
, AS1 (r
%z0
,%1)));
3520 else if (NON_STACK_REG_P (operands
[2]))
3522 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3526 if (! STACK_REG_P (operands
[1]) || ! STACK_REG_P (operands
[2]))
3529 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3530 return strcat (buf
, AS2 (rp
,%2,%0));
3532 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1])))
3533 return strcat (buf
, AS2 (p
,%1,%0));
3535 if (STACK_TOP_P (operands
[0]))
3537 if (STACK_TOP_P (operands
[1]))
3538 return strcat (buf
, AS2C (%y2
,%0));
3540 return strcat (buf
, AS2 (r
,%y1
,%0));
3542 else if (STACK_TOP_P (operands
[1]))
3543 return strcat (buf
, AS2C (%1,%0));
3545 return strcat (buf
, AS2 (r
,%2,%0));
3552 /* Output code for INSN to convert a float to a signed int. OPERANDS
3553 are the insn operands. The output may be SFmode or DFmode and the
3554 input operand may be SImode or DImode. As a special case, make sure
3555 that the 387 stack top dies if the output mode is DImode, because the
3556 hardware requires this. */
3559 output_fix_trunc (insn
, operands
)
3563 int stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
3566 if (! STACK_TOP_P (operands
[1]) ||
3567 (GET_MODE (operands
[0]) == DImode
&& ! stack_top_dies
))
3570 xops
[0] = GEN_INT (12);
3571 xops
[1] = operands
[4];
3573 output_asm_insn (AS1 (fnstc
%W2
,%2), operands
);
3574 output_asm_insn (AS2 (mov
%L2
,%2,%4), operands
);
3575 output_asm_insn (AS2 (mov
%B1
,%0,%h1
), xops
);
3576 output_asm_insn (AS2 (mov
%L4
,%4,%3), operands
);
3577 output_asm_insn (AS1 (fldc
%W3
,%3), operands
);
3579 if (NON_STACK_REG_P (operands
[0]))
3580 output_to_reg (operands
[0], stack_top_dies
);
3581 else if (GET_CODE (operands
[0]) == MEM
)
3584 output_asm_insn (AS1 (fistp
%z0
,%0), operands
);
3586 output_asm_insn (AS1 (fist
%z0
,%0), operands
);
3591 return AS1 (fldc
%W2
,%2);
3594 /* Output code for INSN to compare OPERANDS. The two operands might
3595 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3596 expression. If the compare is in mode CCFPEQmode, use an opcode that
3597 will not fault if a qNaN is present. */
3600 output_float_compare (insn
, operands
)
3605 rtx body
= XVECEXP (PATTERN (insn
), 0, 0);
3606 int unordered_compare
= GET_MODE (SET_SRC (body
)) == CCFPEQmode
;
3607 int target_fcomi
= TARGET_CMOVE
&& STACK_REG_P (operands
[1]);
3610 if (! STACK_TOP_P (operands
[0]))
3613 operands
[0] = operands
[1];
3615 cc_status
.flags
|= CC_REVERSED
;
3618 if (! STACK_TOP_P (operands
[0]))
3621 stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
3623 if (STACK_REG_P (operands
[1])
3625 && find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1]))
3626 && REGNO (operands
[1]) != FIRST_STACK_REG
)
3628 /* If both the top of the 387 stack dies, and the other operand
3629 is also a stack register that dies, then this must be a
3630 `fcompp' float compare */
3632 if (unordered_compare
)
3633 output_asm_insn ("fucompp", operands
);
3635 output_asm_insn ("fcompp", operands
);
3639 static char buf
[100];
3641 /* Decide if this is the integer or float compare opcode, or the
3642 unordered float compare. */
3644 if (unordered_compare
)
3645 strcpy (buf
, target_fcomi
? "fucomi" : "fucom");
3646 else if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_FLOAT
)
3647 strcpy (buf
, target_fcomi
? "fcomi" : "fcom");
3649 strcpy (buf
, "ficom");
3651 /* Modify the opcode if the 387 stack is to be popped. */
3656 if (NON_STACK_REG_P (operands
[1]))
3657 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
3658 else if (target_fcomi
)
3662 xops
[0] = operands
[0];
3663 xops
[1] = operands
[1];
3664 xops
[2] = operands
[0];
3666 output_asm_insn (strcat (buf
, AS2 (%z1
,%y1
,%2)), xops
);
3670 output_asm_insn (strcat (buf
, AS1 (%z1
,%y1
)), operands
);
3673 /* Now retrieve the condition code. */
3675 return output_fp_cc0_set (insn
);
3678 /* Output opcodes to transfer the results of FP compare or test INSN
3679 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
3680 result of the compare or test is unordered, no comparison operator
3681 succeeds except NE. Return an output template, if any. */
3684 output_fp_cc0_set (insn
)
3688 rtx unordered_label
;
3692 xops
[0] = gen_rtx (REG
, HImode
, 0);
3693 output_asm_insn (AS1 (fnsts
%W0
,%0), xops
);
3695 if (! TARGET_IEEE_FP
)
3697 if (!(cc_status
.flags
& CC_REVERSED
))
3699 next
= next_cc0_user (insn
);
3701 if (GET_CODE (next
) == JUMP_INSN
3702 && GET_CODE (PATTERN (next
)) == SET
3703 && SET_DEST (PATTERN (next
)) == pc_rtx
3704 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
3706 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
3708 else if (GET_CODE (PATTERN (next
)) == SET
)
3710 code
= GET_CODE (SET_SRC (PATTERN (next
)));
3716 if (code
== GT
|| code
== LT
|| code
== EQ
|| code
== NE
3717 || code
== LE
|| code
== GE
)
3718 { /* We will test eax directly */
3719 cc_status
.flags
|= CC_TEST_AX
;
3726 next
= next_cc0_user (insn
);
3727 if (next
== NULL_RTX
)
3730 if (GET_CODE (next
) == JUMP_INSN
3731 && GET_CODE (PATTERN (next
)) == SET
3732 && SET_DEST (PATTERN (next
)) == pc_rtx
3733 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
3735 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
3737 else if (GET_CODE (PATTERN (next
)) == SET
)
3739 if (GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
3740 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
3741 else code
= GET_CODE (SET_SRC (PATTERN (next
)));
3746 xops
[0] = gen_rtx (REG
, QImode
, 0);
3751 xops
[1] = GEN_INT (0x45);
3752 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3757 xops
[1] = GEN_INT (0x45);
3758 xops
[2] = GEN_INT (0x01);
3759 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3760 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
3765 xops
[1] = GEN_INT (0x05);
3766 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3771 xops
[1] = GEN_INT (0x45);
3772 xops
[2] = GEN_INT (0x40);
3773 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3774 output_asm_insn (AS1 (dec
%B0
,%h0
), xops
);
3775 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
3780 xops
[1] = GEN_INT (0x45);
3781 xops
[2] = GEN_INT (0x40);
3782 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3783 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
3788 xops
[1] = GEN_INT (0x44);
3789 xops
[2] = GEN_INT (0x40);
3790 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3791 output_asm_insn (AS2 (xor%B0
,%2,%h0
), xops
);
3805 #define MAX_386_STACK_LOCALS 2
3807 static rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
3809 /* Define the structure for the machine field in struct function. */
3810 struct machine_function
3812 rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
3815 /* Functions to save and restore i386_stack_locals.
3816 These will be called, via pointer variables,
3817 from push_function_context and pop_function_context. */
3820 save_386_machine_status (p
)
3823 p
->machine
= (struct machine_function
*) xmalloc (sizeof i386_stack_locals
);
3824 bcopy ((char *) i386_stack_locals
, (char *) p
->machine
->i386_stack_locals
,
3825 sizeof i386_stack_locals
);
3829 restore_386_machine_status (p
)
3832 bcopy ((char *) p
->machine
->i386_stack_locals
, (char *) i386_stack_locals
,
3833 sizeof i386_stack_locals
);
3837 /* Clear stack slot assignments remembered from previous functions.
3838 This is called from INIT_EXPANDERS once before RTL is emitted for each
3842 clear_386_stack_locals ()
3844 enum machine_mode mode
;
3847 for (mode
= VOIDmode
; (int) mode
< (int) MAX_MACHINE_MODE
;
3848 mode
= (enum machine_mode
) ((int) mode
+ 1))
3849 for (n
= 0; n
< MAX_386_STACK_LOCALS
; n
++)
3850 i386_stack_locals
[(int) mode
][n
] = NULL_RTX
;
3852 /* Arrange to save and restore i386_stack_locals around nested functions. */
3853 save_machine_status
= save_386_machine_status
;
3854 restore_machine_status
= restore_386_machine_status
;
3857 /* Return a MEM corresponding to a stack slot with mode MODE.
3858 Allocate a new slot if necessary.
3860 The RTL for a function can have several slots available: N is
3861 which slot to use. */
3864 assign_386_stack_local (mode
, n
)
3865 enum machine_mode mode
;
3868 if (n
< 0 || n
>= MAX_386_STACK_LOCALS
)
3871 if (i386_stack_locals
[(int) mode
][n
] == NULL_RTX
)
3872 i386_stack_locals
[(int) mode
][n
]
3873 = assign_stack_local (mode
, GET_MODE_SIZE (mode
), 0);
3875 return i386_stack_locals
[(int) mode
][n
];
3881 enum machine_mode mode
;
3883 return (GET_CODE (op
) == MULT
);
3888 enum machine_mode mode
;
3890 return (GET_CODE (op
) == DIV
);
3895 /* Create a new copy of an rtx.
3896 Recursively copies the operands of the rtx,
3897 except for those few rtx codes that are sharable.
3898 Doesn't share CONST */
3906 register RTX_CODE code
;
3907 register char *format_ptr
;
3909 code
= GET_CODE (orig
);
3922 /* SCRATCH must be shared because they represent distinct values. */
3927 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
3928 a LABEL_REF, it isn't sharable. */
3929 if (GET_CODE (XEXP (orig
, 0)) == PLUS
3930 && GET_CODE (XEXP (XEXP (orig
, 0), 0)) == SYMBOL_REF
3931 && GET_CODE (XEXP (XEXP (orig
, 0), 1)) == CONST_INT
)
3935 /* A MEM with a constant address is not sharable. The problem is that
3936 the constant address may need to be reloaded. If the mem is shared,
3937 then reloading one copy of this mem will cause all copies to appear
3938 to have been reloaded. */
3941 copy
= rtx_alloc (code
);
3942 PUT_MODE (copy
, GET_MODE (orig
));
3943 copy
->in_struct
= orig
->in_struct
;
3944 copy
->volatil
= orig
->volatil
;
3945 copy
->unchanging
= orig
->unchanging
;
3946 copy
->integrated
= orig
->integrated
;
3948 copy
->is_spill_rtx
= orig
->is_spill_rtx
;
3950 format_ptr
= GET_RTX_FORMAT (GET_CODE (copy
));
3952 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (copy
)); i
++)
3954 switch (*format_ptr
++)
3957 XEXP (copy
, i
) = XEXP (orig
, i
);
3958 if (XEXP (orig
, i
) != NULL
)
3959 XEXP (copy
, i
) = copy_rtx (XEXP (orig
, i
));
3964 XEXP (copy
, i
) = XEXP (orig
, i
);
3969 XVEC (copy
, i
) = XVEC (orig
, i
);
3970 if (XVEC (orig
, i
) != NULL
)
3972 XVEC (copy
, i
) = rtvec_alloc (XVECLEN (orig
, i
));
3973 for (j
= 0; j
< XVECLEN (copy
, i
); j
++)
3974 XVECEXP (copy
, i
, j
) = copy_rtx (XVECEXP (orig
, i
, j
));
3979 XWINT (copy
, i
) = XWINT (orig
, i
);
3983 XINT (copy
, i
) = XINT (orig
, i
);
3988 XSTR (copy
, i
) = XSTR (orig
, i
);
3999 /* try to rewrite a memory address to make it valid */
/* NOTE(review): this whole region was damaged in extraction -- every
   statement is broken across several physical lines, the original file's
   line numbers are fused into the text, and gaps in those numbers show
   that many lines (braces, `else` arms, `return`s, some declarations such
   as those of `in_struct` and `scale`) were dropped entirely.  The code is
   left byte-identical below; comments only annotate the surviving
   fragments.  Restore from the pristine file before compiling.  */
/* Purpose (from the surviving comments and fragments): MEM_RTX is a MEM
   whose address may not match the i386 addressing constraints; the routine
   decomposes the address into base / index / scale / offset, folds
   constant displacements into OFFSET_ADJUST, and rebuilds a legitimate
   base + index*scale + offset form which is stored back into
   XEXP (mem_rtx, 0), propagating the spill flag via RTX_IS_SPILL_P.  */
4001 rewrite_address (mem_rtx
)
4004 rtx index_rtx
, base_rtx
, offset_rtx
, scale_rtx
, ret_rtx
;
4006 int offset_adjust
= 0;
4007 int was_only_offset
= 0;
4008 rtx mem_addr
= XEXP (mem_rtx
, 0);
/* `storage' marks an obstack high-water mark so partial rebuilds can be
   discarded -- presumably via obfree on the missing failure paths; the
   freeing lines did not survive extraction.  TODO confirm.  */
4009 char *storage
= (char *) oballoc (0);
4011 int is_spill_rtx
= 0;
4013 in_struct
= MEM_IN_STRUCT_P (mem_rtx
);
4014 is_spill_rtx
= RTX_IS_SPILL_P (mem_rtx
);
/* First special case: (plus X (plus (reg) Y)) as produced by the combiner
   is re-associated; if the re-associated form is a valid address it is
   installed directly.  The `ret_rtx =' line before the gen_rtx call was
   lost in extraction.  */
4016 if (GET_CODE (mem_addr
) == PLUS
&&
4017 GET_CODE (XEXP (mem_addr
, 1)) == PLUS
&&
4018 GET_CODE (XEXP (XEXP (mem_addr
, 1), 0)) == REG
)
4019 { /* this part is utilized by the combiner */
4021 gen_rtx (PLUS
, GET_MODE (mem_addr
),
4022 gen_rtx (PLUS
, GET_MODE (XEXP (mem_addr
, 1)),
4024 XEXP (XEXP (mem_addr
, 1), 0)),
4025 XEXP (XEXP (mem_addr
, 1), 1));
4026 if (memory_address_p (GET_MODE (mem_rtx
), ret_rtx
))
4028 XEXP (mem_rtx
, 0) = ret_rtx
;
4029 RTX_IS_SPILL_P (ret_rtx
) = is_spill_rtx
;
4035 /* this part is utilized by loop.c */
4036 /* If the address contains PLUS (reg,const) and this pattern is invalid
4037 in this case - try to rewrite the address to make it valid intel1
/* NOTE(review): decomposition phase -- classify the PLUS/MULT/CONST_INT
   shapes of the address into base_rtx / index_rtx / offset_rtx.  Several
   `else` branches and braces between the numbered fragments are missing.  */
4039 storage
= (char *) oballoc (0);
4040 index_rtx
= base_rtx
= offset_rtx
= NULL
;
4041 /* find the base index and offset elements of the memory address */
4042 if (GET_CODE (mem_addr
) == PLUS
)
4044 if (GET_CODE (XEXP (mem_addr
, 0)) == REG
)
4046 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4048 base_rtx
= XEXP (mem_addr
, 1);
4049 index_rtx
= XEXP (mem_addr
, 0);
4053 base_rtx
= XEXP (mem_addr
, 0);
4054 offset_rtx
= XEXP (mem_addr
, 1);
4057 else if (GET_CODE (XEXP (mem_addr
, 0)) == MULT
)
4059 index_rtx
= XEXP (mem_addr
, 0);
4060 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4062 base_rtx
= XEXP (mem_addr
, 1);
4066 offset_rtx
= XEXP (mem_addr
, 1);
/* (plus (plus (plus (mult reg const) const) reg) symbol_ref): pull the
   inner constant out into offset_adjust.  */
4069 else if (GET_CODE (XEXP (mem_addr
, 0)) == PLUS
)
4072 if (GET_CODE (XEXP (XEXP (mem_addr
, 0), 0)) == PLUS
&&
4073 GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0)) == MULT
&&
4074 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 0)) == REG
&&
4075 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 1)) == CONST_INT
&&
4076 GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1)) == CONST_INT
&&
4077 GET_CODE (XEXP (XEXP (mem_addr
, 0), 1)) == REG
&&
4078 GET_CODE (XEXP (mem_addr
, 1)) == SYMBOL_REF
)
4080 index_rtx
= XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0);
4081 offset_rtx
= XEXP (mem_addr
, 1);
4082 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4083 offset_adjust
= INTVAL (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1));
4087 offset_rtx
= XEXP (mem_addr
, 1);
4088 index_rtx
= XEXP (XEXP (mem_addr
, 0), 0);
4089 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
/* (plus const_int X): if the constant is zero the rewrite is trivial --
   install the other operand and (presumably, on missing lines) return.  */
4092 else if (GET_CODE (XEXP (mem_addr
, 0)) == CONST_INT
)
4094 was_only_offset
= 1;
4097 offset_rtx
= XEXP (mem_addr
, 1);
4098 offset_adjust
= INTVAL (XEXP (mem_addr
, 0));
4099 if (offset_adjust
== 0)
4101 XEXP (mem_rtx
, 0) = offset_rtx
;
4102 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4112 else if (GET_CODE (mem_addr
) == MULT
)
4114 index_rtx
= mem_addr
;
/* Extract the scale from (mult X const_int); a non-CONST_INT scale is a
   bail-out case (the `return' on line 4124-ish is missing).  `scale' is
   used below but its declaration did not survive extraction.  */
4121 if (index_rtx
&& GET_CODE (index_rtx
) == MULT
)
4123 if (GET_CODE (XEXP (index_rtx
, 1)) != CONST_INT
)
4128 scale_rtx
= XEXP (index_rtx
, 1);
4129 scale
= INTVAL (scale_rtx
);
4130 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4132 /* now find which of the elements are invalid and try to fix them */
4133 if (index_rtx
&& GET_CODE (index_rtx
) == CONST_INT
&& base_rtx
== NULL
)
4135 offset_adjust
= INTVAL (index_rtx
) * scale
;
/* Fold offset_adjust into the symbolic/constant offset, in each of its
   shapes: (const (plus symbol_ref const_int)), bare symbol_ref,
   const_int, or absent.  */
4136 if (offset_rtx
&& GET_CODE (offset_rtx
) == CONST
&&
4137 GET_CODE (XEXP (offset_rtx
, 0)) == PLUS
)
4139 if (GET_CODE (XEXP (XEXP (offset_rtx
, 0), 0)) == SYMBOL_REF
&&
4140 GET_CODE (XEXP (XEXP (offset_rtx
, 0), 1)) == CONST_INT
)
4142 offset_rtx
= copy_all_rtx (offset_rtx
);
4143 XEXP (XEXP (offset_rtx
, 0), 1) =
4144 gen_rtx (CONST_INT
, 0, INTVAL (XEXP (XEXP (offset_rtx
, 0), 1)) + offset_adjust
);
4145 if (!CONSTANT_P (offset_rtx
))
4152 else if (offset_rtx
&& GET_CODE (offset_rtx
) == SYMBOL_REF
)
4155 gen_rtx (CONST
, GET_MODE (offset_rtx
),
4156 gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4158 gen_rtx (CONST_INT
, 0, offset_adjust
)));
4159 if (!CONSTANT_P (offset_rtx
))
4165 else if (offset_rtx
&& GET_CODE (offset_rtx
) == CONST_INT
)
4167 offset_rtx
= gen_rtx (CONST_INT
, 0, INTVAL (offset_rtx
) + offset_adjust
);
4169 else if (!offset_rtx
)
4171 offset_rtx
= gen_rtx (CONST_INT
, 0, 0);
4173 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4174 XEXP (mem_rtx
, 0) = offset_rtx
;
/* Fold a constant displacement hiding in base or index into
   offset_adjust and strip it.  */
4177 if (base_rtx
&& GET_CODE (base_rtx
) == PLUS
&&
4178 GET_CODE (XEXP (base_rtx
, 0)) == REG
&&
4179 GET_CODE (XEXP (base_rtx
, 1)) == CONST_INT
)
4181 offset_adjust
+= INTVAL (XEXP (base_rtx
, 1));
4182 base_rtx
= copy_all_rtx (XEXP (base_rtx
, 0));
4184 else if (base_rtx
&& GET_CODE (base_rtx
) == CONST_INT
)
4186 offset_adjust
+= INTVAL (base_rtx
);
4189 if (index_rtx
&& GET_CODE (index_rtx
) == PLUS
&&
4190 GET_CODE (XEXP (index_rtx
, 0)) == REG
&&
4191 GET_CODE (XEXP (index_rtx
, 1)) == CONST_INT
)
4193 offset_adjust
+= INTVAL (XEXP (index_rtx
, 1)) * scale
;
4194 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
/* Legitimacy checks on the rebuilt index and base; the bail-out bodies
   (which presumably obfree `storage' and return) are on missing lines.  */
4198 if (!LEGITIMATE_INDEX_P (index_rtx
)
4199 && !(index_rtx
== stack_pointer_rtx
&& scale
== 1 && base_rtx
== NULL
))
4207 if (!LEGITIMATE_INDEX_P (base_rtx
) && GET_CODE (base_rtx
) != REG
)
/* Re-fold the accumulated offset_adjust into offset_rtx a second time,
   mirroring the shapes handled above.  */
4213 if (offset_adjust
!= 0)
4217 if (GET_CODE (offset_rtx
) == CONST
&&
4218 GET_CODE (XEXP (offset_rtx
, 0)) == PLUS
)
4220 if (GET_CODE (XEXP (XEXP (offset_rtx
, 0), 0)) == SYMBOL_REF
&&
4221 GET_CODE (XEXP (XEXP (offset_rtx
, 0), 1)) == CONST_INT
)
4223 offset_rtx
= copy_all_rtx (offset_rtx
);
4224 XEXP (XEXP (offset_rtx
, 0), 1) =
4225 gen_rtx (CONST_INT
, 0, INTVAL (XEXP (XEXP (offset_rtx
, 0), 1)) + offset_adjust
);
4226 if (!CONSTANT_P (offset_rtx
))
4233 else if (GET_CODE (offset_rtx
) == SYMBOL_REF
)
4236 gen_rtx (CONST
, GET_MODE (offset_rtx
),
4237 gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4239 gen_rtx (CONST_INT
, 0, offset_adjust
)));
4240 if (!CONSTANT_P (offset_rtx
))
4246 else if (GET_CODE (offset_rtx
) == CONST_INT
)
4248 offset_rtx
= gen_rtx (CONST_INT
, 0, INTVAL (offset_rtx
) + offset_adjust
);
4258 offset_rtx
= gen_rtx (CONST_INT
, 0, offset_adjust
);
/* Reassembly phase: build ret_rtx from the combinations of base, index,
   scale and offset that are present, omitting a zero offset.  The guard
   conditions selecting each combination sit on missing lines.  */
4266 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4267 INTVAL (offset_rtx
) == 0)
4269 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
),
4270 gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
,
4276 ret_rtx
= gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4277 gen_rtx (PLUS
, GET_MODE (base_rtx
),
4278 gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
,
4286 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4287 INTVAL (offset_rtx
) == 0)
4289 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
), index_rtx
, base_rtx
);
4293 ret_rtx
= gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4294 gen_rtx (PLUS
, GET_MODE (index_rtx
), index_rtx
,
4304 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4305 INTVAL (offset_rtx
) == 0)
4307 ret_rtx
= gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
, scale_rtx
);
4312 gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4313 gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
,
4320 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4321 INTVAL (offset_rtx
) == 0)
4323 ret_rtx
= index_rtx
;
4327 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
), index_rtx
, offset_rtx
);
4336 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4337 INTVAL (offset_rtx
) == 0)
4343 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
), base_rtx
, offset_rtx
);
4346 else if (was_only_offset
)
4348 ret_rtx
= offset_rtx
;
/* Install the rewritten address and restore the spill flag.  */
4356 XEXP (mem_rtx
, 0) = ret_rtx
;
4357 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4369 /* return 1 if the first insn to set cc before insn also sets the register
4370 reg_rtx - otherwise return 0 */
/* NOTE(review): extraction-damaged -- statements are split across lines
   and the function's return type, braces and `return' statements were
   lost.  The surviving fragments scan backwards from INSN with PREV_INSN,
   skipping NOTEs; for each INSN whose pattern is a SET they check whether
   it writes REG_RTX and whether its SET_SRC sets the condition code,
   classified via sets_condition_code / doesnt_set_condition_code.  */
4372 last_to_set_cc (reg_rtx
, insn
)
4375 rtx prev_insn
= PREV_INSN (insn
);
/* Walk backwards: NOTEs are skipped, non-SET patterns end the scan
   (bodies of these branches are on missing lines).  */
4379 if (GET_CODE (prev_insn
) == NOTE
)
4382 else if (GET_CODE (prev_insn
) == INSN
)
4384 if (GET_CODE (PATTERN (prev_insn
)) != SET
)
4387 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn
)), reg_rtx
))
4389 if (sets_condition_code (SET_SRC (PATTERN (prev_insn
))))
4395 else if (!doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn
))))
4402 prev_insn
= PREV_INSN (prev_insn
);
/* NOTE(review): only the head of this predicate survived extraction; the
   switch arms (case labels and return statements) were lost with the
   missing lines.  It classifies PAT by its rtx code via GET_CODE --
   presumably returning 1 for codes that never touch the condition code;
   confirm the case list against the pristine file.  */
4410 doesnt_set_condition_code (pat
)
4413 switch (GET_CODE (pat
))
/* NOTE(review): only the head of this predicate survived extraction; the
   switch arms were lost.  Companion to doesnt_set_condition_code above --
   presumably returns 1 for rtx codes that always set the condition code;
   confirm the case list against the pristine file.  */
4427 sets_condition_code (pat
)
4430 switch (GET_CODE (pat
))
/* NOTE(review): extraction-damaged.  Visible logic: accept OP when it is
   a CONST_INT in the range [0, 32]; the `return 1' / `return 0' lines
   themselves were lost.  MODE is unused in the surviving fragment (it is
   the standard predicate signature).  */
4454 str_immediate_operand (op
, mode
)
4456 enum machine_mode mode
;
4458 if (GET_CODE (op
) == CONST_INT
&& INTVAL (op
) <= 32 && INTVAL (op
) >= 0)
/* NOTE(review): the comment and function header for this fragment
   (original lines ~4466-4469) were lost in extraction -- presumably the
   floating-point-insn predicate (is_fp_insn); confirm against the
   pristine file.  The surviving test checks that INSN is an INSN whose
   pattern is a SET with a DFmode, SFmode or XFmode destination.  */
4470 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4471 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4472 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4473 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
))
4482 Return 1 if the mode of the SET_DEST of insn is floating point
4483 and it is not an fld or a move from memory to memory.
4484 Otherwise return 0 */
/* NOTE(review): the opening of the comment above and the function header
   below were lost in extraction.  Surviving test: INSN is a SET to a
   floating-mode (DF/SF/XF) hard FP register (REGNO >= FIRST_FLOAT_REG)
   whose source is not a MEM.  The last clause reads SET_SRC (insn)
   rather than SET_SRC (PATTERN (insn)) as the sibling predicates do --
   looks like a latent bug; verify against the pristine file.  */
4489 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4490 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4491 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4492 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4493 && GET_CODE (SET_DEST (PATTERN (insn
))) == REG
4494 && REGNO (SET_DEST (PATTERN (insn
))) >= FIRST_FLOAT_REG
4495 && GET_CODE (SET_SRC (insn
)) != MEM
)
4504 Return 1 if the mode of the SET_DEST floating point and is memory
4505 and the source is a register.
/* NOTE(review): comment opener, function header and the `return'
   statements were lost in extraction.  Surviving test: INSN is a SET
   whose destination is a floating-mode (DF/SF/XF) MEM and whose source
   is a REG -- i.e. an FP store to memory.  */
4511 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4512 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4513 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4514 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4515 && GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
4516 && GET_CODE (SET_SRC (PATTERN (insn
))) == REG
)
4526 Return 1 if dep_insn sets a register which insn uses as a base
4527 or index to reference memory.
4528 otherwise return 0 */
/* NOTE(review): extraction-damaged (function header type, braces and the
   final `return 0' are on missing lines).  Detects an AGI (address
   generation interlock) dependency in two visible cases:
   1. DEP_INSN is a SET of a REG: report whether INSN mentions that
      register inside a memory address (reg_mentioned_in_mem).
   2. DEP_INSN is a push (SET whose destination is a MEM satisfying
      push_operand): the implicitly-modified register is the stack
      pointer, so check stack_pointer_rtx against INSN instead.  */
4531 agi_dependent (insn
, dep_insn
)
4534 if (GET_CODE (dep_insn
) == INSN
4535 && GET_CODE (PATTERN (dep_insn
)) == SET
4536 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == REG
)
4538 return (reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn
)), insn
));
4541 if (GET_CODE (dep_insn
) == INSN
&& GET_CODE (PATTERN (dep_insn
)) == SET
4542 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == MEM
4543 && push_operand (SET_DEST (PATTERN (dep_insn
)),
4544 GET_MODE (SET_DEST (PATTERN (dep_insn
)))))
4546 return (reg_mentioned_in_mem (stack_pointer_rtx
, insn
));
4554 Return 1 if reg is used in rtl as a base or index for a memory ref
4555 otherwise return 0. */
/* NOTE(review): extraction-damaged -- the comment opener, function
   header type, declarations of `fmt', `i', `j', several base cases and
   all `return' statements are on missing lines.  Surviving structure:
   a standard recursive rtx walk.  If the current node is a MEM that
   mentions REG anywhere inside (reg_mentioned_p), report a hit;
   otherwise iterate the node's operands via GET_RTX_FORMAT /
   GET_RTX_LENGTH, recursing into vector elements ('E' operands, via
   XVECEXP) and expression operands ('e', via XEXP).  */
4558 reg_mentioned_in_mem (reg
, rtl
)
4563 register enum rtx_code code
;
4568 code
= GET_CODE (rtl
);
4586 if (code
== MEM
&& reg_mentioned_p (reg
, rtl
))
4589 fmt
= GET_RTX_FORMAT (code
);
4590 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4595 for (j
= XVECLEN (rtl
, i
) - 1; j
>= 0; j
--)
4597 if (reg_mentioned_in_mem (reg
, XVECEXP (rtl
, i
, j
)))
4602 else if (fmt
[i
] == 'e' && reg_mentioned_in_mem (reg
, XEXP (rtl
, i
)))
4609 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4611 operands[0] = result, initialized with the startaddress
4612 operands[1] = alignment of the address.
4613 operands[2] = scratch register, initialized with the startaddress when
4614 not aligned, otherwise undefined
4616 This is just the body. It needs the initialisations mentioned above and
4617 some address computing at the end. These things are done in i386.md. */
4620 output_strlen_unroll (operands
)
4625 xops
[0] = operands
[0]; /* Result */
4626 /* operands[1]; * Alignment */
4627 xops
[1] = operands
[2]; /* Scratch */
4628 xops
[2] = GEN_INT (0);
4629 xops
[3] = GEN_INT (2);
4630 xops
[4] = GEN_INT (3);
4631 xops
[5] = GEN_INT (4);
4632 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4633 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4634 xops
[8] = gen_label_rtx (); /* label of main loop */
4635 if(TARGET_USE_Q_REG
&& QI_REG_P (xops
[1]))
4636 xops
[9] = gen_label_rtx (); /* pentium optimisation */
4637 xops
[10] = gen_label_rtx (); /* end label 2 */
4638 xops
[11] = gen_label_rtx (); /* end label 1 */
4639 xops
[12] = gen_label_rtx (); /* end label */
4640 /* xops[13] * Temporary used */
4641 xops
[14] = GEN_INT (0xff);
4642 xops
[15] = GEN_INT (0xff00);
4643 xops
[16] = GEN_INT (0xff0000);
4644 xops
[17] = GEN_INT (0xff000000);
4646 /* Loop to check 1..3 bytes for null to get an aligned pointer */
4648 /* is there a known alignment and is it less then 4 */
4649 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) < 4)
4651 /* is there a known alignment and is it not 2 */
4652 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
4654 xops
[6] = gen_label_rtx (); /* label when aligned to 3-byte */
4655 xops
[7] = gen_label_rtx (); /* label when aligned to 2-byte */
4657 /* leave just the 3 lower bits */
4658 /* if this is a q-register, then the high part is used later */
4659 /* therefore user andl rather than andb */
4660 output_asm_insn (AS2 (and%L1
,%4,%1), xops
);
4661 /* is aligned to 4-byte adress when zero */
4662 output_asm_insn (AS1 (je
,%l8
), xops
);
4663 /* side-effect even Parity when %eax == 3 */
4664 output_asm_insn (AS1 (jp
,%6), xops
);
4666 /* is it aligned to 2 bytes ? */
4667 if (QI_REG_P (xops
[1]))
4668 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
4670 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
4671 output_asm_insn (AS1 (je
,%7), xops
);
4675 /* since the alignment is 2, we have to check 2 or 0 bytes */
4677 /* check if is aligned to 4 - byte */
4678 output_asm_insn (AS2 (and%L1
,%3,%1), xops
);
4679 /* is aligned to 4-byte adress when zero */
4680 output_asm_insn (AS1 (je
,%l8
), xops
);
4683 xops
[13] = gen_rtx (MEM
, QImode
, xops
[0]);
4684 /* now, compare the bytes */
4685 /* compare with the high part of a q-reg gives shorter code */
4686 if (QI_REG_P (xops
[1]))
4688 /* compare the first n unaligned byte on a byte per byte basis */
4689 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4690 /* when zero we reached the end */
4691 output_asm_insn (AS1 (je
,%l12
), xops
);
4692 /* increment the address */
4693 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4695 /* not needed with an alignment of 2 */
4696 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
4698 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[7]));
4699 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4700 output_asm_insn (AS1 (je
,%l12
), xops
);
4701 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4703 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[6]));
4705 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4709 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4710 output_asm_insn (AS1 (je
,%l12
), xops
);
4711 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4713 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[7]));
4714 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4715 output_asm_insn (AS1 (je
,%l12
), xops
);
4716 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4718 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[6]));
4719 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4721 output_asm_insn (AS1 (je
,%l12
), xops
);
4722 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4725 /* Generate loop to check 4 bytes at a time */
4726 /* IMHO it is not a good idea to align this loop. It gives only */
4727 /* huge programs, but does not help to speed up */
4728 /* ASM_OUTPUT_LOOP_ALIGN (asm_out_file); */
4729 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[8]));
4731 xops
[13] = gen_rtx (MEM
, SImode
, xops
[0]);
4732 output_asm_insn (AS2 (mov
%L1
,%13,%1), xops
);
4734 if (QI_REG_P (xops
[1]))
4736 /* On i586 it is faster to combine the hi- and lo- part as
4737 a kind of lookahead. If anding both yields zero, then one
4738 of both *could* be zero, otherwise none of both is zero;
4739 this saves one instruction, on i486 this is slower
4740 tested with P-90, i486DX2-66, AMD486DX2-66 */
4743 output_asm_insn (AS2 (test
%B1
,%h1
,%b1
), xops
);
4744 output_asm_insn (AS1 (jne
,%l9
), xops
);
4747 /* check first byte */
4748 output_asm_insn (AS2 (test
%B1
,%b1
,%b1
), xops
);
4749 output_asm_insn (AS1 (je
,%l12
), xops
);
4751 /* check second byte */
4752 output_asm_insn (AS2 (test
%B1
,%h1
,%h1
), xops
);
4753 output_asm_insn (AS1 (je
,%l11
), xops
);
4756 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[9]));
4760 /* check first byte */
4761 output_asm_insn (AS2 (test
%L1
,%14,%1), xops
);
4762 output_asm_insn (AS1 (je
,%l12
), xops
);
4764 /* check second byte */
4765 output_asm_insn (AS2 (test
%L1
,%15,%1), xops
);
4766 output_asm_insn (AS1 (je
,%l11
), xops
);
4769 /* check third byte */
4770 output_asm_insn (AS2 (test
%L1
,%16,%1), xops
);
4771 output_asm_insn (AS1 (je
,%l10
), xops
);
4773 /* check fourth byte and increment address */
4774 output_asm_insn (AS2 (add
%L0
,%5,%0), xops
);
4775 output_asm_insn (AS2 (test
%L1
,%17,%1), xops
);
4776 output_asm_insn (AS1 (jne
,%l8
), xops
);
4778 /* now generate fixups when the compare stops within a 4-byte word */
4779 output_asm_insn (AS2 (sub
%L0
,%4,%0), xops
);
4781 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[10]));
4782 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4784 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[11]));
4785 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4787 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[12]));