1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 1992, 1994, 1995, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
#ifdef EXTRA_CONSTRAINT
/* If EXTRA_CONSTRAINT is defined, then the 'S'
   constraint in REG_CLASS_FROM_LETTER will no longer work, and various
   asm statements that need 'S' for class SIREG will break.  */
error EXTRA_CONSTRAINT conflicts with S constraint letter
/* The previous line used to be #error, but some compilers barf
   even if the conditional was untrue.  */
#endif

/* Default stack-probe limit: -1 means "no explicit stack checking".  */
#ifndef CHECK_STACK_LIMIT
#define CHECK_STACK_LIMIT -1
#endif
52 enum reg_mem
/* Type of an operand for ix86_{binary,unary}_operator_ok */
59 /* Processor costs (relative to an add) */
60 struct processor_costs i386_cost
= { /* 386 specific costs */
61 1, /* cost of an add instruction (2 cycles) */
62 1, /* cost of a lea instruction */
63 3, /* variable shift costs */
64 2, /* constant shift costs */
65 6, /* cost of starting a multiply */
66 1, /* cost of multiply per each bit set */
67 23 /* cost of a divide/mod */
70 struct processor_costs i486_cost
= { /* 486 specific costs */
71 1, /* cost of an add instruction */
72 1, /* cost of a lea instruction */
73 3, /* variable shift costs */
74 2, /* constant shift costs */
75 12, /* cost of starting a multiply */
76 1, /* cost of multiply per each bit set */
77 40 /* cost of a divide/mod */
80 struct processor_costs pentium_cost
= {
81 1, /* cost of an add instruction */
82 1, /* cost of a lea instruction */
83 3, /* variable shift costs */
84 1, /* constant shift costs */
85 12, /* cost of starting a multiply */
86 1, /* cost of multiply per each bit set */
87 25 /* cost of a divide/mod */
90 struct processor_costs
*ix86_cost
= &pentium_cost
;
92 #define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))
94 extern FILE *asm_out_file
;
95 extern char *strcat ();
97 char *singlemove_string ();
98 char *output_move_const_single ();
99 char *output_fp_cc0_set ();
101 char *hi_reg_name
[] = HI_REGISTER_NAMES
;
102 char *qi_reg_name
[] = QI_REGISTER_NAMES
;
103 char *qi_high_reg_name
[] = QI_HIGH_REGISTER_NAMES
;
105 /* Array of the smallest class containing reg number REGNO, indexed by
106 REGNO. Used by REGNO_REG_CLASS in i386.h. */
108 enum reg_class regclass_map
[FIRST_PSEUDO_REGISTER
] =
111 AREG
, DREG
, CREG
, BREG
,
113 SIREG
, DIREG
, INDEX_REGS
, GENERAL_REGS
,
115 FP_TOP_REG
, FP_SECOND_REG
, FLOAT_REGS
, FLOAT_REGS
,
116 FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
,
121 /* Test and compare insns in i386.md store the information needed to
122 generate branch and scc insns here. */
124 struct rtx_def
*i386_compare_op0
= NULL_RTX
;
125 struct rtx_def
*i386_compare_op1
= NULL_RTX
;
126 struct rtx_def
*(*i386_compare_gen
)(), *(*i386_compare_gen_eq
)();
128 /* which cpu are we scheduling for */
129 enum processor_type ix86_cpu
;
131 /* which instruction set architecture to use. */
134 /* Strings to hold which cpu and instruction set architecture to use. */
135 char *ix86_cpu_string
; /* for -mcpu=<xxx> */
136 char *ix86_isa_string
; /* for -misa=<xxx> */
138 /* Register allocation order */
139 char *i386_reg_alloc_order
;
140 static char regs_allocated
[FIRST_PSEUDO_REGISTER
];
142 /* # of registers to use to pass arguments. */
143 char *i386_regparm_string
; /* # registers to use to pass args */
144 int i386_regparm
; /* i386_regparm_string as a number */
146 /* Alignment to use for loops and jumps */
147 char *i386_align_loops_string
; /* power of two alignment for loops */
148 char *i386_align_jumps_string
; /* power of two alignment for non-loop jumps */
149 char *i386_align_funcs_string
; /* power of two alignment for functions */
150 char *i386_branch_cost_string
; /* values 1-5: see jump.c */
152 int i386_align_loops
; /* power of two alignment for loops */
153 int i386_align_jumps
; /* power of two alignment for non-loop jumps */
154 int i386_align_funcs
; /* power of two alignment for functions */
155 int i386_branch_cost
; /* values 1-5: see jump.c */
157 /* Sometimes certain combinations of command options do not make
158 sense on a particular target machine. You can define a macro
159 `OVERRIDE_OPTIONS' to take account of this. This macro, if
160 defined, is executed once just after all the command options have
163 Don't use this macro to turn on various extra optimizations for
164 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
175 char *name
; /* Canonical processor name. */
176 enum processor_type processor
; /* Processor type enum value. */
177 struct processor_costs
*cost
; /* Processor costs */
178 int target_enable
; /* Target flags to enable. */
179 int target_disable
; /* Target flags to disable. */
180 } processor_target_table
[]
181 = {{PROCESSOR_COMMON_STRING
, PROCESSOR_COMMON
, &i486_cost
, 0, 0},
182 {PROCESSOR_I386_STRING
, PROCESSOR_I386
, &i386_cost
, 0, 0},
183 {PROCESSOR_I486_STRING
, PROCESSOR_I486
, &i486_cost
, 0, 0},
184 {PROCESSOR_I586_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
185 {PROCESSOR_PENTIUM_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
186 {PROCESSOR_I686_STRING
, PROCESSOR_PENTIUMPRO
, &pentium_cost
, 0, 0},
187 {PROCESSOR_PENTIUMPRO_STRING
, PROCESSOR_PENTIUMPRO
, &pentium_cost
, 0, 0}};
189 int ptt_size
= sizeof (processor_target_table
) / sizeof (struct ptt
);
191 #ifdef SUBTARGET_OVERRIDE_OPTIONS
192 SUBTARGET_OVERRIDE_OPTIONS
;
195 /* Validate registers in register allocation order */
196 if (i386_reg_alloc_order
)
198 for (i
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
202 case 'a': regno
= 0; break;
203 case 'd': regno
= 1; break;
204 case 'c': regno
= 2; break;
205 case 'b': regno
= 3; break;
206 case 'S': regno
= 4; break;
207 case 'D': regno
= 5; break;
208 case 'B': regno
= 6; break;
210 default: fatal ("Register '%c' is unknown", ch
);
213 if (regs_allocated
[regno
])
214 fatal ("Register '%c' was already specified in the allocation order", ch
);
216 regs_allocated
[regno
] = 1;
220 /* Get the architectural level. */
221 if (ix86_isa_string
== (char *)0)
222 ix86_isa_string
= PROCESSOR_DEFAULT_STRING
;
224 for (i
= 0; i
< ptt_size
; i
++)
225 if (! strcmp (ix86_isa_string
, processor_target_table
[i
].name
))
227 ix86_isa
= processor_target_table
[i
].processor
;
228 if (ix86_cpu_string
== (char *)0)
229 ix86_cpu_string
= processor_target_table
[i
].name
;
235 error ("bad value (%s) for -misa= switch", ix86_isa_string
);
236 ix86_isa_string
= PROCESSOR_DEFAULT_STRING
;
237 ix86_isa
= PROCESSOR_DEFAULT
;
240 for (j
= 0; j
< ptt_size
; j
++)
241 if (! strcmp (ix86_cpu_string
, processor_target_table
[j
].name
))
243 ix86_cpu
= processor_target_table
[j
].processor
;
244 if (i
> j
&& (int)ix86_isa
>= (int)PROCESSOR_PENTIUMPRO
)
245 error ("-mcpu=%s does not support -march=%s", ix86_cpu_string
, ix86_isa_string
);
247 target_flags
|= processor_target_table
[j
].target_enable
;
248 target_flags
&= ~processor_target_table
[j
].target_disable
;
254 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string
);
255 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
256 ix86_cpu
= PROCESSOR_DEFAULT
;
259 /* Validate -mregparm= value */
260 if (i386_regparm_string
)
262 i386_regparm
= atoi (i386_regparm_string
);
263 if (i386_regparm
< 0 || i386_regparm
> REGPARM_MAX
)
264 fatal ("-mregparm=%d is not between 0 and %d", i386_regparm
, REGPARM_MAX
);
267 def_align
= (TARGET_386
) ? 2 : 4;
269 /* Validate -malign-loops= value, or provide default */
270 if (i386_align_loops_string
)
272 i386_align_loops
= atoi (i386_align_loops_string
);
273 if (i386_align_loops
< 0 || i386_align_loops
> MAX_CODE_ALIGN
)
274 fatal ("-malign-loops=%d is not between 0 and %d",
275 i386_align_loops
, MAX_CODE_ALIGN
);
278 i386_align_loops
= 2;
280 /* Validate -malign-jumps= value, or provide default */
281 if (i386_align_jumps_string
)
283 i386_align_jumps
= atoi (i386_align_jumps_string
);
284 if (i386_align_jumps
< 0 || i386_align_jumps
> MAX_CODE_ALIGN
)
285 fatal ("-malign-jumps=%d is not between 0 and %d",
286 i386_align_jumps
, MAX_CODE_ALIGN
);
289 i386_align_jumps
= def_align
;
291 /* Validate -malign-functions= value, or provide default */
292 if (i386_align_funcs_string
)
294 i386_align_funcs
= atoi (i386_align_funcs_string
);
295 if (i386_align_funcs
< 0 || i386_align_funcs
> MAX_CODE_ALIGN
)
296 fatal ("-malign-functions=%d is not between 0 and %d",
297 i386_align_funcs
, MAX_CODE_ALIGN
);
300 i386_align_funcs
= def_align
;
302 /* Validate -mbranch-cost= value, or provide default */
303 if (i386_branch_cost_string
)
305 i386_branch_cost
= atoi (i386_branch_cost_string
);
306 if (i386_branch_cost
< 0 || i386_branch_cost
> 5)
307 fatal ("-mbranch-cost=%d is not between 0 and 5",
311 i386_branch_cost
= TARGET_PENTIUMPRO
? 4 : 1;
313 if (TARGET_OMIT_LEAF_FRAME_POINTER
) /* keep nonleaf frame pointers */
314 flag_omit_frame_pointer
= 1;
316 /* pic references don't explicitly mention pic_offset_table_rtx */
317 /* code threaded into the prologue may conflict with profiling */
318 if (flag_pic
|| profile_flag
|| profile_block_flag
)
319 target_flags
&= ~MASK_SCHEDULE_PROLOGUE
;
322 /* A C statement (sans semicolon) to choose the order in which to
323 allocate hard registers for pseudo-registers local to a basic
326 Store the desired register order in the array `reg_alloc_order'.
327 Element 0 should be the register to allocate first; element 1, the
328 next register; and so on.
330 The macro body should not assume anything about the contents of
331 `reg_alloc_order' before execution of the macro.
333 On most machines, it is not necessary to define this macro. */
336 order_regs_for_local_alloc ()
338 int i
, ch
, order
, regno
;
340 /* User specified the register allocation order */
341 if (i386_reg_alloc_order
)
343 for (i
= order
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
347 case 'a': regno
= 0; break;
348 case 'd': regno
= 1; break;
349 case 'c': regno
= 2; break;
350 case 'b': regno
= 3; break;
351 case 'S': regno
= 4; break;
352 case 'D': regno
= 5; break;
353 case 'B': regno
= 6; break;
356 reg_alloc_order
[order
++] = regno
;
359 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
361 if (!regs_allocated
[i
])
362 reg_alloc_order
[order
++] = i
;
366 /* If users did not specify a register allocation order, use natural order */
369 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
370 reg_alloc_order
[i
] = i
;
/* Adjust default optimization flags for a given -O LEVEL.

   NOTE(review): the parameter declaration, guard condition and closing
   brace were truncated in this copy; the `level > 1` test is inferred
   from the "-O2, and beyond" comment — confirm against history.  */

void
optimization_options (level)
     int level;
{
  /* For -O2, and beyond, turn off -fschedule-insns by default.  It tends to
     make the problem with not enough registers even worse */
#ifdef INSN_SCHEDULING
  if (level > 1)
    flag_schedule_insns = 0;
#endif
}
387 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
388 attribute for DECL. The attributes in ATTRIBUTES have previously been
392 i386_valid_decl_attribute_p (decl
, attributes
, identifier
, args
)
401 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
402 attribute for TYPE. The attributes in ATTRIBUTES have previously been
406 i386_valid_type_attribute_p (type
, attributes
, identifier
, args
)
412 if (TREE_CODE (type
) != FUNCTION_TYPE
413 && TREE_CODE (type
) != FIELD_DECL
414 && TREE_CODE (type
) != TYPE_DECL
)
417 /* Stdcall attribute says callee is responsible for popping arguments
418 if they are not variable. */
419 if (is_attribute_p ("stdcall", identifier
))
420 return (args
== NULL_TREE
);
422 /* Cdecl attribute says the callee is a normal C declaration */
423 if (is_attribute_p ("cdecl", identifier
))
424 return (args
== NULL_TREE
);
426 /* Regparm attribute specifies how many integer arguments are to be
427 passed in registers */
428 if (is_attribute_p ("regparm", identifier
))
432 if (!args
|| TREE_CODE (args
) != TREE_LIST
433 || TREE_CHAIN (args
) != NULL_TREE
434 || TREE_VALUE (args
) == NULL_TREE
)
437 cst
= TREE_VALUE (args
);
438 if (TREE_CODE (cst
) != INTEGER_CST
)
441 if (TREE_INT_CST_HIGH (cst
) != 0
442 || TREE_INT_CST_LOW (cst
) < 0
443 || TREE_INT_CST_LOW (cst
) > REGPARM_MAX
)
452 /* Return 0 if the attributes for two types are incompatible, 1 if they
453 are compatible, and 2 if they are nearly compatible (which causes a
454 warning to be generated). */
457 i386_comp_type_attributes (type1
, type2
)
465 /* Value is the number of bytes of arguments automatically
466 popped when returning from a subroutine call.
467 FUNDECL is the declaration node of the function (as a tree),
468 FUNTYPE is the data type of the function (as a tree),
469 or for a library call it is an identifier node for the subroutine name.
470 SIZE is the number of bytes of arguments passed on the stack.
472 On the 80386, the RTD insn may be used to pop them if the number
473 of args is fixed, but if the number is variable then the caller
474 must pop them all. RTD can't be used for library calls now
475 because the library is compiled with the Unix compiler.
476 Use of RTD is a selectable option, since it is incompatible with
477 standard Unix calling sequences. If the option is not selected,
478 the caller must always pop the args.
480 The attribute stdcall is equivalent to RTD on a per module basis. */
483 i386_return_pops_args (fundecl
, funtype
, size
)
488 int rtd
= TARGET_RTD
;
490 if (TREE_CODE (funtype
) == IDENTIFIER_NODE
)
493 /* Cdecl functions override -mrtd, and never pop the stack */
494 if (!lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype
))) {
496 /* Stdcall functions will pop the stack if not variable args */
497 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype
)))
501 && (TYPE_ARG_TYPES (funtype
) == NULL_TREE
502 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype
))) == void_type_node
)))
506 /* Lose any fake structure return argument */
507 if (aggregate_value_p (TREE_TYPE (funtype
)))
508 return GET_MODE_SIZE (Pmode
);
514 /* Argument support functions. */
516 /* Initialize a variable CUM of type CUMULATIVE_ARGS
517 for a call to a function whose data type is FNTYPE.
518 For a library call, FNTYPE is 0. */
521 init_cumulative_args (cum
, fntype
, libname
)
522 CUMULATIVE_ARGS
*cum
; /* argument info to initialize */
523 tree fntype
; /* tree ptr for function decl */
524 rtx libname
; /* SYMBOL_REF of library name or 0 */
526 static CUMULATIVE_ARGS zero_cum
;
527 tree param
, next_param
;
529 if (TARGET_DEBUG_ARG
)
531 fprintf (stderr
, "\ninit_cumulative_args (");
534 tree ret_type
= TREE_TYPE (fntype
);
535 fprintf (stderr
, "fntype code = %s, ret code = %s",
536 tree_code_name
[ (int)TREE_CODE (fntype
) ],
537 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
540 fprintf (stderr
, "no fntype");
543 fprintf (stderr
, ", libname = %s", XSTR (libname
, 0));
548 /* Set up the number of registers to use for passing arguments. */
549 cum
->nregs
= i386_regparm
;
552 tree attr
= lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype
));
554 cum
->nregs
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr
)));
557 /* Determine if this function has variable arguments. This is
558 indicated by the last argument being 'void_type_mode' if there
559 are no variable arguments. If there are variable arguments, then
560 we won't pass anything in registers */
564 for (param
= (fntype
) ? TYPE_ARG_TYPES (fntype
) : 0;
568 next_param
= TREE_CHAIN (param
);
569 if (next_param
== (tree
)0 && TREE_VALUE (param
) != void_type_node
)
574 if (TARGET_DEBUG_ARG
)
575 fprintf (stderr
, ", nregs=%d )\n", cum
->nregs
);
580 /* Update the data in CUM to advance over an argument
581 of mode MODE and data type TYPE.
582 (TYPE is null for libcalls where that information may not be available.) */
585 function_arg_advance (cum
, mode
, type
, named
)
586 CUMULATIVE_ARGS
*cum
; /* current arg information */
587 enum machine_mode mode
; /* current arg mode */
588 tree type
; /* type of the argument or 0 if lib support */
589 int named
; /* whether or not the argument was named */
591 int bytes
= (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
592 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
594 if (TARGET_DEBUG_ARG
)
596 "function_adv( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d )\n\n",
597 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
612 /* Define where to put the arguments to a function.
613 Value is zero to push the argument on the stack,
614 or a hard register in which to store the argument.
616 MODE is the argument's machine mode.
617 TYPE is the data type of the argument (as a tree).
618 This is null for libcalls where that information may
620 CUM is a variable of type CUMULATIVE_ARGS which gives info about
621 the preceding args and about the function being called.
622 NAMED is nonzero if this argument is a named parameter
623 (otherwise it is an extra parameter matching an ellipsis). */
626 function_arg (cum
, mode
, type
, named
)
627 CUMULATIVE_ARGS
*cum
; /* current arg information */
628 enum machine_mode mode
; /* current arg mode */
629 tree type
; /* type of the argument or 0 if lib support */
630 int named
; /* != 0 for normal args, == 0 for ... args */
633 int bytes
= (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
634 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
638 default: /* for now, pass fp/complex values on the stack */
646 if (words
<= cum
->nregs
)
647 ret
= gen_rtx (REG
, mode
, cum
->regno
);
651 if (TARGET_DEBUG_ARG
)
654 "function_arg( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d",
655 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
658 fprintf (stderr
, ", reg=%%e%s", reg_names
[ REGNO(ret
) ]);
660 fprintf (stderr
, ", stack");
662 fprintf (stderr
, " )\n");
668 /* For an arg passed partly in registers and partly in memory,
669 this is the number of registers used.
670 For args passed entirely in registers or entirely in memory, zero. */
673 function_arg_partial_nregs (cum
, mode
, type
, named
)
674 CUMULATIVE_ARGS
*cum
; /* current arg information */
675 enum machine_mode mode
; /* current arg mode */
676 tree type
; /* type of the argument or 0 if lib support */
677 int named
; /* != 0 for normal args, == 0 for ... args */
683 /* Output an insn whose source is a 386 integer register. SRC is the
684 rtx for the register, and TEMPLATE is the op-code template. SRC may
685 be either SImode or DImode.
687 The template will be output with operands[0] as SRC, and operands[1]
688 as a pointer to the top of the 386 stack. So a call from floatsidf2
689 would look like this:
691 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
693 where %z0 corresponds to the caller's operands[1], and is used to
694 emit the proper size suffix.
696 ??? Extend this to handle HImode - a 387 can load and store HImode
700 output_op_from_reg (src
, template)
705 int size
= GET_MODE_SIZE (GET_MODE (src
));
708 xops
[1] = AT_SP (Pmode
);
709 xops
[2] = GEN_INT (size
);
710 xops
[3] = stack_pointer_rtx
;
712 if (size
> UNITS_PER_WORD
)
715 if (size
> 2 * UNITS_PER_WORD
)
717 high
= gen_rtx (REG
, SImode
, REGNO (src
) + 2);
718 output_asm_insn (AS1 (push
%L0
,%0), &high
);
720 high
= gen_rtx (REG
, SImode
, REGNO (src
) + 1);
721 output_asm_insn (AS1 (push
%L0
,%0), &high
);
723 output_asm_insn (AS1 (push
%L0
,%0), &src
);
725 output_asm_insn (template, xops
);
727 output_asm_insn (AS2 (add
%L3
,%2,%3), xops
);
730 /* Output an insn to pop an value from the 387 top-of-stack to 386
731 register DEST. The 387 register stack is popped if DIES is true. If
732 the mode of DEST is an integer mode, a `fist' integer store is done,
733 otherwise a `fst' float store is done. */
736 output_to_reg (dest
, dies
)
741 int size
= GET_MODE_SIZE (GET_MODE (dest
));
743 xops
[0] = AT_SP (Pmode
);
744 xops
[1] = stack_pointer_rtx
;
745 xops
[2] = GEN_INT (size
);
748 output_asm_insn (AS2 (sub
%L1
,%2,%1), xops
);
750 if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_INT
)
753 output_asm_insn (AS1 (fistp
%z3
,%y0
), xops
);
755 output_asm_insn (AS1 (fist
%z3
,%y0
), xops
);
757 else if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_FLOAT
)
760 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
763 if (GET_MODE (dest
) == XFmode
)
765 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
766 output_asm_insn (AS1 (fld
%z3
,%y0
), xops
);
769 output_asm_insn (AS1 (fst
%z3
,%y0
), xops
);
775 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
777 if (size
> UNITS_PER_WORD
)
779 dest
= gen_rtx (REG
, SImode
, REGNO (dest
) + 1);
780 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
781 if (size
> 2 * UNITS_PER_WORD
)
783 dest
= gen_rtx (REG
, SImode
, REGNO (dest
) + 1);
784 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
790 singlemove_string (operands
)
794 if (GET_CODE (operands
[0]) == MEM
795 && GET_CODE (x
= XEXP (operands
[0], 0)) == PRE_DEC
)
797 if (XEXP (x
, 0) != stack_pointer_rtx
)
801 else if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
803 return output_move_const_single (operands
);
805 else if (GET_CODE (operands
[0]) == REG
|| GET_CODE (operands
[1]) == REG
)
806 return AS2 (mov
%L0
,%1,%0);
807 else if (CONSTANT_P (operands
[1]))
808 return AS2 (mov
%L0
,%1,%0);
811 output_asm_insn ("push%L1 %1", operands
);
816 /* Return a REG that occurs in ADDR with coefficient 1.
817 ADDR can be effectively incremented by incrementing REG. */
823 while (GET_CODE (addr
) == PLUS
)
825 if (GET_CODE (XEXP (addr
, 0)) == REG
)
826 addr
= XEXP (addr
, 0);
827 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
828 addr
= XEXP (addr
, 1);
829 else if (CONSTANT_P (XEXP (addr
, 0)))
830 addr
= XEXP (addr
, 1);
831 else if (CONSTANT_P (XEXP (addr
, 1)))
832 addr
= XEXP (addr
, 0);
836 if (GET_CODE (addr
) == REG
)
842 /* Output an insn to add the constant N to the register X. */
853 output_asm_insn (AS1 (dec
%L0
,%0), xops
);
855 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
858 xops
[1] = GEN_INT (-n
);
859 output_asm_insn (AS2 (sub
%L0
,%1,%0), xops
);
863 xops
[1] = GEN_INT (n
);
864 output_asm_insn (AS2 (add
%L0
,%1,%0), xops
);
869 /* Output assembler code to perform a doubleword move insn
870 with operands OPERANDS. */
873 output_move_double (operands
)
876 enum {REGOP
, OFFSOP
, MEMOP
, PUSHOP
, POPOP
, CNSTOP
, RNDOP
} optype0
, optype1
;
880 rtx addreg0
= 0, addreg1
= 0;
881 int dest_overlapped_low
= 0;
882 int size
= GET_MODE_SIZE (GET_MODE (operands
[0]));
887 /* First classify both operands. */
889 if (REG_P (operands
[0]))
891 else if (offsettable_memref_p (operands
[0]))
893 else if (GET_CODE (XEXP (operands
[0], 0)) == POST_INC
)
895 else if (GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
)
897 else if (GET_CODE (operands
[0]) == MEM
)
902 if (REG_P (operands
[1]))
904 else if (CONSTANT_P (operands
[1]))
906 else if (offsettable_memref_p (operands
[1]))
908 else if (GET_CODE (XEXP (operands
[1], 0)) == POST_INC
)
910 else if (GET_CODE (XEXP (operands
[1], 0)) == PRE_DEC
)
912 else if (GET_CODE (operands
[1]) == MEM
)
917 /* Check for the cases that the operand constraints are not
918 supposed to allow to happen. Abort if we get one,
919 because generating code for these cases is painful. */
921 if (optype0
== RNDOP
|| optype1
== RNDOP
)
924 /* If one operand is decrementing and one is incrementing
925 decrement the former register explicitly
926 and change that operand into ordinary indexing. */
928 if (optype0
== PUSHOP
&& optype1
== POPOP
)
930 /* ??? Can this ever happen on i386? */
931 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
932 asm_add (-size
, operands
[0]);
933 if (GET_MODE (operands
[1]) == XFmode
)
934 operands
[0] = gen_rtx (MEM
, XFmode
, operands
[0]);
935 else if (GET_MODE (operands
[0]) == DFmode
)
936 operands
[0] = gen_rtx (MEM
, DFmode
, operands
[0]);
938 operands
[0] = gen_rtx (MEM
, DImode
, operands
[0]);
942 if (optype0
== POPOP
&& optype1
== PUSHOP
)
944 /* ??? Can this ever happen on i386? */
945 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
946 asm_add (-size
, operands
[1]);
947 if (GET_MODE (operands
[1]) == XFmode
)
948 operands
[1] = gen_rtx (MEM
, XFmode
, operands
[1]);
949 else if (GET_MODE (operands
[1]) == DFmode
)
950 operands
[1] = gen_rtx (MEM
, DFmode
, operands
[1]);
952 operands
[1] = gen_rtx (MEM
, DImode
, operands
[1]);
956 /* If an operand is an unoffsettable memory ref, find a register
957 we can increment temporarily to make it refer to the second word. */
959 if (optype0
== MEMOP
)
960 addreg0
= find_addr_reg (XEXP (operands
[0], 0));
962 if (optype1
== MEMOP
)
963 addreg1
= find_addr_reg (XEXP (operands
[1], 0));
965 /* Ok, we can do one word at a time.
966 Normally we do the low-numbered word first,
967 but if either operand is autodecrementing then we
968 do the high-numbered word first.
970 In either case, set up in LATEHALF the operands to use
971 for the high-numbered word and in some cases alter the
972 operands in OPERANDS to be suitable for the low-numbered word. */
976 if (optype0
== REGOP
)
978 middlehalf
[0] = gen_rtx (REG
, SImode
, REGNO (operands
[0]) + 1);
979 latehalf
[0] = gen_rtx (REG
, SImode
, REGNO (operands
[0]) + 2);
981 else if (optype0
== OFFSOP
)
983 middlehalf
[0] = adj_offsettable_operand (operands
[0], 4);
984 latehalf
[0] = adj_offsettable_operand (operands
[0], 8);
988 middlehalf
[0] = operands
[0];
989 latehalf
[0] = operands
[0];
992 if (optype1
== REGOP
)
994 middlehalf
[1] = gen_rtx (REG
, SImode
, REGNO (operands
[1]) + 1);
995 latehalf
[1] = gen_rtx (REG
, SImode
, REGNO (operands
[1]) + 2);
997 else if (optype1
== OFFSOP
)
999 middlehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1000 latehalf
[1] = adj_offsettable_operand (operands
[1], 8);
1002 else if (optype1
== CNSTOP
)
1004 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1006 REAL_VALUE_TYPE r
; long l
[3];
1008 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1009 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r
, l
);
1010 operands
[1] = GEN_INT (l
[0]);
1011 middlehalf
[1] = GEN_INT (l
[1]);
1012 latehalf
[1] = GEN_INT (l
[2]);
1014 else if (CONSTANT_P (operands
[1]))
1015 /* No non-CONST_DOUBLE constant should ever appear here. */
1020 middlehalf
[1] = operands
[1];
1021 latehalf
[1] = operands
[1];
1024 else /* size is not 12: */
1026 if (optype0
== REGOP
)
1027 latehalf
[0] = gen_rtx (REG
, SImode
, REGNO (operands
[0]) + 1);
1028 else if (optype0
== OFFSOP
)
1029 latehalf
[0] = adj_offsettable_operand (operands
[0], 4);
1031 latehalf
[0] = operands
[0];
1033 if (optype1
== REGOP
)
1034 latehalf
[1] = gen_rtx (REG
, SImode
, REGNO (operands
[1]) + 1);
1035 else if (optype1
== OFFSOP
)
1036 latehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1037 else if (optype1
== CNSTOP
)
1038 split_double (operands
[1], &operands
[1], &latehalf
[1]);
1040 latehalf
[1] = operands
[1];
1043 /* If insn is effectively movd N (sp),-(sp) then we will do the
1044 high word first. We should use the adjusted operand 1
1045 (which is N+4 (sp) or N+8 (sp))
1046 for the low word and middle word as well,
1047 to compensate for the first decrement of sp. */
1048 if (optype0
== PUSHOP
1049 && REGNO (XEXP (XEXP (operands
[0], 0), 0)) == STACK_POINTER_REGNUM
1050 && reg_overlap_mentioned_p (stack_pointer_rtx
, operands
[1]))
1051 middlehalf
[1] = operands
[1] = latehalf
[1];
1053 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1054 if the upper part of reg N does not appear in the MEM, arrange to
1055 emit the move late-half first. Otherwise, compute the MEM address
1056 into the upper part of N and use that as a pointer to the memory
1058 if (optype0
== REGOP
1059 && (optype1
== OFFSOP
|| optype1
== MEMOP
))
1061 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1062 && reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1064 /* If both halves of dest are used in the src memory address,
1065 compute the address into latehalf of dest. */
1067 xops
[0] = latehalf
[0];
1068 xops
[1] = XEXP (operands
[1], 0);
1069 output_asm_insn (AS2 (lea
%L0
,%a1
,%0), xops
);
1070 if( GET_MODE (operands
[1]) == XFmode
)
1073 operands
[1] = gen_rtx (MEM
, XFmode
, latehalf
[0]);
1074 middlehalf
[1] = adj_offsettable_operand (operands
[1], size
-8);
1075 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1079 operands
[1] = gen_rtx (MEM
, DImode
, latehalf
[0]);
1080 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1084 && reg_mentioned_p (middlehalf
[0], XEXP (operands
[1], 0)))
1086 /* Check for two regs used by both source and dest. */
1087 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1088 || reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1091 /* JRV says this can't happen: */
1092 if (addreg0
|| addreg1
)
1095 /* Only the middle reg conflicts; simply put it last. */
1096 output_asm_insn (singlemove_string (operands
), operands
);
1097 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1098 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1101 else if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0)))
1102 /* If the low half of dest is mentioned in the source memory
1103 address, the arrange to emit the move late half first. */
1104 dest_overlapped_low
= 1;
1107 /* If one or both operands autodecrementing,
1108 do the two words, high-numbered first. */
1110 /* Likewise, the first move would clobber the source of the second one,
1111 do them in the other order. This happens only for registers;
1112 such overlap can't happen in memory unless the user explicitly
1113 sets it up, and that is an undefined circumstance. */
1116 if (optype0 == PUSHOP || optype1 == PUSHOP
1117 || (optype0 == REGOP && optype1 == REGOP
1118 && REGNO (operands[0]) == REGNO (latehalf[1]))
1119 || dest_overlapped_low)
1121 if (optype0
== PUSHOP
|| optype1
== PUSHOP
1122 || (optype0
== REGOP
&& optype1
== REGOP
1123 && ((middlehalf
[1] && REGNO (operands
[0]) == REGNO (middlehalf
[1]))
1124 || REGNO (operands
[0]) == REGNO (latehalf
[1])))
1125 || dest_overlapped_low
)
1127 /* Make any unoffsettable addresses point at high-numbered word. */
1129 asm_add (size
-4, addreg0
);
1131 asm_add (size
-4, addreg1
);
1134 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1136 /* Undo the adds we just did. */
1138 asm_add (-4, addreg0
);
1140 asm_add (-4, addreg1
);
1144 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1146 asm_add (-4, addreg0
);
1148 asm_add (-4, addreg1
);
1151 /* Do low-numbered word. */
1152 return singlemove_string (operands
);
1155 /* Normal case: do the two words, low-numbered first. */
1157 output_asm_insn (singlemove_string (operands
), operands
);
1159 /* Do the middle one of the three words for long double */
1163 asm_add (4, addreg0
);
1165 asm_add (4, addreg1
);
1167 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1170 /* Make any unoffsettable addresses point at high-numbered word. */
1172 asm_add (4, addreg0
);
1174 asm_add (4, addreg1
);
1177 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1179 /* Undo the adds we just did. */
1181 asm_add (4-size
, addreg0
);
1183 asm_add (4-size
, addreg1
);
1189 #define MAX_TMPS 2 /* max temporary registers used */
1191 /* Output the appropriate code to move push memory on the stack */
1194 output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
)
1206 } tmp_info
[MAX_TMPS
];
1208 rtx src
= operands
[1];
1211 int stack_p
= reg_overlap_mentioned_p (stack_pointer_rtx
, src
);
1212 int stack_offset
= 0;
1216 if (!offsettable_memref_p (src
))
1217 fatal_insn ("Source is not offsettable", insn
);
1219 if ((length
& 3) != 0)
1220 fatal_insn ("Pushing non-word aligned size", insn
);
1222 /* Figure out which temporary registers we have available */
1223 for (i
= tmp_start
; i
< n_operands
; i
++)
1225 if (GET_CODE (operands
[i
]) == REG
)
1227 if (reg_overlap_mentioned_p (operands
[i
], src
))
1230 tmp_info
[ max_tmps
++ ].xops
[1] = operands
[i
];
1231 if (max_tmps
== MAX_TMPS
)
1237 for (offset
= length
- 4; offset
>= 0; offset
-= 4)
1239 xops
[0] = adj_offsettable_operand (src
, offset
+ stack_offset
);
1240 output_asm_insn (AS1(push
%L0
,%0), xops
);
1246 for (offset
= length
- 4; offset
>= 0; )
1248 for (num_tmps
= 0; num_tmps
< max_tmps
&& offset
>= 0; num_tmps
++)
1250 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%0,%1);
1251 tmp_info
[num_tmps
].push
= AS1(push
%L0
,%1);
1252 tmp_info
[num_tmps
].xops
[0] = adj_offsettable_operand (src
, offset
+ stack_offset
);
1256 for (i
= 0; i
< num_tmps
; i
++)
1257 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1259 for (i
= 0; i
< num_tmps
; i
++)
1260 output_asm_insn (tmp_info
[i
].push
, tmp_info
[i
].xops
);
1263 stack_offset
+= 4*num_tmps
;
1271 /* Output the appropriate code to move data between two memory locations */
1274 output_move_memory (operands
, insn
, length
, tmp_start
, n_operands
)
1285 } tmp_info
[MAX_TMPS
];
1287 rtx dest
= operands
[0];
1288 rtx src
= operands
[1];
1289 rtx qi_tmp
= NULL_RTX
;
1295 if (GET_CODE (dest
) == MEM
1296 && GET_CODE (XEXP (dest
, 0)) == PRE_INC
1297 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
)
1298 return output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
);
1300 if (!offsettable_memref_p (src
))
1301 fatal_insn ("Source is not offsettable", insn
);
1303 if (!offsettable_memref_p (dest
))
1304 fatal_insn ("Destination is not offsettable", insn
);
1306 /* Figure out which temporary registers we have available */
1307 for (i
= tmp_start
; i
< n_operands
; i
++)
1309 if (GET_CODE (operands
[i
]) == REG
)
1311 if ((length
& 1) != 0 && !qi_tmp
&& QI_REG_P (operands
[i
]))
1312 qi_tmp
= operands
[i
];
1314 if (reg_overlap_mentioned_p (operands
[i
], dest
))
1315 fatal_insn ("Temporary register overlaps the destination", insn
);
1317 if (reg_overlap_mentioned_p (operands
[i
], src
))
1318 fatal_insn ("Temporary register overlaps the source", insn
);
1320 tmp_info
[ max_tmps
++ ].xops
[2] = operands
[i
];
1321 if (max_tmps
== MAX_TMPS
)
1327 fatal_insn ("No scratch registers were found to do memory->memory moves", insn
);
1329 if ((length
& 1) != 0)
1332 fatal_insn ("No byte register found when moving odd # of bytes.", insn
);
1337 for (num_tmps
= 0; num_tmps
< max_tmps
; num_tmps
++)
1341 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%1,%2);
1342 tmp_info
[num_tmps
].store
= AS2(mov
%L0
,%2,%0);
1343 tmp_info
[num_tmps
].xops
[0] = adj_offsettable_operand (dest
, offset
);
1344 tmp_info
[num_tmps
].xops
[1] = adj_offsettable_operand (src
, offset
);
1348 else if (length
>= 2)
1350 tmp_info
[num_tmps
].load
= AS2(mov
%W0
,%1,%2);
1351 tmp_info
[num_tmps
].store
= AS2(mov
%W0
,%2,%0);
1352 tmp_info
[num_tmps
].xops
[0] = adj_offsettable_operand (dest
, offset
);
1353 tmp_info
[num_tmps
].xops
[1] = adj_offsettable_operand (src
, offset
);
1361 for (i
= 0; i
< num_tmps
; i
++)
1362 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1364 for (i
= 0; i
< num_tmps
; i
++)
1365 output_asm_insn (tmp_info
[i
].store
, tmp_info
[i
].xops
);
1370 xops
[0] = adj_offsettable_operand (dest
, offset
);
1371 xops
[1] = adj_offsettable_operand (src
, offset
);
1373 output_asm_insn (AS2(mov
%B0
,%1,%2), xops
);
1374 output_asm_insn (AS2(mov
%B0
,%2,%0), xops
);
1382 standard_80387_constant_p (x
)
1385 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1390 if (setjmp (handler
))
1393 set_float_handler (handler
);
1394 REAL_VALUE_FROM_CONST_DOUBLE (d
, x
);
1395 is0
= REAL_VALUES_EQUAL (d
, dconst0
) && !REAL_VALUE_MINUS_ZERO (d
);
1396 is1
= REAL_VALUES_EQUAL (d
, dconst1
);
1397 set_float_handler (NULL_PTR
);
1405 /* Note that on the 80387, other constants, such as pi,
1406 are much slower to load as standard constants
1407 than to load from doubles in memory! */
1414 output_move_const_single (operands
)
1417 if (FP_REG_P (operands
[0]))
1419 int conval
= standard_80387_constant_p (operands
[1]);
1427 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1429 REAL_VALUE_TYPE r
; long l
;
1431 if (GET_MODE (operands
[1]) == XFmode
)
1434 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1435 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
1436 operands
[1] = GEN_INT (l
);
1438 return singlemove_string (operands
);
1441 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1442 reference and a constant. */
1445 symbolic_operand (op
, mode
)
1447 enum machine_mode mode
;
1449 switch (GET_CODE (op
))
1456 return ((GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
1457 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
1458 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
1464 /* Test for a valid operand for a call instruction.
1465 Don't allow the arg pointer register or virtual regs
1466 since they may change into reg + const, which the patterns
1467 can't handle yet. */
1470 call_insn_operand (op
, mode
)
1472 enum machine_mode mode
;
1474 if (GET_CODE (op
) == MEM
1475 && ((CONSTANT_ADDRESS_P (XEXP (op
, 0))
1476 /* This makes a difference for PIC. */
1477 && general_operand (XEXP (op
, 0), Pmode
))
1478 || (GET_CODE (XEXP (op
, 0)) == REG
1479 && XEXP (op
, 0) != arg_pointer_rtx
1480 && !(REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1481 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
1486 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1490 expander_call_insn_operand (op
, mode
)
1492 enum machine_mode mode
;
1494 if (GET_CODE (op
) == MEM
1495 && (CONSTANT_ADDRESS_P (XEXP (op
, 0))
1496 || (GET_CODE (XEXP (op
, 0)) == REG
1497 && XEXP (op
, 0) != arg_pointer_rtx
1498 && !(REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1499 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
1504 /* Return 1 if OP is a comparison operator that can use the condition code
1505 generated by an arithmetic operation. */
1508 arithmetic_comparison_operator (op
, mode
)
1510 enum machine_mode mode
;
1514 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
1516 code
= GET_CODE (op
);
1517 if (GET_RTX_CLASS (code
) != '<')
1520 return (code
!= GT
&& code
!= LE
);
1523 /* Returns 1 if OP contains a symbol reference */
1526 symbolic_reference_mentioned_p (op
)
1532 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1535 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1536 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1542 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1543 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1546 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1553 /* Attempt to expand a binary operator. Make the expansion closer to the
1554 actual machine, then just general_operand, which will allow 3 separate
1555 memory references (one output, two input) in a single insn. Return
1556 whether the insn fails, or succeeds. */
1559 ix86_expand_binary_operator (code
, mode
, operands
)
1561 enum machine_mode mode
;
1568 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1569 if (GET_RTX_CLASS (code
) == 'c'
1570 && (rtx_equal_p (operands
[0], operands
[2])
1571 || immediate_operand (operands
[1], mode
)))
1573 rtx temp
= operands
[1];
1574 operands
[1] = operands
[2];
1578 /* If optimizing, copy to regs to improve CSE */
1579 if (TARGET_PSEUDO
&& optimize
&& ((reload_in_progress
| reload_completed
) == 0))
1581 if (GET_CODE (operands
[1]) == MEM
&& !rtx_equal_p (operands
[0], operands
[1]))
1582 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1584 if (GET_CODE (operands
[2]) == MEM
)
1585 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1587 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1589 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1590 emit_move_insn (temp
, operands
[1]);
1596 if (!ix86_binary_operator_ok (code
, mode
, operands
))
1598 /* If not optimizing, try to make a valid insn (optimize code previously did
1599 this above to improve chances of CSE) */
1601 if ((!TARGET_PSEUDO
|| !optimize
)
1602 && ((reload_in_progress
| reload_completed
) == 0)
1603 && (GET_CODE (operands
[1]) == MEM
|| GET_CODE (operands
[2]) == MEM
))
1606 if (GET_CODE (operands
[1]) == MEM
&& !rtx_equal_p (operands
[0], operands
[1]))
1608 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1612 if (GET_CODE (operands
[2]) == MEM
)
1614 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1618 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1620 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1621 emit_move_insn (temp
, operands
[1]);
1626 if (modified
&& !ix86_binary_operator_ok (code
, mode
, operands
))
1636 /* Return TRUE or FALSE depending on whether the binary operator meets the
1637 appropriate constraints. */
1640 ix86_binary_operator_ok (code
, mode
, operands
)
1642 enum machine_mode mode
;
1645 return (GET_CODE (operands
[1]) != MEM
|| GET_CODE (operands
[2]) != MEM
)
1646 && (GET_CODE (operands
[1]) != CONST_INT
|| GET_RTX_CLASS (code
) == 'c');
1649 /* Attempt to expand a unary operator. Make the expansion closer to the
1650 actual machine, then just general_operand, which will allow 2 separate
1651 memory references (one output, one input) in a single insn. Return
1652 whether the insn fails, or succeeds. */
1655 ix86_expand_unary_operator (code
, mode
, operands
)
1657 enum machine_mode mode
;
1662 /* If optimizing, copy to regs to improve CSE */
1665 && ((reload_in_progress
| reload_completed
) == 0)
1666 && GET_CODE (operands
[1]) == MEM
)
1668 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1671 if (!ix86_unary_operator_ok (code
, mode
, operands
))
1673 if ((!TARGET_PSEUDO
|| !optimize
)
1674 && ((reload_in_progress
| reload_completed
) == 0)
1675 && GET_CODE (operands
[1]) == MEM
)
1677 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1678 if (!ix86_unary_operator_ok (code
, mode
, operands
))
1688 /* Return TRUE or FALSE depending on whether the unary operator meets the
1689 appropriate constraints. */
1692 ix86_unary_operator_ok (code
, mode
, operands
)
1694 enum machine_mode mode
;
1702 static rtx pic_label_rtx
;
1704 /* This function generates code for -fpic that loads %ebx with
1705 with the return address of the caller and then returns. */
1707 asm_output_function_prefix (file
, name
)
1712 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1713 || current_function_uses_const_pool
);
1714 xops
[0] = pic_offset_table_rtx
;
1715 xops
[1] = stack_pointer_rtx
;
1717 /* deep branch prediction favors having a return for every call */
1718 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
1720 if (pic_label_rtx
== 0)
1721 pic_label_rtx
= (rtx
) gen_label_rtx ();
1722 ASM_OUTPUT_INTERNAL_LABEL (file
, "L", CODE_LABEL_NUMBER (pic_label_rtx
));
1723 output_asm_insn ("movl (%1),%0", xops
);
1724 output_asm_insn ("ret", xops
);
1728 /* Set up the stack and frame (if desired) for the function. */
1731 function_prologue (file
, size
)
1738 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1739 || current_function_uses_const_pool
);
1740 long tsize
= get_frame_size ();
1742 /* pic references don't explicitly mention pic_offset_table_rtx */
1743 if (TARGET_SCHEDULE_PROLOGUE
)
1746 xops
[0] = stack_pointer_rtx
;
1747 xops
[1] = frame_pointer_rtx
;
1748 xops
[2] = GEN_INT (tsize
);
1750 if (frame_pointer_needed
)
1752 output_asm_insn ("push%L1 %1", xops
);
1753 output_asm_insn (AS2 (mov
%L0
,%0,%1), xops
);
1758 else if (! TARGET_STACK_PROBE
|| tsize
< CHECK_STACK_LIMIT
)
1759 output_asm_insn (AS2 (sub
%L0
,%2,%0), xops
);
1762 xops
[3] = gen_rtx (REG
, SImode
, 0);
1763 output_asm_insn (AS2 (mov
%L0
,%2,%3), xops
);
1765 xops
[3] = gen_rtx (SYMBOL_REF
, Pmode
, "_alloca");
1766 output_asm_insn (AS1 (call
,%P3
), xops
);
1769 /* Note If use enter it is NOT reversed args.
1770 This one is not reversed from intel!!
1771 I think enter is slower. Also sdb doesn't like it.
1772 But if you want it the code is:
1774 xops[3] = const0_rtx;
1775 output_asm_insn ("enter %2,%3", xops);
1778 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
1779 for (regno
= limit
- 1; regno
>= 0; regno
--)
1780 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1781 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
1783 xops
[0] = gen_rtx (REG
, SImode
, regno
);
1784 output_asm_insn ("push%L0 %0", xops
);
1787 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
1789 xops
[0] = pic_offset_table_rtx
;
1790 if (pic_label_rtx
== 0)
1791 pic_label_rtx
= (rtx
) gen_label_rtx ();
1792 xops
[1] = pic_label_rtx
;
1794 output_asm_insn (AS1 (call
,%P1
), xops
);
1795 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops
);
1797 else if (pic_reg_used
)
1799 xops
[0] = pic_offset_table_rtx
;
1800 xops
[1] = (rtx
) gen_label_rtx ();
1802 output_asm_insn (AS1 (call
,%P1
), xops
);
1803 ASM_OUTPUT_INTERNAL_LABEL (file
, "L", CODE_LABEL_NUMBER (xops
[1]));
1804 output_asm_insn (AS1 (pop
%L0
,%0), xops
);
1805 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops
);
1809 /* This function generates the assembly code for function entry.
1810 FILE is an stdio stream to output the code to.
1811 SIZE is an int: how many units of temporary storage to allocate. */
1814 ix86_expand_prologue ()
1819 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1820 || current_function_uses_const_pool
);
1821 long tsize
= get_frame_size ();
1823 if (!TARGET_SCHEDULE_PROLOGUE
)
1826 xops
[0] = stack_pointer_rtx
;
1827 xops
[1] = frame_pointer_rtx
;
1828 xops
[2] = GEN_INT (tsize
);
1829 if (frame_pointer_needed
)
1831 emit_insn (gen_rtx (SET
, 0,
1832 gen_rtx (MEM
, SImode
,
1833 gen_rtx (PRE_DEC
, SImode
, stack_pointer_rtx
)),
1834 frame_pointer_rtx
));
1835 emit_move_insn (xops
[1], xops
[0]);
1840 else if (! TARGET_STACK_PROBE
|| tsize
< CHECK_STACK_LIMIT
)
1841 emit_insn (gen_subsi3 (xops
[0], xops
[0], xops
[2]));
1844 xops
[3] = gen_rtx (REG
, SImode
, 0);
1845 emit_move_insn (xops
[3], xops
[2]);
1846 xops
[3] = gen_rtx (MEM
, FUNCTION_MODE
,
1847 gen_rtx (SYMBOL_REF
, Pmode
, "_alloca"));
1848 emit_call_insn (gen_rtx (CALL
, VOIDmode
,
1849 xops
[3], const0_rtx
));
1852 /* Note If use enter it is NOT reversed args.
1853 This one is not reversed from intel!!
1854 I think enter is slower. Also sdb doesn't like it.
1855 But if you want it the code is:
1857 xops[3] = const0_rtx;
1858 output_asm_insn ("enter %2,%3", xops);
1861 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
1862 for (regno
= limit
- 1; regno
>= 0; regno
--)
1863 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1864 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
1866 xops
[0] = gen_rtx (REG
, SImode
, regno
);
1867 emit_insn (gen_rtx (SET
, 0,
1868 gen_rtx (MEM
, SImode
,
1869 gen_rtx (PRE_DEC
, SImode
, stack_pointer_rtx
)),
1873 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
1875 xops
[0] = pic_offset_table_rtx
;
1876 if (pic_label_rtx
== 0)
1877 pic_label_rtx
= (rtx
) gen_label_rtx ();
1878 xops
[1] = pic_label_rtx
;
1880 emit_insn (gen_prologue_get_pc (xops
[0], gen_rtx (CONST_INT
, Pmode
, CODE_LABEL_NUMBER(xops
[1]))));
1881 emit_insn (gen_prologue_set_got (xops
[0],
1882 gen_rtx (SYMBOL_REF
, Pmode
, "$_GLOBAL_OFFSET_TABLE_"),
1883 gen_rtx (CONST_INT
, Pmode
, CODE_LABEL_NUMBER(xops
[1]))));
1885 else if (pic_reg_used
)
1887 xops
[0] = pic_offset_table_rtx
;
1888 xops
[1] = (rtx
) gen_label_rtx ();
1890 emit_insn (gen_prologue_get_pc (xops
[0], gen_rtx (CONST_INT
, Pmode
, CODE_LABEL_NUMBER(xops
[1]))));
1891 emit_insn (gen_pop (xops
[0]));
1892 emit_insn (gen_prologue_set_got (xops
[0],
1893 gen_rtx (SYMBOL_REF
, Pmode
, "$_GLOBAL_OFFSET_TABLE_"),
1894 gen_rtx (CONST_INT
, Pmode
, CODE_LABEL_NUMBER (xops
[1]))));
1898 /* Restore function stack, frame, and registers. */
1901 function_epilogue (file
, size
)
1907 /* Return 1 if it is appropriate to emit `ret' instructions in the
1908 body of a function. Do this only if the epilogue is simple, needing a
1909 couple of insns. Prior to reloading, we can't tell how many registers
1910 must be saved, so return 0 then. Return 0 if there is no frame
1911 marker to de-allocate.
1913 If NON_SAVING_SETJMP is defined and true, then it is not possible
1914 for the epilogue to be simple, so return 0. This is a special case
1915 since NON_SAVING_SETJMP will not cause regs_ever_live to change
1916 until final, but jump_optimize may need to know sooner if a
1920 ix86_can_use_return_insn_p ()
1924 int reglimit
= (frame_pointer_needed
1925 ? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
1926 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1927 || current_function_uses_const_pool
);
1929 #ifdef NON_SAVING_SETJMP
1930 if (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
1934 if (! reload_completed
)
1937 for (regno
= reglimit
- 1; regno
>= 0; regno
--)
1938 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1939 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
1942 return nregs
== 0 || ! frame_pointer_needed
;
1946 /* This function generates the assembly code for function exit.
1947 FILE is an stdio stream to output the code to.
1948 SIZE is an int: how many units of temporary storage to deallocate. */
1951 ix86_expand_epilogue ()
1954 register int nregs
, limit
;
1957 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1958 || current_function_uses_const_pool
);
1959 long tsize
= get_frame_size ();
1961 /* Compute the number of registers to pop */
1963 limit
= (frame_pointer_needed
1964 ? FRAME_POINTER_REGNUM
1965 : STACK_POINTER_REGNUM
);
1969 for (regno
= limit
- 1; regno
>= 0; regno
--)
1970 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1971 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
1974 /* sp is often unreliable so we must go off the frame pointer,
1977 /* In reality, we may not care if sp is unreliable, because we can
1978 restore the register relative to the frame pointer. In theory,
1979 since each move is the same speed as a pop, and we don't need the
1980 leal, this is faster. For now restore multiple registers the old
1983 offset
= -tsize
- (nregs
* UNITS_PER_WORD
);
1985 xops
[2] = stack_pointer_rtx
;
1987 if (nregs
> 1 || ! frame_pointer_needed
)
1989 if (frame_pointer_needed
)
1991 xops
[0] = adj_offsettable_operand (AT_BP (QImode
), offset
);
1992 emit_insn (gen_movsi_lea (xops
[2], XEXP (xops
[0], 0)));
1993 /* output_asm_insn (AS2 (lea%L2,%0,%2), xops);*/
1996 for (regno
= 0; regno
< limit
; regno
++)
1997 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1998 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2000 xops
[0] = gen_rtx (REG
, SImode
, regno
);
2001 emit_insn (gen_pop (xops
[0]));
2002 /* output_asm_insn ("pop%L0 %0", xops);*/
2006 for (regno
= 0; regno
< limit
; regno
++)
2007 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2008 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2010 xops
[0] = gen_rtx (REG
, SImode
, regno
);
2011 xops
[1] = adj_offsettable_operand (AT_BP (Pmode
), offset
);
2012 emit_move_insn (xops
[0], xops
[1]);
2013 /* output_asm_insn (AS2 (mov%L0,%1,%0), xops);*/
2017 if (frame_pointer_needed
)
2019 /* If not an i386, mov & pop is faster than "leave". */
2021 if (TARGET_USE_LEAVE
)
2022 emit_insn (gen_leave());
2023 /* output_asm_insn ("leave", xops);*/
2026 xops
[0] = frame_pointer_rtx
;
2027 xops
[1] = stack_pointer_rtx
;
2028 emit_insn (gen_epilogue_set_stack_ptr());
2029 /* output_asm_insn (AS2 (mov%L2,%0,%2), xops);*/
2030 emit_insn (gen_pop (xops
[0]));
2031 /* output_asm_insn ("pop%L0 %0", xops);*/
2036 /* If there is no frame pointer, we must still release the frame. */
2038 xops
[0] = GEN_INT (tsize
);
2039 emit_insn (gen_rtx (SET
, SImode
,
2041 gen_rtx (PLUS
, SImode
,
2044 /* output_asm_insn (AS2 (add%L2,%0,%2), xops);*/
2047 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2048 if (profile_block_flag
== 2)
2050 FUNCTION_BLOCK_PROFILER_EXIT(file
);
2054 if (current_function_pops_args
&& current_function_args_size
)
2056 xops
[1] = GEN_INT (current_function_pops_args
);
2058 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2059 asked to pop more, pop return address, do explicit add, and jump
2060 indirectly to the caller. */
2062 if (current_function_pops_args
>= 32768)
2064 /* ??? Which register to use here? */
2065 xops
[0] = gen_rtx (REG
, SImode
, 2);
2066 emit_insn (gen_pop (xops
[0]));
2067 /* output_asm_insn ("pop%L0 %0", xops);*/
2068 emit_insn (gen_rtx (SET
, SImode
,
2070 gen_rtx (PLUS
, SImode
,
2073 /* output_asm_insn (AS2 (add%L2,%1,%2), xops);*/
2074 emit_jump_insn (xops
[0]);
2075 /* output_asm_insn ("jmp %*%0", xops);*/
2078 emit_jump_insn (gen_return_pop_internal (xops
[1]));
2079 /* output_asm_insn ("ret %1", xops);*/
2082 /* output_asm_insn ("ret", xops);*/
2083 emit_jump_insn (gen_return_internal ());
2087 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2088 that is a valid memory address for an instruction.
2089 The MODE argument is the machine mode for the MEM expression
2090 that wants to use this address.
2092 On x86, legitimate addresses are:
2093 base movl (base),reg
2094 displacement movl disp,reg
2095 base + displacement movl disp(base),reg
2096 index + base movl (base,index),reg
2097 (index + base) + displacement movl disp(base,index),reg
2098 index*scale movl (,index,scale),reg
2099 index*scale + disp movl disp(,index,scale),reg
2100 index*scale + base movl (base,index,scale),reg
2101 (index*scale + base) + disp movl disp(base,index,scale),reg
2103 In each case, scale can be 1, 2, 4, 8. */
2105 /* This is exactly the same as print_operand_addr, except that
2106 it recognizes addresses instead of printing them.
2108 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2109 convert common non-canonical forms to canonical form so that they will
2112 #define ADDR_INVALID(msg,insn) \
2114 if (TARGET_DEBUG_ADDR) \
2116 fprintf (stderr, msg); \
2122 legitimate_address_p (mode
, addr
, strict
)
2123 enum machine_mode mode
;
2127 rtx base
= NULL_RTX
;
2128 rtx indx
= NULL_RTX
;
2129 rtx scale
= NULL_RTX
;
2130 rtx disp
= NULL_RTX
;
2132 if (TARGET_DEBUG_ADDR
)
2135 "\n==========\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2136 GET_MODE_NAME (mode
), strict
);
2141 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == SUBREG
)
2142 base
= addr
; /* base reg */
2144 else if (GET_CODE (addr
) == PLUS
)
2146 rtx op0
= XEXP (addr
, 0);
2147 rtx op1
= XEXP (addr
, 1);
2148 enum rtx_code code0
= GET_CODE (op0
);
2149 enum rtx_code code1
= GET_CODE (op1
);
2151 if (code0
== REG
|| code0
== SUBREG
)
2153 if (code1
== REG
|| code1
== SUBREG
)
2155 indx
= op0
; /* index + base */
2161 base
= op0
; /* base + displacement */
2166 else if (code0
== MULT
)
2168 indx
= XEXP (op0
, 0);
2169 scale
= XEXP (op0
, 1);
2171 if (code1
== REG
|| code1
== SUBREG
)
2172 base
= op1
; /* index*scale + base */
2175 disp
= op1
; /* index*scale + disp */
2178 else if (code0
== PLUS
&& GET_CODE (XEXP (op0
, 0)) == MULT
)
2180 indx
= XEXP (XEXP (op0
, 0), 0); /* index*scale + base + disp */
2181 scale
= XEXP (XEXP (op0
, 0), 1);
2182 base
= XEXP (op0
, 1);
2186 else if (code0
== PLUS
)
2188 indx
= XEXP (op0
, 0); /* index + base + disp */
2189 base
= XEXP (op0
, 1);
2195 ADDR_INVALID ("PLUS subcode is not valid.\n", op0
);
2200 else if (GET_CODE (addr
) == MULT
)
2202 indx
= XEXP (addr
, 0); /* index*scale */
2203 scale
= XEXP (addr
, 1);
2207 disp
= addr
; /* displacement */
2209 /* Allow arg pointer and stack pointer as index if there is not scaling */
2210 if (base
&& indx
&& !scale
2211 && (indx
== arg_pointer_rtx
|| indx
== stack_pointer_rtx
))
2218 /* Validate base register */
2219 /* Don't allow SUBREG's here, it can lead to spill failures when the base
2220 is one word out of a two word structure, which is represented internally
2224 if (GET_CODE (base
) != REG
)
2226 ADDR_INVALID ("Base is not a register.\n", base
);
2230 if ((strict
&& !REG_OK_FOR_BASE_STRICT_P (base
))
2231 || (!strict
&& !REG_OK_FOR_BASE_NONSTRICT_P (base
)))
2233 ADDR_INVALID ("Base is not valid.\n", base
);
2238 /* Validate index register */
2239 /* Don't allow SUBREG's here, it can lead to spill failures when the index
2240 is one word out of a two word structure, which is represented internally
2244 if (GET_CODE (indx
) != REG
)
2246 ADDR_INVALID ("Index is not a register.\n", indx
);
2250 if ((strict
&& !REG_OK_FOR_INDEX_STRICT_P (indx
))
2251 || (!strict
&& !REG_OK_FOR_INDEX_NONSTRICT_P (indx
)))
2253 ADDR_INVALID ("Index is not valid.\n", indx
);
2258 abort (); /* scale w/o index invalid */
2260 /* Validate scale factor */
2263 HOST_WIDE_INT value
;
2265 if (GET_CODE (scale
) != CONST_INT
)
2267 ADDR_INVALID ("Scale is not valid.\n", scale
);
2271 value
= INTVAL (scale
);
2272 if (value
!= 1 && value
!= 2 && value
!= 4 && value
!= 8)
2274 ADDR_INVALID ("Scale is not a good multiplier.\n", scale
);
2279 /* Validate displacement
2280 Constant pool addresses must be handled special. They are
2281 considered legitimate addresses, but only if not used with regs.
2282 When printed, the output routines know to print the reference with the
2283 PIC reg, even though the PIC reg doesn't appear in the RTL. */
2286 if (GET_CODE (disp
) == SYMBOL_REF
2287 && CONSTANT_POOL_ADDRESS_P (disp
)
2292 else if (!CONSTANT_ADDRESS_P (disp
))
2294 ADDR_INVALID ("Displacement is not valid.\n", disp
);
2298 else if (GET_CODE (disp
) == CONST_DOUBLE
)
2300 ADDR_INVALID ("Displacement is a const_double.\n", disp
);
2304 else if (flag_pic
&& SYMBOLIC_CONST (disp
)
2305 && base
!= pic_offset_table_rtx
2306 && (indx
!= pic_offset_table_rtx
|| scale
!= NULL_RTX
))
2308 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp
);
2312 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp
)
2313 && (base
!= NULL_RTX
|| indx
!= NULL_RTX
))
2315 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n", disp
);
2320 if (TARGET_DEBUG_ADDR
)
2321 fprintf (stderr
, "Address is valid.\n");
2323 /* Everything looks valid, return true */
2328 /* Return a legitimate reference for ORIG (an address) using the
2329 register REG. If REG is 0, a new pseudo is generated.
2331 There are three types of references that must be handled:
2333 1. Global data references must load the address from the GOT, via
2334 the PIC reg. An insn is emitted to do this load, and the reg is
2337 2. Static data references must compute the address as an offset
2338 from the GOT, whose base is in the PIC reg. An insn is emitted to
2339 compute the address into a reg, and the reg is returned. Static
2340 data objects have SYMBOL_REF_FLAG set to differentiate them from
2341 global data objects.
2343 3. Constant pool addresses must be handled special. They are
2344 considered legitimate addresses, but only if not used with regs.
2345 When printed, the output routines know to print the reference with the
2346 PIC reg, even though the PIC reg doesn't appear in the RTL.
2348 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2349 reg also appears in the address (except for constant pool references,
2352 "switch" statements also require special handling when generating
2353 PIC code. See comments by the `casesi' insn in i386.md for details. */
2356 legitimize_pic_address (orig
, reg
)
2363 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
2365 if (GET_CODE (addr
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (addr
))
2370 reg
= gen_reg_rtx (Pmode
);
2372 if ((GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_FLAG (addr
))
2373 || GET_CODE (addr
) == LABEL_REF
)
2374 new = gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
, orig
);
2376 new = gen_rtx (MEM
, Pmode
,
2377 gen_rtx (PLUS
, Pmode
,
2378 pic_offset_table_rtx
, orig
));
2380 emit_move_insn (reg
, new);
2382 current_function_uses_pic_offset_table
= 1;
2385 else if (GET_CODE (addr
) == CONST
|| GET_CODE (addr
) == PLUS
)
2389 if (GET_CODE (addr
) == CONST
)
2391 addr
= XEXP (addr
, 0);
2392 if (GET_CODE (addr
) != PLUS
)
2396 if (XEXP (addr
, 0) == pic_offset_table_rtx
)
2400 reg
= gen_reg_rtx (Pmode
);
2402 base
= legitimize_pic_address (XEXP (addr
, 0), reg
);
2403 addr
= legitimize_pic_address (XEXP (addr
, 1),
2404 base
== reg
? NULL_RTX
: reg
);
2406 if (GET_CODE (addr
) == CONST_INT
)
2407 return plus_constant (base
, INTVAL (addr
));
2409 if (GET_CODE (addr
) == PLUS
&& CONSTANT_P (XEXP (addr
, 1)))
2411 base
= gen_rtx (PLUS
, Pmode
, base
, XEXP (addr
, 0));
2412 addr
= XEXP (addr
, 1);
2414 return gen_rtx (PLUS
, Pmode
, base
, addr
);
2420 /* Emit insns to move operands[1] into operands[0]. */
2423 emit_pic_move (operands
, mode
)
2425 enum machine_mode mode
;
2427 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
2429 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
2430 operands
[1] = (rtx
) force_reg (SImode
, operands
[1]);
2432 operands
[1] = legitimize_pic_address (operands
[1], temp
);
2436 /* Try machine-dependent ways of modifying an illegitimate address
2437 to be legitimate. If we find one, return the new, valid address.
2438 This macro is used in only one place: `memory_address' in explow.c.
2440 OLDX is the address as it was before break_out_memory_refs was called.
2441 In some cases it is useful to look at this to decide what needs to be done.
2443 MODE and WIN are passed so that this macro can use
2444 GO_IF_LEGITIMATE_ADDRESS.
2446 It is always safe for this macro to do nothing. It exists to recognize
2447 opportunities to optimize the output.
2449 For the 80386, we handle X+REG by loading X into a register R and
2450 using R+REG. R will go in a general reg and indexing will be used.
2451 However, if REG is a broken-out memory address or multiplication,
2452 nothing needs to be done because REG can certainly go in a general reg.
2454 When -fpic is used, special handling is needed for symbolic references.
2455 See comments by legitimize_pic_address in i386.c for details. */
2458 legitimize_address (x
, oldx
, mode
)
2461 enum machine_mode mode
;
2466 if (TARGET_DEBUG_ADDR
)
2468 fprintf (stderr
, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n", GET_MODE_NAME (mode
));
2472 if (flag_pic
&& SYMBOLIC_CONST (x
))
2473 return legitimize_pic_address (x
, 0);
2475 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2476 if (GET_CODE (x
) == ASHIFT
2477 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2478 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (x
, 1)))) < 4)
2481 x
= gen_rtx (MULT
, Pmode
,
2482 force_reg (Pmode
, XEXP (x
, 0)),
2483 GEN_INT (1 << log
));
2486 if (GET_CODE (x
) == PLUS
)
2488 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2489 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
2490 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2491 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 0), 1)))) < 4)
2494 XEXP (x
, 0) = gen_rtx (MULT
, Pmode
,
2495 force_reg (Pmode
, XEXP (XEXP (x
, 0), 0)),
2496 GEN_INT (1 << log
));
2499 if (GET_CODE (XEXP (x
, 1)) == ASHIFT
2500 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
2501 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 1), 1)))) < 4)
2504 XEXP (x
, 1) = gen_rtx (MULT
, Pmode
,
2505 force_reg (Pmode
, XEXP (XEXP (x
, 1), 0)),
2506 GEN_INT (1 << log
));
2509 /* Put multiply first if it isn't already */
2510 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2512 rtx tmp
= XEXP (x
, 0);
2513 XEXP (x
, 0) = XEXP (x
, 1);
2518 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2519 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2520 created by virtual register instantiation, register elimination, and
2521 similar optimizations. */
2522 if (GET_CODE (XEXP (x
, 0)) == MULT
&& GET_CODE (XEXP (x
, 1)) == PLUS
)
2525 x
= gen_rtx (PLUS
, Pmode
,
2526 gen_rtx (PLUS
, Pmode
, XEXP (x
, 0), XEXP (XEXP (x
, 1), 0)),
2527 XEXP (XEXP (x
, 1), 1));
2530 /* Canonicalize (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2531 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2532 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 0)) == PLUS
2533 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == MULT
2534 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == PLUS
2535 && CONSTANT_P (XEXP (x
, 1)))
2537 rtx constant
, other
;
2539 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2541 constant
= XEXP (x
, 1);
2542 other
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
2544 else if (GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 1)) == CONST_INT
)
2546 constant
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
2547 other
= XEXP (x
, 1);
2555 x
= gen_rtx (PLUS
, Pmode
,
2556 gen_rtx (PLUS
, Pmode
, XEXP (XEXP (x
, 0), 0),
2557 XEXP (XEXP (XEXP (x
, 0), 1), 0)),
2558 plus_constant (other
, INTVAL (constant
)));
2562 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
2565 if (GET_CODE (XEXP (x
, 0)) == MULT
)
2568 XEXP (x
, 0) = force_operand (XEXP (x
, 0), 0);
2571 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2574 XEXP (x
, 1) = force_operand (XEXP (x
, 1), 0);
2578 && GET_CODE (XEXP (x
, 1)) == REG
2579 && GET_CODE (XEXP (x
, 0)) == REG
)
2582 if (flag_pic
&& SYMBOLIC_CONST (XEXP (x
, 1)))
2585 x
= legitimize_pic_address (x
, 0);
2588 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
2591 if (GET_CODE (XEXP (x
, 0)) == REG
)
2593 register rtx temp
= gen_reg_rtx (Pmode
);
2594 register rtx val
= force_operand (XEXP (x
, 1), temp
);
2596 emit_move_insn (temp
, val
);
2602 else if (GET_CODE (XEXP (x
, 1)) == REG
)
2604 register rtx temp
= gen_reg_rtx (Pmode
);
2605 register rtx val
= force_operand (XEXP (x
, 0), temp
);
2607 emit_move_insn (temp
, val
);
2618 /* Print an integer constant expression in assembler syntax. Addition
2619 and subtraction are the only arithmetic that may appear in these
2620 expressions. FILE is the stdio stream to write to, X is the rtx, and
2621 CODE is the operand print code from the output string. */
2624 output_pic_addr_const (file
, x
, code
)
2631 switch (GET_CODE (x
))
2642 if (GET_CODE (x
) == SYMBOL_REF
)
2643 assemble_name (file
, XSTR (x
, 0));
2646 ASM_GENERATE_INTERNAL_LABEL (buf
, "L",
2647 CODE_LABEL_NUMBER (XEXP (x
, 0)));
2648 assemble_name (asm_out_file
, buf
);
2651 if (GET_CODE (x
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x
))
2652 fprintf (file
, "@GOTOFF(%%ebx)");
2653 else if (code
== 'P')
2654 fprintf (file
, "@PLT");
2655 else if (GET_CODE (x
) == LABEL_REF
)
2656 fprintf (file
, "@GOTOFF");
2657 else if (! SYMBOL_REF_FLAG (x
))
2658 fprintf (file
, "@GOT");
2660 fprintf (file
, "@GOTOFF");
2665 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (x
));
2666 assemble_name (asm_out_file
, buf
);
2670 fprintf (file
, "%d", INTVAL (x
));
2674 /* This used to output parentheses around the expression,
2675 but that does not work on the 386 (either ATT or BSD assembler). */
2676 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2680 if (GET_MODE (x
) == VOIDmode
)
2682 /* We can use %d if the number is <32 bits and positive. */
2683 if (CONST_DOUBLE_HIGH (x
) || CONST_DOUBLE_LOW (x
) < 0)
2684 fprintf (file
, "0x%x%08x",
2685 CONST_DOUBLE_HIGH (x
), CONST_DOUBLE_LOW (x
));
2687 fprintf (file
, "%d", CONST_DOUBLE_LOW (x
));
2690 /* We can't handle floating point constants;
2691 PRINT_OPERAND must handle them. */
2692 output_operand_lossage ("floating constant misused");
2696 /* Some assemblers need integer constants to appear last (eg masm). */
2697 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
2699 output_pic_addr_const (file
, XEXP (x
, 1), code
);
2700 if (INTVAL (XEXP (x
, 0)) >= 0)
2701 fprintf (file
, "+");
2702 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2706 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2707 if (INTVAL (XEXP (x
, 1)) >= 0)
2708 fprintf (file
, "+");
2709 output_pic_addr_const (file
, XEXP (x
, 1), code
);
2714 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2715 fprintf (file
, "-");
2716 output_pic_addr_const (file
, XEXP (x
, 1), code
);
2720 output_operand_lossage ("invalid expression as operand");
2724 /* Append the correct conditional move suffix which corresponds to CODE */
2727 put_condition_code (code
, mode
, file
)
2729 enum mode_class mode
;
2732 if (mode
== MODE_INT
)
2736 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
2742 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
2748 fputs ("ge", file
); return;
2750 fputs ("g", file
); return;
2752 fputs ("le", file
); return;
2754 fputs ("l", file
); return;
2756 fputs ("ae", file
); return;
2758 fputs ("a", file
); return;
2760 fputs ("be", file
); return;
2762 fputs ("b", file
); return;
2763 default: output_operand_lossage ("Invalid %%C operand");
2765 else if (mode
== MODE_FLOAT
)
2769 fputs ("ne", file
); return;
2771 fputs ("e", file
); return;
2773 fputs ("nb", file
); return;
2775 fputs ("nbe", file
); return;
2777 fputs ("be", file
); return;
2779 fputs ("b", file
); return;
2781 fputs ("nb", file
); return;
2783 fputs ("nbe", file
); return;
2785 fputs ("be", file
); return;
2787 fputs ("b", file
); return;
2788 default: output_operand_lossage ("Invalid %%C operand");
2793 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
2794 C -- print opcode suffix for set/cmov insn.
2795 c -- like C, but print reversed condition
2796 F -- print opcode suffix for fcmov insn.
2797 f -- like C, but print reversed condition
2798 R -- print the prefix for register names.
2799 z -- print the opcode suffix for the size of the current operand.
2800 * -- print a star (in certain assembler syntax)
2801 w -- print the operand as if it's a "word" (HImode) even if it isn't.
2802 c -- don't print special prefixes before constant operands.
2803 J -- print the appropriate jump operand.
2804 s -- print a shift double count, followed by the assemblers argument
2806 b -- print the QImode name of the register for the indicated operand.
2807 %b0 would print %al if operands[0] is reg 0.
2808 w -- likewise, print the HImode name of the register.
2809 k -- likewise, print the SImode name of the register.
2810 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
2811 y -- print "st(0)" instead of "st" as a register.
2812 P -- print as a PIC constant
2816 print_operand (file
, x
, code
)
2831 PUT_OP_SIZE (code
, 'l', file
);
2835 PUT_OP_SIZE (code
, 'w', file
);
2839 PUT_OP_SIZE (code
, 'b', file
);
2843 PUT_OP_SIZE (code
, 'l', file
);
2847 PUT_OP_SIZE (code
, 's', file
);
2851 PUT_OP_SIZE (code
, 't', file
);
2855 /* 387 opcodes don't get size suffixes if the operands are
2858 if (STACK_REG_P (x
))
2861 /* this is the size of op from size of operand */
2862 switch (GET_MODE_SIZE (GET_MODE (x
)))
2865 PUT_OP_SIZE ('B', 'b', file
);
2869 PUT_OP_SIZE ('W', 'w', file
);
2873 if (GET_MODE (x
) == SFmode
)
2875 PUT_OP_SIZE ('S', 's', file
);
2879 PUT_OP_SIZE ('L', 'l', file
);
2883 PUT_OP_SIZE ('T', 't', file
);
2887 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
)
2889 #ifdef GAS_MNEMONICS
2890 PUT_OP_SIZE ('Q', 'q', file
);
2893 PUT_OP_SIZE ('Q', 'l', file
); /* Fall through */
2897 PUT_OP_SIZE ('Q', 'l', file
);
2910 switch (GET_CODE (x
))
2912 /* These conditions are appropriate for testing the result
2913 of an arithmetic operation, not for a compare operation.
2914 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
2915 CC_Z_IN_NOT_C false and not floating point. */
2916 case NE
: fputs ("jne", file
); return;
2917 case EQ
: fputs ("je", file
); return;
2918 case GE
: fputs ("jns", file
); return;
2919 case LT
: fputs ("js", file
); return;
2920 case GEU
: fputs ("jmp", file
); return;
2921 case GTU
: fputs ("jne", file
); return;
2922 case LEU
: fputs ("je", file
); return;
2923 case LTU
: fputs ("#branch never", file
); return;
2925 /* no matching branches for GT nor LE */
2930 if (GET_CODE (x
) == CONST_INT
|| ! SHIFT_DOUBLE_OMITS_COUNT
)
2932 PRINT_OPERAND (file
, x
, 0);
2933 fputs (AS2C (,) + 1, file
);
2937 /* This is used by the conditional move instructions. */
2939 put_condition_code (GET_CODE (x
), MODE_INT
, file
);
2942 /* like above, but reverse condition */
2944 put_condition_code (reverse_condition (GET_CODE (x
)), MODE_INT
, file
);
2948 put_condition_code (GET_CODE (x
), MODE_FLOAT
, file
);
2951 /* like above, but reverse condition */
2953 put_condition_code (reverse_condition (GET_CODE (x
)),
2961 sprintf (str
, "invalid operand code `%c'", code
);
2962 output_operand_lossage (str
);
2966 if (GET_CODE (x
) == REG
)
2968 PRINT_REG (x
, code
, file
);
2970 else if (GET_CODE (x
) == MEM
)
2972 PRINT_PTR (x
, file
);
2973 if (CONSTANT_ADDRESS_P (XEXP (x
, 0)))
2976 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2978 output_addr_const (file
, XEXP (x
, 0));
2981 output_address (XEXP (x
, 0));
2983 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
2985 REAL_VALUE_TYPE r
; long l
;
2986 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
2987 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
2988 PRINT_IMMED_PREFIX (file
);
2989 fprintf (file
, "0x%x", l
);
2991 /* These float cases don't actually occur as immediate operands. */
2992 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
2994 REAL_VALUE_TYPE r
; char dstr
[30];
2995 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
2996 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
2997 fprintf (file
, "%s", dstr
);
2999 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == XFmode
)
3001 REAL_VALUE_TYPE r
; char dstr
[30];
3002 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3003 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
3004 fprintf (file
, "%s", dstr
);
3010 if (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
)
3011 PRINT_IMMED_PREFIX (file
);
3012 else if (GET_CODE (x
) == CONST
|| GET_CODE (x
) == SYMBOL_REF
3013 || GET_CODE (x
) == LABEL_REF
)
3014 PRINT_OFFSET_PREFIX (file
);
3017 output_pic_addr_const (file
, x
, code
);
3019 output_addr_const (file
, x
);
3023 /* Print a memory operand whose address is ADDR. */
3026 print_operand_address (file
, addr
)
3030 register rtx reg1
, reg2
, breg
, ireg
;
3033 switch (GET_CODE (addr
))
3037 fprintf (file
, "%se", RP
);
3038 fputs (hi_reg_name
[REGNO (addr
)], file
);
3048 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0)))
3050 offset
= XEXP (addr
, 0);
3051 addr
= XEXP (addr
, 1);
3053 else if (CONSTANT_ADDRESS_P (XEXP (addr
, 1)))
3055 offset
= XEXP (addr
, 1);
3056 addr
= XEXP (addr
, 0);
3058 if (GET_CODE (addr
) != PLUS
) ;
3059 else if (GET_CODE (XEXP (addr
, 0)) == MULT
)
3061 reg1
= XEXP (addr
, 0);
3062 addr
= XEXP (addr
, 1);
3064 else if (GET_CODE (XEXP (addr
, 1)) == MULT
)
3066 reg1
= XEXP (addr
, 1);
3067 addr
= XEXP (addr
, 0);
3069 else if (GET_CODE (XEXP (addr
, 0)) == REG
)
3071 reg1
= XEXP (addr
, 0);
3072 addr
= XEXP (addr
, 1);
3074 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
3076 reg1
= XEXP (addr
, 1);
3077 addr
= XEXP (addr
, 0);
3079 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == MULT
)
3081 if (reg1
== 0) reg1
= addr
;
3087 if (addr
!= 0) abort ();
3090 if ((reg1
&& GET_CODE (reg1
) == MULT
)
3091 || (reg2
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2
))))
3096 else if (reg1
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1
)))
3102 if (ireg
!= 0 || breg
!= 0)
3109 output_pic_addr_const (file
, addr
, 0);
3111 else if (GET_CODE (addr
) == LABEL_REF
)
3112 output_asm_label (addr
);
3115 output_addr_const (file
, addr
);
3118 if (ireg
!= 0 && GET_CODE (ireg
) == MULT
)
3120 scale
= INTVAL (XEXP (ireg
, 1));
3121 ireg
= XEXP (ireg
, 0);
3124 /* The stack pointer can only appear as a base register,
3125 never an index register, so exchange the regs if it is wrong. */
3127 if (scale
== 1 && ireg
&& REGNO (ireg
) == STACK_POINTER_REGNUM
)
3136 /* output breg+ireg*scale */
3137 PRINT_B_I_S (breg
, ireg
, scale
, file
);
3144 if (GET_CODE (XEXP (addr
, 0)) == CONST_INT
)
3146 scale
= INTVAL (XEXP (addr
, 0));
3147 ireg
= XEXP (addr
, 1);
3151 scale
= INTVAL (XEXP (addr
, 1));
3152 ireg
= XEXP (addr
, 0);
3154 output_addr_const (file
, const0_rtx
);
3155 PRINT_B_I_S ((rtx
) 0, ireg
, scale
, file
);
3160 if (GET_CODE (addr
) == CONST_INT
3161 && INTVAL (addr
) < 0x8000
3162 && INTVAL (addr
) >= -0x8000)
3163 fprintf (file
, "%d", INTVAL (addr
));
3167 output_pic_addr_const (file
, addr
, 0);
3169 output_addr_const (file
, addr
);
3174 /* Set the cc_status for the results of an insn whose pattern is EXP.
3175 On the 80386, we assume that only test and compare insns, as well
3176 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3177 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3178 Also, we assume that jumps, moves and sCOND don't affect the condition
3179 codes. All else clobbers the condition codes, by assumption.
3181 We assume that ALL integer add, minus, etc. instructions effect the
3182 condition codes. This MUST be consistent with i386.md.
3184 We don't record any float test or compare - the redundant test &
3185 compare check in final.c does not handle stack-like regs correctly. */
3188 notice_update_cc (exp
)
3191 if (GET_CODE (exp
) == SET
)
3193 /* Jumps do not alter the cc's. */
3194 if (SET_DEST (exp
) == pc_rtx
)
3196 #ifdef IS_STACK_MODE
3197 /* Moving into a memory of stack_mode may have been moved
3198 in between the use and set of cc0 by loop_spl(). So
3199 old value of cc.status must be retained */
3200 if(GET_CODE(SET_DEST(exp
))==MEM
3201 && IS_STACK_MODE(GET_MODE(SET_DEST(exp
))))
3206 /* Moving register or memory into a register:
3207 it doesn't alter the cc's, but it might invalidate
3208 the RTX's which we remember the cc's came from.
3209 (Note that moving a constant 0 or 1 MAY set the cc's). */
3210 if (REG_P (SET_DEST (exp
))
3211 && (REG_P (SET_SRC (exp
)) || GET_CODE (SET_SRC (exp
)) == MEM
3212 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3214 if (cc_status
.value1
3215 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3216 cc_status
.value1
= 0;
3217 if (cc_status
.value2
3218 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3219 cc_status
.value2
= 0;
3222 /* Moving register into memory doesn't alter the cc's.
3223 It may invalidate the RTX's which we remember the cc's came from. */
3224 if (GET_CODE (SET_DEST (exp
)) == MEM
3225 && (REG_P (SET_SRC (exp
))
3226 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3228 if (cc_status
.value1
&& GET_CODE (cc_status
.value1
) == MEM
3229 || reg_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3230 cc_status
.value1
= 0;
3231 if (cc_status
.value2
&& GET_CODE (cc_status
.value2
) == MEM
3232 || reg_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3233 cc_status
.value2
= 0;
3236 /* Function calls clobber the cc's. */
3237 else if (GET_CODE (SET_SRC (exp
)) == CALL
)
3242 /* Tests and compares set the cc's in predictable ways. */
3243 else if (SET_DEST (exp
) == cc0_rtx
)
3246 cc_status
.value1
= SET_SRC (exp
);
3249 /* Certain instructions effect the condition codes. */
3250 else if (GET_MODE (SET_SRC (exp
)) == SImode
3251 || GET_MODE (SET_SRC (exp
)) == HImode
3252 || GET_MODE (SET_SRC (exp
)) == QImode
)
3253 switch (GET_CODE (SET_SRC (exp
)))
3255 case ASHIFTRT
: case LSHIFTRT
:
3257 /* Shifts on the 386 don't set the condition codes if the
3258 shift count is zero. */
3259 if (GET_CODE (XEXP (SET_SRC (exp
), 1)) != CONST_INT
)
3264 /* We assume that the CONST_INT is non-zero (this rtx would
3265 have been deleted if it were zero. */
3267 case PLUS
: case MINUS
: case NEG
:
3268 case AND
: case IOR
: case XOR
:
3269 cc_status
.flags
= CC_NO_OVERFLOW
;
3270 cc_status
.value1
= SET_SRC (exp
);
3271 cc_status
.value2
= SET_DEST (exp
);
3282 else if (GET_CODE (exp
) == PARALLEL
3283 && GET_CODE (XVECEXP (exp
, 0, 0)) == SET
)
3285 if (SET_DEST (XVECEXP (exp
, 0, 0)) == pc_rtx
)
3287 if (SET_DEST (XVECEXP (exp
, 0, 0)) == cc0_rtx
)
3290 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp
, 0, 0))))
3291 cc_status
.flags
|= CC_IN_80387
;
3293 cc_status
.value1
= SET_SRC (XVECEXP (exp
, 0, 0));
3304 /* Split one or more DImode RTL references into pairs of SImode
3305 references. The RTL can be REG, offsettable MEM, integer constant, or
3306 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3307 split and "num" is its length. lo_half and hi_half are output arrays
3308 that parallel "operands". */
3311 split_di (operands
, num
, lo_half
, hi_half
)
3314 rtx lo_half
[], hi_half
[];
3318 if (GET_CODE (operands
[num
]) == REG
)
3320 lo_half
[num
] = gen_rtx (REG
, SImode
, REGNO (operands
[num
]));
3321 hi_half
[num
] = gen_rtx (REG
, SImode
, REGNO (operands
[num
]) + 1);
3323 else if (CONSTANT_P (operands
[num
]))
3325 split_double (operands
[num
], &lo_half
[num
], &hi_half
[num
]);
3327 else if (offsettable_memref_p (operands
[num
]))
3329 lo_half
[num
] = operands
[num
];
3330 hi_half
[num
] = adj_offsettable_operand (operands
[num
], 4);
3337 /* Return 1 if this is a valid binary operation on a 387.
3338 OP is the expression matched, and MODE is its mode. */
3341 binary_387_op (op
, mode
)
3343 enum machine_mode mode
;
3345 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3348 switch (GET_CODE (op
))
3354 return GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
;
3362 /* Return 1 if this is a valid shift or rotate operation on a 386.
3363 OP is the expression matched, and MODE is its mode. */
3368 enum machine_mode mode
;
3370 rtx operand
= XEXP (op
, 0);
3372 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3375 if (GET_MODE (operand
) != GET_MODE (op
)
3376 || GET_MODE_CLASS (GET_MODE (op
)) != MODE_INT
)
3379 return (GET_CODE (op
) == ASHIFT
3380 || GET_CODE (op
) == ASHIFTRT
3381 || GET_CODE (op
) == LSHIFTRT
3382 || GET_CODE (op
) == ROTATE
3383 || GET_CODE (op
) == ROTATERT
);
3386 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3387 MODE is not used. */
3390 VOIDmode_compare_op (op
, mode
)
3392 enum machine_mode mode
;
3394 return GET_CODE (op
) == COMPARE
&& GET_MODE (op
) == VOIDmode
;
3397 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3398 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3399 is the expression of the binary operation. The output may either be
3400 emitted here, or returned to the caller, like all output_* functions.
3402 There is no guarantee that the operands are the same mode, as they
3403 might be within FLOAT or FLOAT_EXTEND expressions. */
3406 output_387_binary_op (insn
, operands
)
3412 static char buf
[100];
3414 switch (GET_CODE (operands
[3]))
3417 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3418 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3425 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3426 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3433 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3434 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3441 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3442 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3452 strcpy (buf
, base_op
);
3454 switch (GET_CODE (operands
[3]))
3458 if (REG_P (operands
[2]) && REGNO (operands
[0]) == REGNO (operands
[2]))
3461 operands
[2] = operands
[1];
3465 if (GET_CODE (operands
[2]) == MEM
)
3466 return strcat (buf
, AS1 (%z2
,%2));
3468 if (NON_STACK_REG_P (operands
[1]))
3470 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
3473 else if (NON_STACK_REG_P (operands
[2]))
3475 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3479 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3480 return strcat (buf
, AS2 (p
,%2,%0));
3482 if (STACK_TOP_P (operands
[0]))
3483 return strcat (buf
, AS2C (%y2
,%0));
3485 return strcat (buf
, AS2C (%2,%0));
3489 if (GET_CODE (operands
[1]) == MEM
)
3490 return strcat (buf
, AS1 (r
%z1
,%1));
3492 if (GET_CODE (operands
[2]) == MEM
)
3493 return strcat (buf
, AS1 (%z2
,%2));
3495 if (NON_STACK_REG_P (operands
[1]))
3497 output_op_from_reg (operands
[1], strcat (buf
, AS1 (r
%z0
,%1)));
3500 else if (NON_STACK_REG_P (operands
[2]))
3502 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3506 if (! STACK_REG_P (operands
[1]) || ! STACK_REG_P (operands
[2]))
3509 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3510 return strcat (buf
, AS2 (rp
,%2,%0));
3512 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1])))
3513 return strcat (buf
, AS2 (p
,%1,%0));
3515 if (STACK_TOP_P (operands
[0]))
3517 if (STACK_TOP_P (operands
[1]))
3518 return strcat (buf
, AS2C (%y2
,%0));
3520 return strcat (buf
, AS2 (r
,%y1
,%0));
3522 else if (STACK_TOP_P (operands
[1]))
3523 return strcat (buf
, AS2C (%1,%0));
3525 return strcat (buf
, AS2 (r
,%2,%0));
3532 /* Output code for INSN to convert a float to a signed int. OPERANDS
3533 are the insn operands. The output may be SFmode or DFmode and the
3534 input operand may be SImode or DImode. As a special case, make sure
3535 that the 387 stack top dies if the output mode is DImode, because the
3536 hardware requires this. */
3539 output_fix_trunc (insn
, operands
)
3543 int stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
3546 if (! STACK_TOP_P (operands
[1]) ||
3547 (GET_MODE (operands
[0]) == DImode
&& ! stack_top_dies
))
3550 xops
[0] = GEN_INT (12);
3551 xops
[1] = operands
[4];
3553 output_asm_insn (AS1 (fnstc
%W2
,%2), operands
);
3554 output_asm_insn (AS2 (mov
%L2
,%2,%4), operands
);
3555 output_asm_insn (AS2 (mov
%B1
,%0,%h1
), xops
);
3556 output_asm_insn (AS2 (mov
%L4
,%4,%3), operands
);
3557 output_asm_insn (AS1 (fldc
%W3
,%3), operands
);
3559 if (NON_STACK_REG_P (operands
[0]))
3560 output_to_reg (operands
[0], stack_top_dies
);
3561 else if (GET_CODE (operands
[0]) == MEM
)
3564 output_asm_insn (AS1 (fistp
%z0
,%0), operands
);
3566 output_asm_insn (AS1 (fist
%z0
,%0), operands
);
3571 return AS1 (fldc
%W2
,%2);
3574 /* Output code for INSN to compare OPERANDS. The two operands might
3575 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3576 expression. If the compare is in mode CCFPEQmode, use an opcode that
3577 will not fault if a qNaN is present. */
3580 output_float_compare (insn
, operands
)
3585 rtx body
= XVECEXP (PATTERN (insn
), 0, 0);
3586 int unordered_compare
= GET_MODE (SET_SRC (body
)) == CCFPEQmode
;
3587 int target_fcomi
= TARGET_CMOVE
&& STACK_REG_P (operands
[1]);
3590 if (! STACK_TOP_P (operands
[0]))
3593 operands
[0] = operands
[1];
3595 cc_status
.flags
|= CC_REVERSED
;
3598 if (! STACK_TOP_P (operands
[0]))
3601 stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
3603 if (STACK_REG_P (operands
[1])
3605 && find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1]))
3606 && REGNO (operands
[1]) != FIRST_STACK_REG
)
3608 /* If both the top of the 387 stack dies, and the other operand
3609 is also a stack register that dies, then this must be a
3610 `fcompp' float compare */
3612 if (unordered_compare
)
3613 output_asm_insn ("fucompp", operands
);
3615 output_asm_insn ("fcompp", operands
);
3619 static char buf
[100];
3621 /* Decide if this is the integer or float compare opcode, or the
3622 unordered float compare. */
3624 if (unordered_compare
)
3625 strcpy (buf
, target_fcomi
? "fucomi" : "fucom");
3626 else if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_FLOAT
)
3627 strcpy (buf
, target_fcomi
? "fcomi" : "fcom");
3629 strcpy (buf
, "ficom");
3631 /* Modify the opcode if the 387 stack is to be popped. */
3636 if (NON_STACK_REG_P (operands
[1]))
3637 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
3638 else if (target_fcomi
)
3642 xops
[0] = operands
[0];
3643 xops
[1] = operands
[1];
3644 xops
[2] = operands
[0];
3646 output_asm_insn (strcat (buf
, AS2 (%z1
,%y1
,%2)), xops
);
3650 output_asm_insn (strcat (buf
, AS1 (%z1
,%y1
)), operands
);
3653 /* Now retrieve the condition code. */
3655 return output_fp_cc0_set (insn
);
3658 /* Output opcodes to transfer the results of FP compare or test INSN
3659 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
3660 result of the compare or test is unordered, no comparison operator
3661 succeeds except NE. Return an output template, if any. */
3664 output_fp_cc0_set (insn
)
3668 rtx unordered_label
;
3672 xops
[0] = gen_rtx (REG
, HImode
, 0);
3673 output_asm_insn (AS1 (fnsts
%W0
,%0), xops
);
3675 if (! TARGET_IEEE_FP
)
3677 if (!(cc_status
.flags
& CC_REVERSED
))
3679 next
= next_cc0_user (insn
);
3681 if (GET_CODE (next
) == JUMP_INSN
3682 && GET_CODE (PATTERN (next
)) == SET
3683 && SET_DEST (PATTERN (next
)) == pc_rtx
3684 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
3686 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
3688 else if (GET_CODE (PATTERN (next
)) == SET
)
3690 code
= GET_CODE (SET_SRC (PATTERN (next
)));
3696 if (code
== GT
|| code
== LT
|| code
== EQ
|| code
== NE
3697 || code
== LE
|| code
== GE
)
3698 { /* We will test eax directly */
3699 cc_status
.flags
|= CC_TEST_AX
;
3706 next
= next_cc0_user (insn
);
3707 if (next
== NULL_RTX
)
3710 if (GET_CODE (next
) == JUMP_INSN
3711 && GET_CODE (PATTERN (next
)) == SET
3712 && SET_DEST (PATTERN (next
)) == pc_rtx
3713 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
3715 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
3717 else if (GET_CODE (PATTERN (next
)) == SET
)
3719 if (GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
3720 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
3721 else code
= GET_CODE (SET_SRC (PATTERN (next
)));
3726 xops
[0] = gen_rtx (REG
, QImode
, 0);
3731 xops
[1] = GEN_INT (0x45);
3732 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3737 xops
[1] = GEN_INT (0x45);
3738 xops
[2] = GEN_INT (0x01);
3739 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3740 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
3745 xops
[1] = GEN_INT (0x05);
3746 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3751 xops
[1] = GEN_INT (0x45);
3752 xops
[2] = GEN_INT (0x40);
3753 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3754 output_asm_insn (AS1 (dec
%B0
,%h0
), xops
);
3755 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
3760 xops
[1] = GEN_INT (0x45);
3761 xops
[2] = GEN_INT (0x40);
3762 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3763 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
3768 xops
[1] = GEN_INT (0x44);
3769 xops
[2] = GEN_INT (0x40);
3770 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3771 output_asm_insn (AS2 (xor%B0
,%2,%h0
), xops
);
3785 #define MAX_386_STACK_LOCALS 2
3787 static rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
3789 /* Define the structure for the machine field in struct function. */
3790 struct machine_function
3792 rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
3795 /* Functions to save and restore i386_stack_locals.
3796 These will be called, via pointer variables,
3797 from push_function_context and pop_function_context. */
3800 save_386_machine_status (p
)
3803 p
->machine
= (struct machine_function
*) xmalloc (sizeof i386_stack_locals
);
3804 bcopy ((char *) i386_stack_locals
, (char *) p
->machine
->i386_stack_locals
,
3805 sizeof i386_stack_locals
);
3809 restore_386_machine_status (p
)
3812 bcopy ((char *) p
->machine
->i386_stack_locals
, (char *) i386_stack_locals
,
3813 sizeof i386_stack_locals
);
3817 /* Clear stack slot assignments remembered from previous functions.
3818 This is called from INIT_EXPANDERS once before RTL is emitted for each
3822 clear_386_stack_locals ()
3824 enum machine_mode mode
;
3827 for (mode
= VOIDmode
; (int) mode
< (int) MAX_MACHINE_MODE
;
3828 mode
= (enum machine_mode
) ((int) mode
+ 1))
3829 for (n
= 0; n
< MAX_386_STACK_LOCALS
; n
++)
3830 i386_stack_locals
[(int) mode
][n
] = NULL_RTX
;
3832 /* Arrange to save and restore i386_stack_locals around nested functions. */
3833 save_machine_status
= save_386_machine_status
;
3834 restore_machine_status
= restore_386_machine_status
;
3837 /* Return a MEM corresponding to a stack slot with mode MODE.
3838 Allocate a new slot if necessary.
3840 The RTL for a function can have several slots available: N is
3841 which slot to use. */
3844 assign_386_stack_local (mode
, n
)
3845 enum machine_mode mode
;
3848 if (n
< 0 || n
>= MAX_386_STACK_LOCALS
)
3851 if (i386_stack_locals
[(int) mode
][n
] == NULL_RTX
)
3852 i386_stack_locals
[(int) mode
][n
]
3853 = assign_stack_local (mode
, GET_MODE_SIZE (mode
), 0);
3855 return i386_stack_locals
[(int) mode
][n
];
3861 enum machine_mode mode
;
3863 return (GET_CODE (op
) == MULT
);
3868 enum machine_mode mode
;
3870 return (GET_CODE (op
) == DIV
);
3875 /* Create a new copy of an rtx.
3876 Recursively copies the operands of the rtx,
3877 except for those few rtx codes that are sharable.
3878 Doesn't share CONST */
3886 register RTX_CODE code
;
3887 register char *format_ptr
;
3889 code
= GET_CODE (orig
);
3902 /* SCRATCH must be shared because they represent distinct values. */
3907 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
3908 a LABEL_REF, it isn't sharable. */
3909 if (GET_CODE (XEXP (orig
, 0)) == PLUS
3910 && GET_CODE (XEXP (XEXP (orig
, 0), 0)) == SYMBOL_REF
3911 && GET_CODE (XEXP (XEXP (orig
, 0), 1)) == CONST_INT
)
3915 /* A MEM with a constant address is not sharable. The problem is that
3916 the constant address may need to be reloaded. If the mem is shared,
3917 then reloading one copy of this mem will cause all copies to appear
3918 to have been reloaded. */
3921 copy
= rtx_alloc (code
);
3922 PUT_MODE (copy
, GET_MODE (orig
));
3923 copy
->in_struct
= orig
->in_struct
;
3924 copy
->volatil
= orig
->volatil
;
3925 copy
->unchanging
= orig
->unchanging
;
3926 copy
->integrated
= orig
->integrated
;
3928 copy
->is_spill_rtx
= orig
->is_spill_rtx
;
3930 format_ptr
= GET_RTX_FORMAT (GET_CODE (copy
));
3932 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (copy
)); i
++)
3934 switch (*format_ptr
++)
3937 XEXP (copy
, i
) = XEXP (orig
, i
);
3938 if (XEXP (orig
, i
) != NULL
)
3939 XEXP (copy
, i
) = copy_rtx (XEXP (orig
, i
));
3944 XEXP (copy
, i
) = XEXP (orig
, i
);
3949 XVEC (copy
, i
) = XVEC (orig
, i
);
3950 if (XVEC (orig
, i
) != NULL
)
3952 XVEC (copy
, i
) = rtvec_alloc (XVECLEN (orig
, i
));
3953 for (j
= 0; j
< XVECLEN (copy
, i
); j
++)
3954 XVECEXP (copy
, i
, j
) = copy_rtx (XVECEXP (orig
, i
, j
));
3959 XWINT (copy
, i
) = XWINT (orig
, i
);
3963 XINT (copy
, i
) = XINT (orig
, i
);
3968 XSTR (copy
, i
) = XSTR (orig
, i
);
3979 /* try to rewrite a memory address to make it valid */
3981 rewrite_address (mem_rtx
)
3984 rtx index_rtx
, base_rtx
, offset_rtx
, scale_rtx
, ret_rtx
;
3986 int offset_adjust
= 0;
3987 int was_only_offset
= 0;
3988 rtx mem_addr
= XEXP (mem_rtx
, 0);
3989 char *storage
= (char *) oballoc (0);
3991 int is_spill_rtx
= 0;
3993 in_struct
= MEM_IN_STRUCT_P (mem_rtx
);
3994 is_spill_rtx
= RTX_IS_SPILL_P (mem_rtx
);
3996 if (GET_CODE (mem_addr
) == PLUS
&&
3997 GET_CODE (XEXP (mem_addr
, 1)) == PLUS
&&
3998 GET_CODE (XEXP (XEXP (mem_addr
, 1), 0)) == REG
)
3999 { /* this part is utilized by the combiner */
4001 gen_rtx (PLUS
, GET_MODE (mem_addr
),
4002 gen_rtx (PLUS
, GET_MODE (XEXP (mem_addr
, 1)),
4004 XEXP (XEXP (mem_addr
, 1), 0)),
4005 XEXP (XEXP (mem_addr
, 1), 1));
4006 if (memory_address_p (GET_MODE (mem_rtx
), ret_rtx
))
4008 XEXP (mem_rtx
, 0) = ret_rtx
;
4009 RTX_IS_SPILL_P (ret_rtx
) = is_spill_rtx
;
4015 /* this part is utilized by loop.c */
4016 /* If the address contains PLUS (reg,const) and this pattern is invalid
4017 in this case - try to rewrite the address to make it valid intel1
4019 storage
= (char *) oballoc (0);
4020 index_rtx
= base_rtx
= offset_rtx
= NULL
;
4021 /* find the base index and offset elements of the memory address */
4022 if (GET_CODE (mem_addr
) == PLUS
)
4024 if (GET_CODE (XEXP (mem_addr
, 0)) == REG
)
4026 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4028 base_rtx
= XEXP (mem_addr
, 1);
4029 index_rtx
= XEXP (mem_addr
, 0);
4033 base_rtx
= XEXP (mem_addr
, 0);
4034 offset_rtx
= XEXP (mem_addr
, 1);
4037 else if (GET_CODE (XEXP (mem_addr
, 0)) == MULT
)
4039 index_rtx
= XEXP (mem_addr
, 0);
4040 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4042 base_rtx
= XEXP (mem_addr
, 1);
4046 offset_rtx
= XEXP (mem_addr
, 1);
4049 else if (GET_CODE (XEXP (mem_addr
, 0)) == PLUS
)
4052 if (GET_CODE (XEXP (XEXP (mem_addr
, 0), 0)) == PLUS
&&
4053 GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0)) == MULT
&&
4054 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 0)) == REG
&&
4055 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 1)) == CONST_INT
&&
4056 GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1)) == CONST_INT
&&
4057 GET_CODE (XEXP (XEXP (mem_addr
, 0), 1)) == REG
&&
4058 GET_CODE (XEXP (mem_addr
, 1)) == SYMBOL_REF
)
4060 index_rtx
= XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0);
4061 offset_rtx
= XEXP (mem_addr
, 1);
4062 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4063 offset_adjust
= INTVAL (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1));
4067 offset_rtx
= XEXP (mem_addr
, 1);
4068 index_rtx
= XEXP (XEXP (mem_addr
, 0), 0);
4069 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4072 else if (GET_CODE (XEXP (mem_addr
, 0)) == CONST_INT
)
4074 was_only_offset
= 1;
4077 offset_rtx
= XEXP (mem_addr
, 1);
4078 offset_adjust
= INTVAL (XEXP (mem_addr
, 0));
4079 if (offset_adjust
== 0)
4081 XEXP (mem_rtx
, 0) = offset_rtx
;
4082 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4092 else if (GET_CODE (mem_addr
) == MULT
)
4094 index_rtx
= mem_addr
;
4101 if (index_rtx
&& GET_CODE (index_rtx
) == MULT
)
4103 if (GET_CODE (XEXP (index_rtx
, 1)) != CONST_INT
)
4108 scale_rtx
= XEXP (index_rtx
, 1);
4109 scale
= INTVAL (scale_rtx
);
4110 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4112 /* now find which of the elements are invalid and try to fix them */
4113 if (index_rtx
&& GET_CODE (index_rtx
) == CONST_INT
&& base_rtx
== NULL
)
4115 offset_adjust
= INTVAL (index_rtx
) * scale
;
4116 if (offset_rtx
&& GET_CODE (offset_rtx
) == CONST
&&
4117 GET_CODE (XEXP (offset_rtx
, 0)) == PLUS
)
4119 if (GET_CODE (XEXP (XEXP (offset_rtx
, 0), 0)) == SYMBOL_REF
&&
4120 GET_CODE (XEXP (XEXP (offset_rtx
, 0), 1)) == CONST_INT
)
4122 offset_rtx
= copy_all_rtx (offset_rtx
);
4123 XEXP (XEXP (offset_rtx
, 0), 1) =
4124 gen_rtx (CONST_INT
, 0, INTVAL (XEXP (XEXP (offset_rtx
, 0), 1)) + offset_adjust
);
4125 if (!CONSTANT_P (offset_rtx
))
4132 else if (offset_rtx
&& GET_CODE (offset_rtx
) == SYMBOL_REF
)
4135 gen_rtx (CONST
, GET_MODE (offset_rtx
),
4136 gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4138 gen_rtx (CONST_INT
, 0, offset_adjust
)));
4139 if (!CONSTANT_P (offset_rtx
))
4145 else if (offset_rtx
&& GET_CODE (offset_rtx
) == CONST_INT
)
4147 offset_rtx
= gen_rtx (CONST_INT
, 0, INTVAL (offset_rtx
) + offset_adjust
);
4149 else if (!offset_rtx
)
4151 offset_rtx
= gen_rtx (CONST_INT
, 0, 0);
4153 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4154 XEXP (mem_rtx
, 0) = offset_rtx
;
4157 if (base_rtx
&& GET_CODE (base_rtx
) == PLUS
&&
4158 GET_CODE (XEXP (base_rtx
, 0)) == REG
&&
4159 GET_CODE (XEXP (base_rtx
, 1)) == CONST_INT
)
4161 offset_adjust
+= INTVAL (XEXP (base_rtx
, 1));
4162 base_rtx
= copy_all_rtx (XEXP (base_rtx
, 0));
4164 else if (base_rtx
&& GET_CODE (base_rtx
) == CONST_INT
)
4166 offset_adjust
+= INTVAL (base_rtx
);
4169 if (index_rtx
&& GET_CODE (index_rtx
) == PLUS
&&
4170 GET_CODE (XEXP (index_rtx
, 0)) == REG
&&
4171 GET_CODE (XEXP (index_rtx
, 1)) == CONST_INT
)
4173 offset_adjust
+= INTVAL (XEXP (index_rtx
, 1)) * scale
;
4174 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4178 if (!LEGITIMATE_INDEX_P (index_rtx
)
4179 && !(index_rtx
== stack_pointer_rtx
&& scale
== 1 && base_rtx
== NULL
))
4187 if (!LEGITIMATE_INDEX_P (base_rtx
) && GET_CODE (base_rtx
) != REG
)
4193 if (offset_adjust
!= 0)
4197 if (GET_CODE (offset_rtx
) == CONST
&&
4198 GET_CODE (XEXP (offset_rtx
, 0)) == PLUS
)
4200 if (GET_CODE (XEXP (XEXP (offset_rtx
, 0), 0)) == SYMBOL_REF
&&
4201 GET_CODE (XEXP (XEXP (offset_rtx
, 0), 1)) == CONST_INT
)
4203 offset_rtx
= copy_all_rtx (offset_rtx
);
4204 XEXP (XEXP (offset_rtx
, 0), 1) =
4205 gen_rtx (CONST_INT
, 0, INTVAL (XEXP (XEXP (offset_rtx
, 0), 1)) + offset_adjust
);
4206 if (!CONSTANT_P (offset_rtx
))
4213 else if (GET_CODE (offset_rtx
) == SYMBOL_REF
)
4216 gen_rtx (CONST
, GET_MODE (offset_rtx
),
4217 gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4219 gen_rtx (CONST_INT
, 0, offset_adjust
)));
4220 if (!CONSTANT_P (offset_rtx
))
4226 else if (GET_CODE (offset_rtx
) == CONST_INT
)
4228 offset_rtx
= gen_rtx (CONST_INT
, 0, INTVAL (offset_rtx
) + offset_adjust
);
4238 offset_rtx
= gen_rtx (CONST_INT
, 0, offset_adjust
);
4246 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4247 INTVAL (offset_rtx
) == 0)
4249 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
),
4250 gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
,
4256 ret_rtx
= gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4257 gen_rtx (PLUS
, GET_MODE (base_rtx
),
4258 gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
,
4266 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4267 INTVAL (offset_rtx
) == 0)
4269 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
), index_rtx
, base_rtx
);
4273 ret_rtx
= gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4274 gen_rtx (PLUS
, GET_MODE (index_rtx
), index_rtx
,
4284 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4285 INTVAL (offset_rtx
) == 0)
4287 ret_rtx
= gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
, scale_rtx
);
4292 gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4293 gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
,
4300 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4301 INTVAL (offset_rtx
) == 0)
4303 ret_rtx
= index_rtx
;
4307 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
), index_rtx
, offset_rtx
);
4316 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4317 INTVAL (offset_rtx
) == 0)
4323 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
), base_rtx
, offset_rtx
);
4326 else if (was_only_offset
)
4328 ret_rtx
= offset_rtx
;
4336 XEXP (mem_rtx
, 0) = ret_rtx
;
4337 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4349 /* return 1 if the first insn to set cc before insn also sets the register
4350 reg_rtx - otherwise return 0 */
4352 last_to_set_cc (reg_rtx
, insn
)
4355 rtx prev_insn
= PREV_INSN (insn
);
4359 if (GET_CODE (prev_insn
) == NOTE
)
4362 else if (GET_CODE (prev_insn
) == INSN
)
4364 if (GET_CODE (PATTERN (prev_insn
)) != SET
)
4367 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn
)), reg_rtx
))
4369 if (sets_condition_code (SET_SRC (PATTERN (prev_insn
))))
4375 else if (!doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn
))))
4382 prev_insn
= PREV_INSN (prev_insn
);
4390 doesnt_set_condition_code (pat
)
4393 switch (GET_CODE (pat
))
4407 sets_condition_code (pat
)
4410 switch (GET_CODE (pat
))
4434 str_immediate_operand (op
, mode
)
4436 enum machine_mode mode
;
4438 if (GET_CODE (op
) == CONST_INT
&& INTVAL (op
) <= 32 && INTVAL (op
) >= 0)
4450 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4451 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4452 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4453 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
))
4462 Return 1 if the mode of the SET_DEST of insn is floating point
4463 and it is not an fld or a move from memory to memory.
4464 Otherwise return 0 */
4469 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4470 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4471 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4472 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4473 && GET_CODE (SET_DEST (PATTERN (insn
))) == REG
4474 && REGNO (SET_DEST (PATTERN (insn
))) >= FIRST_FLOAT_REG
4475 && GET_CODE (SET_SRC (insn
)) != MEM
)
4484 Return 1 if the mode of the SET_DEST floating point and is memory
4485 and the source is a register.
4491 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4492 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4493 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4494 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4495 && GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
4496 && GET_CODE (SET_SRC (PATTERN (insn
))) == REG
)
4506 Return 1 if dep_insn sets a register which insn uses as a base
4507 or index to reference memory.
4508 otherwise return 0 */
4511 agi_dependent (insn
, dep_insn
)
4514 if (GET_CODE (dep_insn
) == INSN
4515 && GET_CODE (PATTERN (dep_insn
)) == SET
4516 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == REG
)
4518 return (reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn
)), insn
));
4521 if (GET_CODE (dep_insn
) == INSN
&& GET_CODE (PATTERN (dep_insn
)) == SET
4522 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == MEM
4523 && push_operand (SET_DEST (PATTERN (dep_insn
)),
4524 GET_MODE (SET_DEST (PATTERN (dep_insn
)))))
4526 return (reg_mentioned_in_mem (stack_pointer_rtx
, insn
));
4534 Return 1 if reg is used in rtl as a base or index for a memory ref
4535 otherwise return 0. */
4538 reg_mentioned_in_mem (reg
, rtl
)
4543 register enum rtx_code code
;
4548 code
= GET_CODE (rtl
);
4566 if (code
== MEM
&& reg_mentioned_p (reg
, rtl
))
4569 fmt
= GET_RTX_FORMAT (code
);
4570 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4575 for (j
= XVECLEN (rtl
, i
) - 1; j
>= 0; j
--)
4577 if (reg_mentioned_in_mem (reg
, XVECEXP (rtl
, i
, j
)))
4582 else if (fmt
[i
] == 'e' && reg_mentioned_in_mem (reg
, XEXP (rtl
, i
)))
4589 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4591 operands[0] = result, initialized with the startaddress
4592 operands[1] = alignment of the address.
4593 operands[2] = scratch register, initialized with the startaddress when
4594 not aligned, otherwise undefined
4596 This is just the body. It needs the initialisations mentioned above and
4597 some address computing at the end. These things are done in i386.md. */
4600 output_strlen_unroll (operands
)
4605 xops
[0] = operands
[0]; /* Result */
4606 /* operands[1]; * Alignment */
4607 xops
[1] = operands
[2]; /* Scratch */
4608 xops
[2] = GEN_INT (0);
4609 xops
[3] = GEN_INT (2);
4610 xops
[4] = GEN_INT (3);
4611 xops
[5] = GEN_INT (4);
4612 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4613 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4614 xops
[8] = gen_label_rtx (); /* label of main loop */
4615 if(TARGET_USE_Q_REG
&& QI_REG_P (xops
[1]))
4616 xops
[9] = gen_label_rtx (); /* pentium optimisation */
4617 xops
[10] = gen_label_rtx (); /* end label 2 */
4618 xops
[11] = gen_label_rtx (); /* end label 1 */
4619 xops
[12] = gen_label_rtx (); /* end label */
4620 /* xops[13] * Temporary used */
4621 xops
[14] = GEN_INT (0xff);
4622 xops
[15] = GEN_INT (0xff00);
4623 xops
[16] = GEN_INT (0xff0000);
4624 xops
[17] = GEN_INT (0xff000000);
4626 /* Loop to check 1..3 bytes for null to get an aligned pointer */
4628 /* is there a known alignment and is it less then 4 */
4629 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) < 4)
4631 /* is there a known alignment and is it not 2 */
4632 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
4634 xops
[6] = gen_label_rtx (); /* label when aligned to 3-byte */
4635 xops
[7] = gen_label_rtx (); /* label when aligned to 2-byte */
4637 /* leave just the 3 lower bits */
4638 /* if this is a q-register, then the high part is used later */
4639 /* therefore user andl rather than andb */
4640 output_asm_insn (AS2 (and%L1
,%4,%1), xops
);
4641 /* is aligned to 4-byte adress when zero */
4642 output_asm_insn (AS1 (je
,%l8
), xops
);
4643 /* side-effect even Parity when %eax == 3 */
4644 output_asm_insn (AS1 (jp
,%6), xops
);
4646 /* is it aligned to 2 bytes ? */
4647 if (QI_REG_P (xops
[1]))
4648 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
4650 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
4651 output_asm_insn (AS1 (je
,%7), xops
);
4655 /* since the alignment is 2, we have to check 2 or 0 bytes */
4657 /* check if is aligned to 4 - byte */
4658 output_asm_insn (AS2 (and%L1
,%3,%1), xops
);
4659 /* is aligned to 4-byte adress when zero */
4660 output_asm_insn (AS1 (je
,%l8
), xops
);
4663 xops
[13] = gen_rtx (MEM
, QImode
, xops
[0]);
4664 /* now, compare the bytes */
4665 /* compare with the high part of a q-reg gives shorter code */
4666 if (QI_REG_P (xops
[1]))
4668 /* compare the first n unaligned byte on a byte per byte basis */
4669 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4670 /* when zero we reached the end */
4671 output_asm_insn (AS1 (je
,%l12
), xops
);
4672 /* increment the address */
4673 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4675 /* not needed with an alignment of 2 */
4676 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
4678 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[7]));
4679 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4680 output_asm_insn (AS1 (je
,%l12
), xops
);
4681 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4683 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[6]));
4685 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4689 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4690 output_asm_insn (AS1 (je
,%l12
), xops
);
4691 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4693 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[7]));
4694 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4695 output_asm_insn (AS1 (je
,%l12
), xops
);
4696 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4698 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[6]));
4699 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4701 output_asm_insn (AS1 (je
,%l12
), xops
);
4702 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4705 /* Generate loop to check 4 bytes at a time */
4706 /* IMHO it is not a good idea to align this loop. It gives only */
4707 /* huge programs, but does not help to speed up */
4708 /* ASM_OUTPUT_LOOP_ALIGN (asm_out_file); */
4709 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[8]));
4711 xops
[13] = gen_rtx (MEM
, SImode
, xops
[0]);
4712 output_asm_insn (AS2 (mov
%L1
,%13,%1), xops
);
4714 if (QI_REG_P (xops
[1]))
4716 /* On i586 it is faster to combine the hi- and lo- part as
4717 a kind of lookahead. If anding both yields zero, then one
4718 of both *could* be zero, otherwise none of both is zero;
4719 this saves one instruction, on i486 this is slower
4720 tested with P-90, i486DX2-66, AMD486DX2-66 */
4723 output_asm_insn (AS2 (test
%B1
,%h1
,%b1
), xops
);
4724 output_asm_insn (AS1 (jne
,%l9
), xops
);
4727 /* check first byte */
4728 output_asm_insn (AS2 (test
%B1
,%b1
,%b1
), xops
);
4729 output_asm_insn (AS1 (je
,%l12
), xops
);
4731 /* check second byte */
4732 output_asm_insn (AS2 (test
%B1
,%h1
,%h1
), xops
);
4733 output_asm_insn (AS1 (je
,%l11
), xops
);
4736 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[9]));
4740 /* check first byte */
4741 output_asm_insn (AS2 (test
%L1
,%14,%1), xops
);
4742 output_asm_insn (AS1 (je
,%l12
), xops
);
4744 /* check second byte */
4745 output_asm_insn (AS2 (test
%L1
,%15,%1), xops
);
4746 output_asm_insn (AS1 (je
,%l11
), xops
);
4749 /* check third byte */
4750 output_asm_insn (AS2 (test
%L1
,%16,%1), xops
);
4751 output_asm_insn (AS1 (je
,%l10
), xops
);
4753 /* check fourth byte and increment address */
4754 output_asm_insn (AS2 (add
%L0
,%5,%0), xops
);
4755 output_asm_insn (AS2 (test
%L1
,%17,%1), xops
);
4756 output_asm_insn (AS1 (jne
,%l8
), xops
);
4758 /* now generate fixups when the compare stops within a 4-byte word */
4759 output_asm_insn (AS2 (sub
%L0
,%4,%0), xops
);
4761 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[10]));
4762 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4764 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[11]));
4765 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4767 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[12]));