1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 1992, 1994, 1995, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
#ifdef EXTRA_CONSTRAINT
/* If EXTRA_CONSTRAINT is defined, then the 'S'
   constraint in REG_CLASS_FROM_LETTER will no longer work, and various
   asm statements that need 'S' for class SIREG will break.  */
error EXTRA_CONSTRAINT conflicts with S constraint letter
/* The previous line used to be #error, but some compilers barf
   even if the conditional was untrue.  */
#endif
/* Type of an operand for ix86_{binary,unary}_operator_ok.
   NOTE(review): enumerator body was lost in extraction; the names below
   are the conventional ones for this classification — confirm against
   uses of ix86_binary_operator_ok.  */
enum reg_mem
{
  reg_p,
  mem_p,
  imm_p
};
54 /* Processor costs (relative to an add) */
55 struct processor_costs i386_cost
= { /* 386 specific costs */
56 1, /* cost of an add instruction (2 cycles) */
57 1, /* cost of a lea instruction */
58 3, /* variable shift costs */
59 2, /* constant shift costs */
60 6, /* cost of starting a multiply */
61 1, /* cost of multiply per each bit set */
62 23 /* cost of a divide/mod */
65 struct processor_costs i486_cost
= { /* 486 specific costs */
66 1, /* cost of an add instruction */
67 1, /* cost of a lea instruction */
68 3, /* variable shift costs */
69 2, /* constant shift costs */
70 12, /* cost of starting a multiply */
71 1, /* cost of multiply per each bit set */
72 40 /* cost of a divide/mod */
75 struct processor_costs pentium_cost
= {
76 1, /* cost of an add instruction */
77 1, /* cost of a lea instruction */
78 3, /* variable shift costs */
79 1, /* constant shift costs */
80 12, /* cost of starting a multiply */
81 1, /* cost of multiply per each bit set */
82 25 /* cost of a divide/mod */
85 struct processor_costs
*ix86_cost
= &pentium_cost
;
87 #define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))
89 extern FILE *asm_out_file
;
90 extern char *strcat ();
92 char *singlemove_string ();
93 char *output_move_const_single ();
94 char *output_fp_cc0_set ();
96 char *hi_reg_name
[] = HI_REGISTER_NAMES
;
97 char *qi_reg_name
[] = QI_REGISTER_NAMES
;
98 char *qi_high_reg_name
[] = QI_HIGH_REGISTER_NAMES
;
100 /* Array of the smallest class containing reg number REGNO, indexed by
101 REGNO. Used by REGNO_REG_CLASS in i386.h. */
103 enum reg_class regclass_map
[FIRST_PSEUDO_REGISTER
] =
106 AREG
, DREG
, CREG
, BREG
,
108 SIREG
, DIREG
, INDEX_REGS
, GENERAL_REGS
,
110 FP_TOP_REG
, FP_SECOND_REG
, FLOAT_REGS
, FLOAT_REGS
,
111 FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
,
116 /* Test and compare insns in i386.md store the information needed to
117 generate branch and scc insns here. */
119 struct rtx_def
*i386_compare_op0
= NULL_RTX
;
120 struct rtx_def
*i386_compare_op1
= NULL_RTX
;
121 struct rtx_def
*(*i386_compare_gen
)(), *(*i386_compare_gen_eq
)();
123 /* which cpu are we scheduling for */
124 enum processor_type ix86_cpu
;
126 /* which instruction set architecture to use. */
129 /* Strings to hold which cpu and instruction set architecture to use. */
130 char *ix86_cpu_string
; /* for -mcpu=<xxx> */
131 char *ix86_isa_string
; /* for -misa=<xxx> */
133 /* Register allocation order */
134 char *i386_reg_alloc_order
;
135 static char regs_allocated
[FIRST_PSEUDO_REGISTER
];
137 /* # of registers to use to pass arguments. */
138 char *i386_regparm_string
; /* # registers to use to pass args */
139 int i386_regparm
; /* i386_regparm_string as a number */
141 /* Alignment to use for loops and jumps */
142 char *i386_align_loops_string
; /* power of two alignment for loops */
143 char *i386_align_jumps_string
; /* power of two alignment for non-loop jumps */
144 char *i386_align_funcs_string
; /* power of two alignment for functions */
146 int i386_align_loops
; /* power of two alignment for loops */
147 int i386_align_jumps
; /* power of two alignment for non-loop jumps */
148 int i386_align_funcs
; /* power of two alignment for functions */
/* Sometimes certain combinations of command options do not make
   sense on a particular target machine.  You can define a macro
   `OVERRIDE_OPTIONS' to take account of this.  This macro, if
   defined, is executed once just after all the command options have
   been checked.

   Don't use this macro to turn on various extra optimizations for
   `-O'.  That is what `OPTIMIZATION_OPTIONS' is for.  */
168 char *name
; /* Canonical processor name. */
169 enum processor_type processor
; /* Processor type enum value. */
170 struct processor_costs
*cost
; /* Processor costs */
171 int target_enable
; /* Target flags to enable. */
172 int target_disable
; /* Target flags to disable. */
173 } processor_target_table
[]
174 = {{PROCESSOR_COMMON_STRING
, PROCESSOR_COMMON
, &i486_cost
, 0, 0},
175 {PROCESSOR_I386_STRING
, PROCESSOR_I386
, &i386_cost
, 0, 0},
176 {PROCESSOR_I486_STRING
, PROCESSOR_I486
, &i486_cost
, 0, 0},
177 {PROCESSOR_I586_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
178 {PROCESSOR_PENTIUM_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
179 {PROCESSOR_I686_STRING
, PROCESSOR_PENTIUMPRO
, &pentium_cost
, 0, 0},
180 {PROCESSOR_PENTIUMPRO_STRING
, PROCESSOR_PENTIUMPRO
, &pentium_cost
, 0, 0}};
182 int ptt_size
= sizeof (processor_target_table
) / sizeof (struct ptt
);
184 #ifdef SUBTARGET_OVERRIDE_OPTIONS
185 SUBTARGET_OVERRIDE_OPTIONS
;
188 /* Validate registers in register allocation order */
189 if (i386_reg_alloc_order
)
191 for (i
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
195 case 'a': regno
= 0; break;
196 case 'd': regno
= 1; break;
197 case 'c': regno
= 2; break;
198 case 'b': regno
= 3; break;
199 case 'S': regno
= 4; break;
200 case 'D': regno
= 5; break;
201 case 'B': regno
= 6; break;
203 default: fatal ("Register '%c' is unknown", ch
);
206 if (regs_allocated
[regno
])
207 fatal ("Register '%c' was already specified in the allocation order", ch
);
209 regs_allocated
[regno
] = 1;
213 /* Get the architectural level. */
214 if (ix86_isa_string
== (char *)0)
215 ix86_isa_string
= PROCESSOR_DEFAULT_STRING
;
217 for (i
= 0; i
< ptt_size
; i
++)
218 if (! strcmp (ix86_isa_string
, processor_target_table
[i
].name
))
220 ix86_isa
= processor_target_table
[i
].processor
;
221 if (ix86_cpu_string
== (char *)0)
222 ix86_cpu_string
= processor_target_table
[i
].name
;
228 error ("bad value (%s) for -misa= switch", ix86_isa_string
);
229 ix86_isa_string
= PROCESSOR_DEFAULT_STRING
;
230 ix86_isa
= PROCESSOR_DEFAULT
;
233 for (j
= 0; j
< ptt_size
; j
++)
234 if (! strcmp (ix86_cpu_string
, processor_target_table
[j
].name
))
236 ix86_cpu
= processor_target_table
[j
].processor
;
237 if (i
> j
&& (int)ix86_isa
>= (int)PROCESSOR_PENTIUMPRO
)
238 error ("-mcpu=%s does not support -march=%s", ix86_cpu_string
, ix86_isa_string
);
240 target_flags
|= processor_target_table
[j
].target_enable
;
241 target_flags
&= ~processor_target_table
[j
].target_disable
;
247 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string
);
248 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
249 ix86_cpu
= PROCESSOR_DEFAULT
;
252 /* Validate -mregparm= value */
253 if (i386_regparm_string
)
255 i386_regparm
= atoi (i386_regparm_string
);
256 if (i386_regparm
< 0 || i386_regparm
> REGPARM_MAX
)
257 fatal ("-mregparm=%d is not between 0 and %d", i386_regparm
, REGPARM_MAX
);
260 def_align
= (TARGET_386
) ? 2 : 4;
262 /* Validate -malign-loops= value, or provide default */
263 if (i386_align_loops_string
)
265 i386_align_loops
= atoi (i386_align_loops_string
);
266 if (i386_align_loops
< 0 || i386_align_loops
> MAX_CODE_ALIGN
)
267 fatal ("-malign-loops=%d is not between 0 and %d",
268 i386_align_loops
, MAX_CODE_ALIGN
);
271 i386_align_loops
= 2;
273 /* Validate -malign-jumps= value, or provide default */
274 if (i386_align_jumps_string
)
276 i386_align_jumps
= atoi (i386_align_jumps_string
);
277 if (i386_align_jumps
< 0 || i386_align_jumps
> MAX_CODE_ALIGN
)
278 fatal ("-malign-jumps=%d is not between 0 and %d",
279 i386_align_jumps
, MAX_CODE_ALIGN
);
282 i386_align_jumps
= def_align
;
284 /* Validate -malign-functions= value, or provide default */
285 if (i386_align_funcs_string
)
287 i386_align_funcs
= atoi (i386_align_funcs_string
);
288 if (i386_align_funcs
< 0 || i386_align_funcs
> MAX_CODE_ALIGN
)
289 fatal ("-malign-functions=%d is not between 0 and %d",
290 i386_align_funcs
, MAX_CODE_ALIGN
);
293 i386_align_funcs
= def_align
;
295 if (TARGET_OMIT_LEAF_FRAME_POINTER
) /* keep nonleaf frame pointers */
296 flag_omit_frame_pointer
= 1;
298 /* pic references don't explicitly mention pic_offset_table_rtx */
300 target_flags
&= ~MASK_SCHEDULE_PROLOGUE
;
303 /* A C statement (sans semicolon) to choose the order in which to
304 allocate hard registers for pseudo-registers local to a basic
307 Store the desired register order in the array `reg_alloc_order'.
308 Element 0 should be the register to allocate first; element 1, the
309 next register; and so on.
311 The macro body should not assume anything about the contents of
312 `reg_alloc_order' before execution of the macro.
314 On most machines, it is not necessary to define this macro. */
317 order_regs_for_local_alloc ()
319 int i
, ch
, order
, regno
;
321 /* User specified the register allocation order */
322 if (i386_reg_alloc_order
)
324 for (i
= order
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
328 case 'a': regno
= 0; break;
329 case 'd': regno
= 1; break;
330 case 'c': regno
= 2; break;
331 case 'b': regno
= 3; break;
332 case 'S': regno
= 4; break;
333 case 'D': regno
= 5; break;
334 case 'B': regno
= 6; break;
337 reg_alloc_order
[order
++] = regno
;
340 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
342 if (!regs_allocated
[i
])
343 reg_alloc_order
[order
++] = i
;
347 /* If users did not specify a register allocation order, use natural order */
350 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
351 reg_alloc_order
[i
] = i
;
/* Called to adjust flag defaults for a given optimization LEVEL.  */

void
optimization_options (level)
     int level;
{
  /* For -O2, and beyond, turn off -fschedule-insns by default.  It tends to
     make the problem with not enough registers even worse */
#ifdef INSN_SCHEDULING
  if (level > 1)
    flag_schedule_insns = 0;
#endif
}
368 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
369 attribute for DECL. The attributes in ATTRIBUTES have previously been
373 i386_valid_decl_attribute_p (decl
, attributes
, identifier
, args
)
382 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
383 attribute for TYPE. The attributes in ATTRIBUTES have previously been
387 i386_valid_type_attribute_p (type
, attributes
, identifier
, args
)
393 if (TREE_CODE (type
) != FUNCTION_TYPE
394 && TREE_CODE (type
) != FIELD_DECL
395 && TREE_CODE (type
) != TYPE_DECL
)
398 /* Stdcall attribute says callee is responsible for popping arguments
399 if they are not variable. */
400 if (is_attribute_p ("stdcall", identifier
))
401 return (args
== NULL_TREE
);
403 /* Cdecl attribute says the callee is a normal C declaration */
404 if (is_attribute_p ("cdecl", identifier
))
405 return (args
== NULL_TREE
);
407 /* Regparm attribute specifies how many integer arguments are to be
408 passed in registers */
409 if (is_attribute_p ("regparm", identifier
))
413 if (!args
|| TREE_CODE (args
) != TREE_LIST
414 || TREE_CHAIN (args
) != NULL_TREE
415 || TREE_VALUE (args
) == NULL_TREE
)
418 cst
= TREE_VALUE (args
);
419 if (TREE_CODE (cst
) != INTEGER_CST
)
422 if (TREE_INT_CST_HIGH (cst
) != 0
423 || TREE_INT_CST_LOW (cst
) < 0
424 || TREE_INT_CST_LOW (cst
) > REGPARM_MAX
)
433 /* Return 0 if the attributes for two types are incompatible, 1 if they
434 are compatible, and 2 if they are nearly compatible (which causes a
435 warning to be generated). */
438 i386_comp_type_attributes (type1
, type2
)
446 /* Value is the number of bytes of arguments automatically
447 popped when returning from a subroutine call.
448 FUNDECL is the declaration node of the function (as a tree),
449 FUNTYPE is the data type of the function (as a tree),
450 or for a library call it is an identifier node for the subroutine name.
451 SIZE is the number of bytes of arguments passed on the stack.
453 On the 80386, the RTD insn may be used to pop them if the number
454 of args is fixed, but if the number is variable then the caller
455 must pop them all. RTD can't be used for library calls now
456 because the library is compiled with the Unix compiler.
457 Use of RTD is a selectable option, since it is incompatible with
458 standard Unix calling sequences. If the option is not selected,
459 the caller must always pop the args.
461 The attribute stdcall is equivalent to RTD on a per module basis. */
464 i386_return_pops_args (fundecl
, funtype
, size
)
469 int rtd
= TARGET_RTD
;
471 if (TREE_CODE (funtype
) == IDENTIFIER_NODE
)
474 /* Cdecl functions override -mrtd, and never pop the stack */
475 if (!lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype
))) {
477 /* Stdcall functions will pop the stack if not variable args */
478 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype
)))
482 && (TYPE_ARG_TYPES (funtype
) == NULL_TREE
483 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype
))) == void_type_node
)))
487 /* Lose any fake structure return argument */
488 if (aggregate_value_p (TREE_TYPE (funtype
)))
489 return GET_MODE_SIZE (Pmode
);
495 /* Argument support functions. */
497 /* Initialize a variable CUM of type CUMULATIVE_ARGS
498 for a call to a function whose data type is FNTYPE.
499 For a library call, FNTYPE is 0. */
502 init_cumulative_args (cum
, fntype
, libname
)
503 CUMULATIVE_ARGS
*cum
; /* argument info to initialize */
504 tree fntype
; /* tree ptr for function decl */
505 rtx libname
; /* SYMBOL_REF of library name or 0 */
507 static CUMULATIVE_ARGS zero_cum
;
508 tree param
, next_param
;
510 if (TARGET_DEBUG_ARG
)
512 fprintf (stderr
, "\ninit_cumulative_args (");
515 tree ret_type
= TREE_TYPE (fntype
);
516 fprintf (stderr
, "fntype code = %s, ret code = %s",
517 tree_code_name
[ (int)TREE_CODE (fntype
) ],
518 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
521 fprintf (stderr
, "no fntype");
524 fprintf (stderr
, ", libname = %s", XSTR (libname
, 0));
529 /* Set up the number of registers to use for passing arguments. */
530 cum
->nregs
= i386_regparm
;
533 tree attr
= lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype
));
535 cum
->nregs
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr
)));
538 /* Determine if this function has variable arguments. This is
539 indicated by the last argument being 'void_type_mode' if there
540 are no variable arguments. If there are variable arguments, then
541 we won't pass anything in registers */
545 for (param
= (fntype
) ? TYPE_ARG_TYPES (fntype
) : 0;
549 next_param
= TREE_CHAIN (param
);
550 if (next_param
== (tree
)0 && TREE_VALUE (param
) != void_type_node
)
555 if (TARGET_DEBUG_ARG
)
556 fprintf (stderr
, ", nregs=%d )\n", cum
->nregs
);
561 /* Update the data in CUM to advance over an argument
562 of mode MODE and data type TYPE.
563 (TYPE is null for libcalls where that information may not be available.) */
566 function_arg_advance (cum
, mode
, type
, named
)
567 CUMULATIVE_ARGS
*cum
; /* current arg information */
568 enum machine_mode mode
; /* current arg mode */
569 tree type
; /* type of the argument or 0 if lib support */
570 int named
; /* whether or not the argument was named */
572 int bytes
= (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
573 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
575 if (TARGET_DEBUG_ARG
)
577 "function_adv( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d )\n\n",
578 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
593 /* Define where to put the arguments to a function.
594 Value is zero to push the argument on the stack,
595 or a hard register in which to store the argument.
597 MODE is the argument's machine mode.
598 TYPE is the data type of the argument (as a tree).
599 This is null for libcalls where that information may
601 CUM is a variable of type CUMULATIVE_ARGS which gives info about
602 the preceding args and about the function being called.
603 NAMED is nonzero if this argument is a named parameter
604 (otherwise it is an extra parameter matching an ellipsis). */
607 function_arg (cum
, mode
, type
, named
)
608 CUMULATIVE_ARGS
*cum
; /* current arg information */
609 enum machine_mode mode
; /* current arg mode */
610 tree type
; /* type of the argument or 0 if lib support */
611 int named
; /* != 0 for normal args, == 0 for ... args */
614 int bytes
= (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
615 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
619 default: /* for now, pass fp/complex values on the stack */
627 if (words
<= cum
->nregs
)
628 ret
= gen_rtx (REG
, mode
, cum
->regno
);
632 if (TARGET_DEBUG_ARG
)
635 "function_arg( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d",
636 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
639 fprintf (stderr
, ", reg=%%e%s", reg_names
[ REGNO(ret
) ]);
641 fprintf (stderr
, ", stack");
643 fprintf (stderr
, " )\n");
649 /* For an arg passed partly in registers and partly in memory,
650 this is the number of registers used.
651 For args passed entirely in registers or entirely in memory, zero. */
654 function_arg_partial_nregs (cum
, mode
, type
, named
)
655 CUMULATIVE_ARGS
*cum
; /* current arg information */
656 enum machine_mode mode
; /* current arg mode */
657 tree type
; /* type of the argument or 0 if lib support */
658 int named
; /* != 0 for normal args, == 0 for ... args */
664 /* Output an insn whose source is a 386 integer register. SRC is the
665 rtx for the register, and TEMPLATE is the op-code template. SRC may
666 be either SImode or DImode.
668 The template will be output with operands[0] as SRC, and operands[1]
669 as a pointer to the top of the 386 stack. So a call from floatsidf2
670 would look like this:
672 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
674 where %z0 corresponds to the caller's operands[1], and is used to
675 emit the proper size suffix.
677 ??? Extend this to handle HImode - a 387 can load and store HImode
681 output_op_from_reg (src
, template)
686 int size
= GET_MODE_SIZE (GET_MODE (src
));
689 xops
[1] = AT_SP (Pmode
);
690 xops
[2] = GEN_INT (size
);
691 xops
[3] = stack_pointer_rtx
;
693 if (size
> UNITS_PER_WORD
)
696 if (size
> 2 * UNITS_PER_WORD
)
698 high
= gen_rtx (REG
, SImode
, REGNO (src
) + 2);
699 output_asm_insn (AS1 (push
%L0
,%0), &high
);
701 high
= gen_rtx (REG
, SImode
, REGNO (src
) + 1);
702 output_asm_insn (AS1 (push
%L0
,%0), &high
);
704 output_asm_insn (AS1 (push
%L0
,%0), &src
);
706 output_asm_insn (template, xops
);
708 output_asm_insn (AS2 (add
%L3
,%2,%3), xops
);
711 /* Output an insn to pop an value from the 387 top-of-stack to 386
712 register DEST. The 387 register stack is popped if DIES is true. If
713 the mode of DEST is an integer mode, a `fist' integer store is done,
714 otherwise a `fst' float store is done. */
717 output_to_reg (dest
, dies
)
722 int size
= GET_MODE_SIZE (GET_MODE (dest
));
724 xops
[0] = AT_SP (Pmode
);
725 xops
[1] = stack_pointer_rtx
;
726 xops
[2] = GEN_INT (size
);
729 output_asm_insn (AS2 (sub
%L1
,%2,%1), xops
);
731 if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_INT
)
734 output_asm_insn (AS1 (fistp
%z3
,%y0
), xops
);
736 output_asm_insn (AS1 (fist
%z3
,%y0
), xops
);
738 else if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_FLOAT
)
741 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
744 if (GET_MODE (dest
) == XFmode
)
746 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
747 output_asm_insn (AS1 (fld
%z3
,%y0
), xops
);
750 output_asm_insn (AS1 (fst
%z3
,%y0
), xops
);
756 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
758 if (size
> UNITS_PER_WORD
)
760 dest
= gen_rtx (REG
, SImode
, REGNO (dest
) + 1);
761 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
762 if (size
> 2 * UNITS_PER_WORD
)
764 dest
= gen_rtx (REG
, SImode
, REGNO (dest
) + 1);
765 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
771 singlemove_string (operands
)
775 if (GET_CODE (operands
[0]) == MEM
776 && GET_CODE (x
= XEXP (operands
[0], 0)) == PRE_DEC
)
778 if (XEXP (x
, 0) != stack_pointer_rtx
)
782 else if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
784 return output_move_const_single (operands
);
786 else if (GET_CODE (operands
[0]) == REG
|| GET_CODE (operands
[1]) == REG
)
787 return AS2 (mov
%L0
,%1,%0);
788 else if (CONSTANT_P (operands
[1]))
789 return AS2 (mov
%L0
,%1,%0);
792 output_asm_insn ("push%L1 %1", operands
);
797 /* Return a REG that occurs in ADDR with coefficient 1.
798 ADDR can be effectively incremented by incrementing REG. */
804 while (GET_CODE (addr
) == PLUS
)
806 if (GET_CODE (XEXP (addr
, 0)) == REG
)
807 addr
= XEXP (addr
, 0);
808 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
809 addr
= XEXP (addr
, 1);
810 else if (CONSTANT_P (XEXP (addr
, 0)))
811 addr
= XEXP (addr
, 1);
812 else if (CONSTANT_P (XEXP (addr
, 1)))
813 addr
= XEXP (addr
, 0);
817 if (GET_CODE (addr
) == REG
)
823 /* Output an insn to add the constant N to the register X. */
834 output_asm_insn (AS1 (dec
%L0
,%0), xops
);
836 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
839 xops
[1] = GEN_INT (-n
);
840 output_asm_insn (AS2 (sub
%L0
,%1,%0), xops
);
844 xops
[1] = GEN_INT (n
);
845 output_asm_insn (AS2 (add
%L0
,%1,%0), xops
);
850 /* Output assembler code to perform a doubleword move insn
851 with operands OPERANDS. */
854 output_move_double (operands
)
857 enum {REGOP
, OFFSOP
, MEMOP
, PUSHOP
, POPOP
, CNSTOP
, RNDOP
} optype0
, optype1
;
861 rtx addreg0
= 0, addreg1
= 0;
862 int dest_overlapped_low
= 0;
863 int size
= GET_MODE_SIZE (GET_MODE (operands
[0]));
868 /* First classify both operands. */
870 if (REG_P (operands
[0]))
872 else if (offsettable_memref_p (operands
[0]))
874 else if (GET_CODE (XEXP (operands
[0], 0)) == POST_INC
)
876 else if (GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
)
878 else if (GET_CODE (operands
[0]) == MEM
)
883 if (REG_P (operands
[1]))
885 else if (CONSTANT_P (operands
[1]))
887 else if (offsettable_memref_p (operands
[1]))
889 else if (GET_CODE (XEXP (operands
[1], 0)) == POST_INC
)
891 else if (GET_CODE (XEXP (operands
[1], 0)) == PRE_DEC
)
893 else if (GET_CODE (operands
[1]) == MEM
)
898 /* Check for the cases that the operand constraints are not
899 supposed to allow to happen. Abort if we get one,
900 because generating code for these cases is painful. */
902 if (optype0
== RNDOP
|| optype1
== RNDOP
)
905 /* If one operand is decrementing and one is incrementing
906 decrement the former register explicitly
907 and change that operand into ordinary indexing. */
909 if (optype0
== PUSHOP
&& optype1
== POPOP
)
911 /* ??? Can this ever happen on i386? */
912 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
913 asm_add (-size
, operands
[0]);
914 if (GET_MODE (operands
[1]) == XFmode
)
915 operands
[0] = gen_rtx (MEM
, XFmode
, operands
[0]);
916 else if (GET_MODE (operands
[0]) == DFmode
)
917 operands
[0] = gen_rtx (MEM
, DFmode
, operands
[0]);
919 operands
[0] = gen_rtx (MEM
, DImode
, operands
[0]);
923 if (optype0
== POPOP
&& optype1
== PUSHOP
)
925 /* ??? Can this ever happen on i386? */
926 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
927 asm_add (-size
, operands
[1]);
928 if (GET_MODE (operands
[1]) == XFmode
)
929 operands
[1] = gen_rtx (MEM
, XFmode
, operands
[1]);
930 else if (GET_MODE (operands
[1]) == DFmode
)
931 operands
[1] = gen_rtx (MEM
, DFmode
, operands
[1]);
933 operands
[1] = gen_rtx (MEM
, DImode
, operands
[1]);
937 /* If an operand is an unoffsettable memory ref, find a register
938 we can increment temporarily to make it refer to the second word. */
940 if (optype0
== MEMOP
)
941 addreg0
= find_addr_reg (XEXP (operands
[0], 0));
943 if (optype1
== MEMOP
)
944 addreg1
= find_addr_reg (XEXP (operands
[1], 0));
946 /* Ok, we can do one word at a time.
947 Normally we do the low-numbered word first,
948 but if either operand is autodecrementing then we
949 do the high-numbered word first.
951 In either case, set up in LATEHALF the operands to use
952 for the high-numbered word and in some cases alter the
953 operands in OPERANDS to be suitable for the low-numbered word. */
957 if (optype0
== REGOP
)
959 middlehalf
[0] = gen_rtx (REG
, SImode
, REGNO (operands
[0]) + 1);
960 latehalf
[0] = gen_rtx (REG
, SImode
, REGNO (operands
[0]) + 2);
962 else if (optype0
== OFFSOP
)
964 middlehalf
[0] = adj_offsettable_operand (operands
[0], 4);
965 latehalf
[0] = adj_offsettable_operand (operands
[0], 8);
969 middlehalf
[0] = operands
[0];
970 latehalf
[0] = operands
[0];
973 if (optype1
== REGOP
)
975 middlehalf
[1] = gen_rtx (REG
, SImode
, REGNO (operands
[1]) + 1);
976 latehalf
[1] = gen_rtx (REG
, SImode
, REGNO (operands
[1]) + 2);
978 else if (optype1
== OFFSOP
)
980 middlehalf
[1] = adj_offsettable_operand (operands
[1], 4);
981 latehalf
[1] = adj_offsettable_operand (operands
[1], 8);
983 else if (optype1
== CNSTOP
)
985 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
987 REAL_VALUE_TYPE r
; long l
[3];
989 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
990 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r
, l
);
991 operands
[1] = GEN_INT (l
[0]);
992 middlehalf
[1] = GEN_INT (l
[1]);
993 latehalf
[1] = GEN_INT (l
[2]);
995 else if (CONSTANT_P (operands
[1]))
996 /* No non-CONST_DOUBLE constant should ever appear here. */
1001 middlehalf
[1] = operands
[1];
1002 latehalf
[1] = operands
[1];
1005 else /* size is not 12: */
1007 if (optype0
== REGOP
)
1008 latehalf
[0] = gen_rtx (REG
, SImode
, REGNO (operands
[0]) + 1);
1009 else if (optype0
== OFFSOP
)
1010 latehalf
[0] = adj_offsettable_operand (operands
[0], 4);
1012 latehalf
[0] = operands
[0];
1014 if (optype1
== REGOP
)
1015 latehalf
[1] = gen_rtx (REG
, SImode
, REGNO (operands
[1]) + 1);
1016 else if (optype1
== OFFSOP
)
1017 latehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1018 else if (optype1
== CNSTOP
)
1019 split_double (operands
[1], &operands
[1], &latehalf
[1]);
1021 latehalf
[1] = operands
[1];
1024 /* If insn is effectively movd N (sp),-(sp) then we will do the
1025 high word first. We should use the adjusted operand 1
1026 (which is N+4 (sp) or N+8 (sp))
1027 for the low word and middle word as well,
1028 to compensate for the first decrement of sp. */
1029 if (optype0
== PUSHOP
1030 && REGNO (XEXP (XEXP (operands
[0], 0), 0)) == STACK_POINTER_REGNUM
1031 && reg_overlap_mentioned_p (stack_pointer_rtx
, operands
[1]))
1032 middlehalf
[1] = operands
[1] = latehalf
[1];
1034 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1035 if the upper part of reg N does not appear in the MEM, arrange to
1036 emit the move late-half first. Otherwise, compute the MEM address
1037 into the upper part of N and use that as a pointer to the memory
1039 if (optype0
== REGOP
1040 && (optype1
== OFFSOP
|| optype1
== MEMOP
))
1042 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1043 && reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1045 /* If both halves of dest are used in the src memory address,
1046 compute the address into latehalf of dest. */
1048 xops
[0] = latehalf
[0];
1049 xops
[1] = XEXP (operands
[1], 0);
1050 output_asm_insn (AS2 (lea
%L0
,%a1
,%0), xops
);
1051 if( GET_MODE (operands
[1]) == XFmode
)
1054 operands
[1] = gen_rtx (MEM
, XFmode
, latehalf
[0]);
1055 middlehalf
[1] = adj_offsettable_operand (operands
[1], size
-8);
1056 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1060 operands
[1] = gen_rtx (MEM
, DImode
, latehalf
[0]);
1061 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1065 && reg_mentioned_p (middlehalf
[0], XEXP (operands
[1], 0)))
1067 /* Check for two regs used by both source and dest. */
1068 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1069 || reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1072 /* JRV says this can't happen: */
1073 if (addreg0
|| addreg1
)
1076 /* Only the middle reg conflicts; simply put it last. */
1077 output_asm_insn (singlemove_string (operands
), operands
);
1078 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1079 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1082 else if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0)))
1083 /* If the low half of dest is mentioned in the source memory
1084 address, the arrange to emit the move late half first. */
1085 dest_overlapped_low
= 1;
1088 /* If one or both operands autodecrementing,
1089 do the two words, high-numbered first. */
1091 /* Likewise, the first move would clobber the source of the second one,
1092 do them in the other order. This happens only for registers;
1093 such overlap can't happen in memory unless the user explicitly
1094 sets it up, and that is an undefined circumstance. */
1097 if (optype0 == PUSHOP || optype1 == PUSHOP
1098 || (optype0 == REGOP && optype1 == REGOP
1099 && REGNO (operands[0]) == REGNO (latehalf[1]))
1100 || dest_overlapped_low)
1102 if (optype0
== PUSHOP
|| optype1
== PUSHOP
1103 || (optype0
== REGOP
&& optype1
== REGOP
1104 && ((middlehalf
[1] && REGNO (operands
[0]) == REGNO (middlehalf
[1]))
1105 || REGNO (operands
[0]) == REGNO (latehalf
[1])))
1106 || dest_overlapped_low
)
1108 /* Make any unoffsettable addresses point at high-numbered word. */
1110 asm_add (size
-4, addreg0
);
1112 asm_add (size
-4, addreg1
);
1115 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1117 /* Undo the adds we just did. */
1119 asm_add (-4, addreg0
);
1121 asm_add (-4, addreg1
);
1125 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1127 asm_add (-4, addreg0
);
1129 asm_add (-4, addreg1
);
1132 /* Do low-numbered word. */
1133 return singlemove_string (operands
);
1136 /* Normal case: do the two words, low-numbered first. */
1138 output_asm_insn (singlemove_string (operands
), operands
);
1140 /* Do the middle one of the three words for long double */
1144 asm_add (4, addreg0
);
1146 asm_add (4, addreg1
);
1148 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1151 /* Make any unoffsettable addresses point at high-numbered word. */
1153 asm_add (4, addreg0
);
1155 asm_add (4, addreg1
);
1158 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1160 /* Undo the adds we just did. */
1162 asm_add (4-size
, addreg0
);
1164 asm_add (4-size
, addreg1
);
1170 #define MAX_TMPS 2 /* max temporary registers used */
1172 /* Output the appropriate code to move push memory on the stack */
1175 output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
)
1187 } tmp_info
[MAX_TMPS
];
1189 rtx src
= operands
[1];
1192 int stack_p
= reg_overlap_mentioned_p (stack_pointer_rtx
, src
);
1193 int stack_offset
= 0;
1197 if (!offsettable_memref_p (src
))
1198 fatal_insn ("Source is not offsettable", insn
);
1200 if ((length
& 3) != 0)
1201 fatal_insn ("Pushing non-word aligned size", insn
);
1203 /* Figure out which temporary registers we have available */
1204 for (i
= tmp_start
; i
< n_operands
; i
++)
1206 if (GET_CODE (operands
[i
]) == REG
)
1208 if (reg_overlap_mentioned_p (operands
[i
], src
))
1211 tmp_info
[ max_tmps
++ ].xops
[1] = operands
[i
];
1212 if (max_tmps
== MAX_TMPS
)
1218 for (offset
= length
- 4; offset
>= 0; offset
-= 4)
1220 xops
[0] = adj_offsettable_operand (src
, offset
+ stack_offset
);
1221 output_asm_insn (AS1(push
%L0
,%0), xops
);
1227 for (offset
= length
- 4; offset
>= 0; )
1229 for (num_tmps
= 0; num_tmps
< max_tmps
&& offset
>= 0; num_tmps
++)
1231 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%0,%1);
1232 tmp_info
[num_tmps
].push
= AS1(push
%L0
,%1);
1233 tmp_info
[num_tmps
].xops
[0] = adj_offsettable_operand (src
, offset
+ stack_offset
);
1237 for (i
= 0; i
< num_tmps
; i
++)
1238 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1240 for (i
= 0; i
< num_tmps
; i
++)
1241 output_asm_insn (tmp_info
[i
].push
, tmp_info
[i
].xops
);
1244 stack_offset
+= 4*num_tmps
;
1252 /* Output the appropriate code to move data between two memory locations */
1255 output_move_memory (operands
, insn
, length
, tmp_start
, n_operands
)
1266 } tmp_info
[MAX_TMPS
];
1268 rtx dest
= operands
[0];
1269 rtx src
= operands
[1];
1270 rtx qi_tmp
= NULL_RTX
;
1276 if (GET_CODE (dest
) == MEM
1277 && GET_CODE (XEXP (dest
, 0)) == PRE_INC
1278 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
)
1279 return output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
);
1281 if (!offsettable_memref_p (src
))
1282 fatal_insn ("Source is not offsettable", insn
);
1284 if (!offsettable_memref_p (dest
))
1285 fatal_insn ("Destination is not offsettable", insn
);
1287 /* Figure out which temporary registers we have available */
1288 for (i
= tmp_start
; i
< n_operands
; i
++)
1290 if (GET_CODE (operands
[i
]) == REG
)
1292 if ((length
& 1) != 0 && !qi_tmp
&& QI_REG_P (operands
[i
]))
1293 qi_tmp
= operands
[i
];
1295 if (reg_overlap_mentioned_p (operands
[i
], dest
))
1296 fatal_insn ("Temporary register overlaps the destination", insn
);
1298 if (reg_overlap_mentioned_p (operands
[i
], src
))
1299 fatal_insn ("Temporary register overlaps the source", insn
);
1301 tmp_info
[ max_tmps
++ ].xops
[2] = operands
[i
];
1302 if (max_tmps
== MAX_TMPS
)
1308 fatal_insn ("No scratch registers were found to do memory->memory moves", insn
);
1310 if ((length
& 1) != 0)
1313 fatal_insn ("No byte register found when moving odd # of bytes.", insn
);
1318 for (num_tmps
= 0; num_tmps
< max_tmps
; num_tmps
++)
1322 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%1,%2);
1323 tmp_info
[num_tmps
].store
= AS2(mov
%L0
,%2,%0);
1324 tmp_info
[num_tmps
].xops
[0] = adj_offsettable_operand (dest
, offset
);
1325 tmp_info
[num_tmps
].xops
[1] = adj_offsettable_operand (src
, offset
);
1329 else if (length
>= 2)
1331 tmp_info
[num_tmps
].load
= AS2(mov
%W0
,%1,%2);
1332 tmp_info
[num_tmps
].store
= AS2(mov
%W0
,%2,%0);
1333 tmp_info
[num_tmps
].xops
[0] = adj_offsettable_operand (dest
, offset
);
1334 tmp_info
[num_tmps
].xops
[1] = adj_offsettable_operand (src
, offset
);
1342 for (i
= 0; i
< num_tmps
; i
++)
1343 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1345 for (i
= 0; i
< num_tmps
; i
++)
1346 output_asm_insn (tmp_info
[i
].store
, tmp_info
[i
].xops
);
1351 xops
[0] = adj_offsettable_operand (dest
, offset
);
1352 xops
[1] = adj_offsettable_operand (src
, offset
);
1354 output_asm_insn (AS2(mov
%B0
,%1,%2), xops
);
1355 output_asm_insn (AS2(mov
%B0
,%2,%0), xops
);
1363 standard_80387_constant_p (x
)
1366 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1371 if (setjmp (handler
))
1374 set_float_handler (handler
);
1375 REAL_VALUE_FROM_CONST_DOUBLE (d
, x
);
1376 is0
= REAL_VALUES_EQUAL (d
, dconst0
) && !REAL_VALUE_MINUS_ZERO (d
);
1377 is1
= REAL_VALUES_EQUAL (d
, dconst1
);
1378 set_float_handler (NULL_PTR
);
1386 /* Note that on the 80387, other constants, such as pi,
1387 are much slower to load as standard constants
1388 than to load from doubles in memory! */
1395 output_move_const_single (operands
)
1398 if (FP_REG_P (operands
[0]))
1400 int conval
= standard_80387_constant_p (operands
[1]);
1408 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1410 REAL_VALUE_TYPE r
; long l
;
1412 if (GET_MODE (operands
[1]) == XFmode
)
1415 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1416 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
1417 operands
[1] = GEN_INT (l
);
1419 return singlemove_string (operands
);
1422 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1423 reference and a constant. */
1426 symbolic_operand (op
, mode
)
1428 enum machine_mode mode
;
1430 switch (GET_CODE (op
))
1437 return ((GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
1438 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
1439 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
1445 /* Test for a valid operand for a call instruction.
1446 Don't allow the arg pointer register or virtual regs
1447 since they may change into reg + const, which the patterns
1448 can't handle yet. */
1451 call_insn_operand (op
, mode
)
1453 enum machine_mode mode
;
1455 if (GET_CODE (op
) == MEM
1456 && ((CONSTANT_ADDRESS_P (XEXP (op
, 0))
1457 /* This makes a difference for PIC. */
1458 && general_operand (XEXP (op
, 0), Pmode
))
1459 || (GET_CODE (XEXP (op
, 0)) == REG
1460 && XEXP (op
, 0) != arg_pointer_rtx
1461 && !(REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1462 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
1467 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1471 expander_call_insn_operand (op
, mode
)
1473 enum machine_mode mode
;
1475 if (GET_CODE (op
) == MEM
1476 && (CONSTANT_ADDRESS_P (XEXP (op
, 0))
1477 || (GET_CODE (XEXP (op
, 0)) == REG
1478 && XEXP (op
, 0) != arg_pointer_rtx
1479 && !(REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1480 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
1485 /* Return 1 if OP is a comparison operator that can use the condition code
1486 generated by an arithmetic operation. */
1489 arithmetic_comparison_operator (op
, mode
)
1491 enum machine_mode mode
;
1495 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
1497 code
= GET_CODE (op
);
1498 if (GET_RTX_CLASS (code
) != '<')
1501 return (code
!= GT
&& code
!= LE
);
1504 /* Returns 1 if OP contains a symbol reference */
1507 symbolic_reference_mentioned_p (op
)
1513 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1516 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1517 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1523 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1524 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1527 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1534 /* Attempt to expand a binary operator. Make the expansion closer to the
1535 actual machine, then just general_operand, which will allow 3 separate
1536 memory references (one output, two input) in a single insn. Return
1537 whether the insn fails, or succeeds. */
1540 ix86_expand_binary_operator (code
, mode
, operands
)
1542 enum machine_mode mode
;
1549 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1550 if (GET_RTX_CLASS (code
) == 'c'
1551 && (rtx_equal_p (operands
[0], operands
[2])
1552 || immediate_operand (operands
[1], mode
)))
1554 rtx temp
= operands
[1];
1555 operands
[1] = operands
[2];
1559 /* If optimizing, copy to regs to improve CSE */
1560 if (TARGET_PSEUDO
&& optimize
&& ((reload_in_progress
| reload_completed
) == 0))
1562 if (GET_CODE (operands
[1]) == MEM
&& !rtx_equal_p (operands
[0], operands
[1]))
1563 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1565 if (GET_CODE (operands
[2]) == MEM
)
1566 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1568 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1570 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1571 emit_move_insn (temp
, operands
[1]);
1577 if (!ix86_binary_operator_ok (code
, mode
, operands
))
1579 /* If not optimizing, try to make a valid insn (optimize code previously did
1580 this above to improve chances of CSE) */
1582 if ((!TARGET_PSEUDO
|| !optimize
)
1583 && ((reload_in_progress
| reload_completed
) == 0)
1584 && (GET_CODE (operands
[1]) == MEM
|| GET_CODE (operands
[2]) == MEM
))
1587 if (GET_CODE (operands
[1]) == MEM
&& !rtx_equal_p (operands
[0], operands
[1]))
1589 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1593 if (GET_CODE (operands
[2]) == MEM
)
1595 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1599 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1601 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1602 emit_move_insn (temp
, operands
[1]);
1607 if (modified
&& !ix86_binary_operator_ok (code
, mode
, operands
))
1617 /* Return TRUE or FALSE depending on whether the binary operator meets the
1618 appropriate constraints. */
1621 ix86_binary_operator_ok (code
, mode
, operands
)
1623 enum machine_mode mode
;
1626 return (GET_CODE (operands
[1]) != MEM
|| GET_CODE (operands
[2]) != MEM
)
1627 && (GET_CODE (operands
[1]) != CONST_INT
|| GET_RTX_CLASS (code
) == 'c');
1630 /* Attempt to expand a unary operator. Make the expansion closer to the
1631 actual machine, then just general_operand, which will allow 2 separate
1632 memory references (one output, one input) in a single insn. Return
1633 whether the insn fails, or succeeds. */
1636 ix86_expand_unary_operator (code
, mode
, operands
)
1638 enum machine_mode mode
;
1643 /* If optimizing, copy to regs to improve CSE */
1646 && ((reload_in_progress
| reload_completed
) == 0)
1647 && GET_CODE (operands
[1]) == MEM
)
1649 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1652 if (!ix86_unary_operator_ok (code
, mode
, operands
))
1654 if ((!TARGET_PSEUDO
|| !optimize
)
1655 && ((reload_in_progress
| reload_completed
) == 0)
1656 && GET_CODE (operands
[1]) == MEM
)
1658 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1659 if (!ix86_unary_operator_ok (code
, mode
, operands
))
1669 /* Return TRUE or FALSE depending on whether the unary operator meets the
1670 appropriate constraints. */
1673 ix86_unary_operator_ok (code
, mode
, operands
)
1675 enum machine_mode mode
;
1683 static rtx pic_label_rtx
;
1685 /* This function generates code for -fpic that loads %ebx with
1686 with the return address of the caller and then returns. */
1688 asm_output_function_prefix (file
, name
)
1693 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1694 || current_function_uses_const_pool
);
1695 xops
[0] = pic_offset_table_rtx
;
1696 xops
[1] = stack_pointer_rtx
;
1698 /* deep branch prediction favors having a return for every call */
1699 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
1701 if (pic_label_rtx
== 0)
1702 pic_label_rtx
= (rtx
) gen_label_rtx ();
1703 ASM_OUTPUT_INTERNAL_LABEL (file
, "L", CODE_LABEL_NUMBER (pic_label_rtx
));
1704 output_asm_insn ("movl (%1),%0", xops
);
1705 output_asm_insn ("ret", xops
);
1709 /* Set up the stack and frame (if desired) for the function. */
1712 function_prologue (file
, size
)
1719 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1720 || current_function_uses_const_pool
);
1721 long tsize
= get_frame_size ();
1723 /* pic references don't explicitly mention pic_offset_table_rtx */
1724 if (TARGET_SCHEDULE_PROLOGUE
)
1727 xops
[0] = stack_pointer_rtx
;
1728 xops
[1] = frame_pointer_rtx
;
1729 xops
[2] = GEN_INT (tsize
);
1730 if (frame_pointer_needed
)
1732 output_asm_insn ("push%L1 %1", xops
);
1733 output_asm_insn (AS2 (mov
%L0
,%0,%1), xops
);
1737 output_asm_insn (AS2 (sub
%L0
,%2,%0), xops
);
1739 /* Note If use enter it is NOT reversed args.
1740 This one is not reversed from intel!!
1741 I think enter is slower. Also sdb doesn't like it.
1742 But if you want it the code is:
1744 xops[3] = const0_rtx;
1745 output_asm_insn ("enter %2,%3", xops);
1748 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
1749 for (regno
= limit
- 1; regno
>= 0; regno
--)
1750 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1751 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
1753 xops
[0] = gen_rtx (REG
, SImode
, regno
);
1754 output_asm_insn ("push%L0 %0", xops
);
1757 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
1759 xops
[0] = pic_offset_table_rtx
;
1760 if (pic_label_rtx
== 0)
1761 pic_label_rtx
= (rtx
) gen_label_rtx ();
1762 xops
[1] = pic_label_rtx
;
1764 output_asm_insn (AS1 (call
,%P1
), xops
);
1765 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops
);
1767 else if (pic_reg_used
)
1769 xops
[0] = pic_offset_table_rtx
;
1770 xops
[1] = (rtx
) gen_label_rtx ();
1772 output_asm_insn (AS1 (call
,%P1
), xops
);
1773 ASM_OUTPUT_INTERNAL_LABEL (file
, "L", CODE_LABEL_NUMBER (xops
[1]));
1774 output_asm_insn (AS1 (pop
%L0
,%0), xops
);
1775 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops
);
1779 /* This function generates the assembly code for function entry.
1780 FILE is an stdio stream to output the code to.
1781 SIZE is an int: how many units of temporary storage to allocate. */
1784 ix86_expand_prologue ()
1789 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1790 || current_function_uses_const_pool
);
1791 long tsize
= get_frame_size ();
1793 if (!TARGET_SCHEDULE_PROLOGUE
)
1796 xops
[0] = stack_pointer_rtx
;
1797 xops
[1] = frame_pointer_rtx
;
1798 xops
[2] = GEN_INT (tsize
);
1799 if (frame_pointer_needed
)
1801 emit_insn (gen_rtx (SET
, 0,
1802 gen_rtx (MEM
, SImode
,
1803 gen_rtx (PRE_DEC
, SImode
, stack_pointer_rtx
)),
1804 frame_pointer_rtx
));
1805 emit_move_insn (xops
[1], xops
[0]);
1809 emit_insn (gen_rtx (SET
, SImode
,
1811 gen_rtx (MINUS
, SImode
,
1815 /* Note If use enter it is NOT reversed args.
1816 This one is not reversed from intel!!
1817 I think enter is slower. Also sdb doesn't like it.
1818 But if you want it the code is:
1820 xops[3] = const0_rtx;
1821 output_asm_insn ("enter %2,%3", xops);
1824 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
1825 for (regno
= limit
- 1; regno
>= 0; regno
--)
1826 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1827 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
1829 xops
[0] = gen_rtx (REG
, SImode
, regno
);
1830 emit_insn (gen_rtx (SET
, 0,
1831 gen_rtx (MEM
, SImode
,
1832 gen_rtx (PRE_DEC
, SImode
, stack_pointer_rtx
)),
1836 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
1838 xops
[0] = pic_offset_table_rtx
;
1839 if (pic_label_rtx
== 0)
1840 pic_label_rtx
= (rtx
) gen_label_rtx ();
1841 xops
[1] = pic_label_rtx
;
1843 emit_insn (gen_prologue_get_pc (xops
[0], gen_rtx (CONST_INT
, Pmode
, CODE_LABEL_NUMBER(xops
[1]))));
1844 emit_insn (gen_prologue_set_got (xops
[0],
1845 gen_rtx (SYMBOL_REF
, Pmode
, "$_GLOBAL_OFFSET_TABLE_"),
1846 gen_rtx (CONST_INT
, Pmode
, CODE_LABEL_NUMBER(xops
[1]))));
1848 else if (pic_reg_used
)
1850 xops
[0] = pic_offset_table_rtx
;
1851 xops
[1] = (rtx
) gen_label_rtx ();
1853 emit_insn (gen_prologue_get_pc (xops
[0], gen_rtx (CONST_INT
, Pmode
, CODE_LABEL_NUMBER(xops
[1]))));
1854 emit_insn (gen_pop (xops
[0]));
1855 emit_insn (gen_prologue_set_got (xops
[0],
1856 gen_rtx (SYMBOL_REF
, Pmode
, "$_GLOBAL_OFFSET_TABLE_"),
1857 gen_rtx (CONST_INT
, Pmode
, CODE_LABEL_NUMBER (xops
[1]))));
1861 /* Restore function stack, frame, and registers. */
1864 function_epilogue (file
, size
)
1870 /* Return 1 if it is appropriate to emit `ret' instructions in the
1871 body of a function. Do this only if the epilogue is simple, needing a
1872 couple of insns. Prior to reloading, we can't tell how many registers
1873 must be saved, so return 0 then. Return 0 if there is no frame
1874 marker to de-allocate.
1876 If NON_SAVING_SETJMP is defined and true, then it is not possible
1877 for the epilogue to be simple, so return 0. This is a special case
1878 since NON_SAVING_SETJMP will not cause regs_ever_live to change
1879 until final, but jump_optimize may need to know sooner if a
1883 ix86_can_use_return_insn_p ()
1887 int reglimit
= (frame_pointer_needed
1888 ? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
1889 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1890 || current_function_uses_const_pool
);
1892 #ifdef NON_SAVING_SETJMP
1893 if (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
1897 if (! reload_completed
)
1900 for (regno
= reglimit
- 1; regno
>= 0; regno
--)
1901 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1902 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
1905 return nregs
== 0 || ! frame_pointer_needed
;
1909 /* This function generates the assembly code for function exit.
1910 FILE is an stdio stream to output the code to.
1911 SIZE is an int: how many units of temporary storage to deallocate. */
1914 ix86_expand_epilogue ()
1917 register int nregs
, limit
;
1920 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1921 || current_function_uses_const_pool
);
1922 long tsize
= get_frame_size ();
1924 /* Compute the number of registers to pop */
1926 limit
= (frame_pointer_needed
1927 ? FRAME_POINTER_REGNUM
1928 : STACK_POINTER_REGNUM
);
1932 for (regno
= limit
- 1; regno
>= 0; regno
--)
1933 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1934 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
1937 /* sp is often unreliable so we must go off the frame pointer,
1940 /* In reality, we may not care if sp is unreliable, because we can
1941 restore the register relative to the frame pointer. In theory,
1942 since each move is the same speed as a pop, and we don't need the
1943 leal, this is faster. For now restore multiple registers the old
1946 offset
= -tsize
- (nregs
* UNITS_PER_WORD
);
1948 xops
[2] = stack_pointer_rtx
;
1950 if (nregs
> 1 || ! frame_pointer_needed
)
1952 if (frame_pointer_needed
)
1954 xops
[0] = adj_offsettable_operand (AT_BP (QImode
), offset
);
1955 emit_insn (gen_movsi_lea (xops
[2], XEXP (xops
[0], 0)));
1956 /* output_asm_insn (AS2 (lea%L2,%0,%2), xops);*/
1959 for (regno
= 0; regno
< limit
; regno
++)
1960 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1961 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
1963 xops
[0] = gen_rtx (REG
, SImode
, regno
);
1964 emit_insn (gen_pop (xops
[0]));
1965 /* output_asm_insn ("pop%L0 %0", xops);*/
1969 for (regno
= 0; regno
< limit
; regno
++)
1970 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1971 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
1973 xops
[0] = gen_rtx (REG
, SImode
, regno
);
1974 xops
[1] = adj_offsettable_operand (AT_BP (Pmode
), offset
);
1975 emit_move_insn (xops
[0], xops
[1]);
1976 /* output_asm_insn (AS2 (mov%L0,%1,%0), xops);*/
1980 if (frame_pointer_needed
)
1982 /* If not an i386, mov & pop is faster than "leave". */
1984 if (TARGET_USE_LEAVE
)
1985 emit_insn (gen_leave());
1986 /* output_asm_insn ("leave", xops);*/
1989 xops
[0] = frame_pointer_rtx
;
1990 xops
[1] = stack_pointer_rtx
;
1991 emit_insn (gen_epilogue_set_stack_ptr());
1992 /* output_asm_insn (AS2 (mov%L2,%0,%2), xops);*/
1993 emit_insn (gen_pop (xops
[0]));
1994 /* output_asm_insn ("pop%L0 %0", xops);*/
1999 /* If there is no frame pointer, we must still release the frame. */
2001 xops
[0] = GEN_INT (tsize
);
2002 emit_insn (gen_rtx (SET
, SImode
,
2004 gen_rtx (PLUS
, SImode
,
2007 /* output_asm_insn (AS2 (add%L2,%0,%2), xops);*/
2010 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2011 if (profile_block_flag
== 2)
2013 FUNCTION_BLOCK_PROFILER_EXIT(file
);
2017 if (current_function_pops_args
&& current_function_args_size
)
2019 xops
[1] = GEN_INT (current_function_pops_args
);
2021 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2022 asked to pop more, pop return address, do explicit add, and jump
2023 indirectly to the caller. */
2025 if (current_function_pops_args
>= 32768)
2027 /* ??? Which register to use here? */
2028 xops
[0] = gen_rtx (REG
, SImode
, 2);
2029 emit_insn (gen_pop (xops
[0]));
2030 /* output_asm_insn ("pop%L0 %0", xops);*/
2031 emit_insn (gen_rtx (SET
, SImode
,
2033 gen_rtx (PLUS
, SImode
,
2036 /* output_asm_insn (AS2 (add%L2,%1,%2), xops);*/
2037 emit_jump_insn (xops
[0]);
2038 /* output_asm_insn ("jmp %*%0", xops);*/
2041 emit_jump_insn (gen_return_pop_internal (xops
[1]));
2042 /* output_asm_insn ("ret %1", xops);*/
2045 /* output_asm_insn ("ret", xops);*/
2046 emit_jump_insn (gen_return_internal ());
2050 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2051 that is a valid memory address for an instruction.
2052 The MODE argument is the machine mode for the MEM expression
2053 that wants to use this address.
2055 On x86, legitimate addresses are:
2056 base movl (base),reg
2057 displacement movl disp,reg
2058 base + displacement movl disp(base),reg
2059 index + base movl (base,index),reg
2060 (index + base) + displacement movl disp(base,index),reg
2061 index*scale movl (,index,scale),reg
2062 index*scale + disp movl disp(,index,scale),reg
2063 index*scale + base movl (base,index,scale),reg
2064 (index*scale + base) + disp movl disp(base,index,scale),reg
2066 In each case, scale can be 1, 2, 4, 8. */
2068 /* This is exactly the same as print_operand_addr, except that
2069 it recognizes addresses instead of printing them.
2071 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2072 convert common non-canonical forms to canonical form so that they will
2075 #define ADDR_INVALID(msg,insn) \
2077 if (TARGET_DEBUG_ADDR) \
2079 fprintf (stderr, msg); \
2085 legitimate_address_p (mode
, addr
, strict
)
2086 enum machine_mode mode
;
2090 rtx base
= NULL_RTX
;
2091 rtx indx
= NULL_RTX
;
2092 rtx scale
= NULL_RTX
;
2093 rtx disp
= NULL_RTX
;
2095 if (TARGET_DEBUG_ADDR
)
2098 "\n==========\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2099 GET_MODE_NAME (mode
), strict
);
2104 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == SUBREG
)
2105 base
= addr
; /* base reg */
2107 else if (GET_CODE (addr
) == PLUS
)
2109 rtx op0
= XEXP (addr
, 0);
2110 rtx op1
= XEXP (addr
, 1);
2111 enum rtx_code code0
= GET_CODE (op0
);
2112 enum rtx_code code1
= GET_CODE (op1
);
2114 if (code0
== REG
|| code0
== SUBREG
)
2116 if (code1
== REG
|| code1
== SUBREG
)
2118 indx
= op0
; /* index + base */
2124 base
= op0
; /* base + displacement */
2129 else if (code0
== MULT
)
2131 indx
= XEXP (op0
, 0);
2132 scale
= XEXP (op0
, 1);
2134 if (code1
== REG
|| code1
== SUBREG
)
2135 base
= op1
; /* index*scale + base */
2138 disp
= op1
; /* index*scale + disp */
2141 else if (code0
== PLUS
&& GET_CODE (XEXP (op0
, 0)) == MULT
)
2143 indx
= XEXP (XEXP (op0
, 0), 0); /* index*scale + base + disp */
2144 scale
= XEXP (XEXP (op0
, 0), 1);
2145 base
= XEXP (op0
, 1);
2149 else if (code0
== PLUS
)
2151 indx
= XEXP (op0
, 0); /* index + base + disp */
2152 base
= XEXP (op0
, 1);
2158 ADDR_INVALID ("PLUS subcode is not valid.\n", op0
);
2163 else if (GET_CODE (addr
) == MULT
)
2165 indx
= XEXP (addr
, 0); /* index*scale */
2166 scale
= XEXP (addr
, 1);
2170 disp
= addr
; /* displacement */
2172 /* Allow arg pointer and stack pointer as index if there is not scaling */
2173 if (base
&& indx
&& !scale
2174 && (indx
== arg_pointer_rtx
|| indx
== stack_pointer_rtx
))
2181 /* Validate base register */
2182 /* Don't allow SUBREG's here, it can lead to spill failures when the base
2183 is one word out of a two word structure, which is represented internally
2187 if (GET_CODE (base
) != REG
)
2189 ADDR_INVALID ("Base is not a register.\n", base
);
2193 if ((strict
&& !REG_OK_FOR_BASE_STRICT_P (base
))
2194 || (!strict
&& !REG_OK_FOR_BASE_NONSTRICT_P (base
)))
2196 ADDR_INVALID ("Base is not valid.\n", base
);
2201 /* Validate index register */
2202 /* Don't allow SUBREG's here, it can lead to spill failures when the index
2203 is one word out of a two word structure, which is represented internally
2207 if (GET_CODE (indx
) != REG
)
2209 ADDR_INVALID ("Index is not a register.\n", indx
);
2213 if ((strict
&& !REG_OK_FOR_INDEX_STRICT_P (indx
))
2214 || (!strict
&& !REG_OK_FOR_INDEX_NONSTRICT_P (indx
)))
2216 ADDR_INVALID ("Index is not valid.\n", indx
);
2221 abort (); /* scale w/o index invalid */
2223 /* Validate scale factor */
2226 HOST_WIDE_INT value
;
2228 if (GET_CODE (scale
) != CONST_INT
)
2230 ADDR_INVALID ("Scale is not valid.\n", scale
);
2234 value
= INTVAL (scale
);
2235 if (value
!= 1 && value
!= 2 && value
!= 4 && value
!= 8)
2237 ADDR_INVALID ("Scale is not a good multiplier.\n", scale
);
2242 /* Validate displacement
2243 Constant pool addresses must be handled special. They are
2244 considered legitimate addresses, but only if not used with regs.
2245 When printed, the output routines know to print the reference with the
2246 PIC reg, even though the PIC reg doesn't appear in the RTL. */
2249 if (GET_CODE (disp
) == SYMBOL_REF
2250 && CONSTANT_POOL_ADDRESS_P (disp
)
2255 else if (!CONSTANT_ADDRESS_P (disp
))
2257 ADDR_INVALID ("Displacement is not valid.\n", disp
);
2261 else if (GET_CODE (disp
) == CONST_DOUBLE
)
2263 ADDR_INVALID ("Displacement is a const_double.\n", disp
);
2267 else if (flag_pic
&& SYMBOLIC_CONST (disp
)
2268 && base
!= pic_offset_table_rtx
2269 && (indx
!= pic_offset_table_rtx
|| scale
!= NULL_RTX
))
2271 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp
);
2275 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp
)
2276 && (base
!= NULL_RTX
|| indx
!= NULL_RTX
))
2278 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n", disp
);
2283 if (TARGET_DEBUG_ADDR
)
2284 fprintf (stderr
, "Address is valid.\n");
2286 /* Everything looks valid, return true */
2291 /* Return a legitimate reference for ORIG (an address) using the
2292 register REG. If REG is 0, a new pseudo is generated.
2294 There are three types of references that must be handled:
2296 1. Global data references must load the address from the GOT, via
2297 the PIC reg. An insn is emitted to do this load, and the reg is
2300 2. Static data references must compute the address as an offset
2301 from the GOT, whose base is in the PIC reg. An insn is emitted to
2302 compute the address into a reg, and the reg is returned. Static
2303 data objects have SYMBOL_REF_FLAG set to differentiate them from
2304 global data objects.
2306 3. Constant pool addresses must be handled special. They are
2307 considered legitimate addresses, but only if not used with regs.
2308 When printed, the output routines know to print the reference with the
2309 PIC reg, even though the PIC reg doesn't appear in the RTL.
2311 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2312 reg also appears in the address (except for constant pool references,
2315 "switch" statements also require special handling when generating
2316 PIC code. See comments by the `casesi' insn in i386.md for details. */
2319 legitimize_pic_address (orig
, reg
)
2326 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
2328 if (GET_CODE (addr
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (addr
))
2333 reg
= gen_reg_rtx (Pmode
);
2335 if ((GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_FLAG (addr
))
2336 || GET_CODE (addr
) == LABEL_REF
)
2337 new = gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
, orig
);
2339 new = gen_rtx (MEM
, Pmode
,
2340 gen_rtx (PLUS
, Pmode
,
2341 pic_offset_table_rtx
, orig
));
2343 emit_move_insn (reg
, new);
2345 current_function_uses_pic_offset_table
= 1;
2348 else if (GET_CODE (addr
) == CONST
|| GET_CODE (addr
) == PLUS
)
2352 if (GET_CODE (addr
) == CONST
)
2354 addr
= XEXP (addr
, 0);
2355 if (GET_CODE (addr
) != PLUS
)
2359 if (XEXP (addr
, 0) == pic_offset_table_rtx
)
2363 reg
= gen_reg_rtx (Pmode
);
2365 base
= legitimize_pic_address (XEXP (addr
, 0), reg
);
2366 addr
= legitimize_pic_address (XEXP (addr
, 1),
2367 base
== reg
? NULL_RTX
: reg
);
2369 if (GET_CODE (addr
) == CONST_INT
)
2370 return plus_constant (base
, INTVAL (addr
));
2372 if (GET_CODE (addr
) == PLUS
&& CONSTANT_P (XEXP (addr
, 1)))
2374 base
= gen_rtx (PLUS
, Pmode
, base
, XEXP (addr
, 0));
2375 addr
= XEXP (addr
, 1);
2377 return gen_rtx (PLUS
, Pmode
, base
, addr
);
2383 /* Emit insns to move operands[1] into operands[0]. */
2386 emit_pic_move (operands
, mode
)
2388 enum machine_mode mode
;
2390 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
2392 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
2393 operands
[1] = (rtx
) force_reg (SImode
, operands
[1]);
2395 operands
[1] = legitimize_pic_address (operands
[1], temp
);
2399 /* Try machine-dependent ways of modifying an illegitimate address
2400 to be legitimate. If we find one, return the new, valid address.
2401 This macro is used in only one place: `memory_address' in explow.c.
2403 OLDX is the address as it was before break_out_memory_refs was called.
2404 In some cases it is useful to look at this to decide what needs to be done.
2406 MODE and WIN are passed so that this macro can use
2407 GO_IF_LEGITIMATE_ADDRESS.
2409 It is always safe for this macro to do nothing. It exists to recognize
2410 opportunities to optimize the output.
2412 For the 80386, we handle X+REG by loading X into a register R and
2413 using R+REG. R will go in a general reg and indexing will be used.
2414 However, if REG is a broken-out memory address or multiplication,
2415 nothing needs to be done because REG can certainly go in a general reg.
2417 When -fpic is used, special handling is needed for symbolic references.
2418 See comments by legitimize_pic_address in i386.c for details. */
2421 legitimize_address (x
, oldx
, mode
)
2424 enum machine_mode mode
;
2429 if (TARGET_DEBUG_ADDR
)
2431 fprintf (stderr
, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n", GET_MODE_NAME (mode
));
2435 if (flag_pic
&& SYMBOLIC_CONST (x
))
2436 return legitimize_pic_address (x
, 0);
2438 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2439 if (GET_CODE (x
) == ASHIFT
2440 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2441 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (x
, 1)))) < 4)
2444 x
= gen_rtx (MULT
, Pmode
,
2445 force_reg (Pmode
, XEXP (x
, 0)),
2446 GEN_INT (1 << log
));
2449 if (GET_CODE (x
) == PLUS
)
2451 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2452 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
2453 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2454 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 0), 1)))) < 4)
2457 XEXP (x
, 0) = gen_rtx (MULT
, Pmode
,
2458 force_reg (Pmode
, XEXP (XEXP (x
, 0), 0)),
2459 GEN_INT (1 << log
));
2462 if (GET_CODE (XEXP (x
, 1)) == ASHIFT
2463 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
2464 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 1), 1)))) < 4)
2467 XEXP (x
, 1) = gen_rtx (MULT
, Pmode
,
2468 force_reg (Pmode
, XEXP (XEXP (x
, 1), 0)),
2469 GEN_INT (1 << log
));
2472 /* Put multiply first if it isn't already */
2473 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2475 rtx tmp
= XEXP (x
, 0);
2476 XEXP (x
, 0) = XEXP (x
, 1);
2481 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2482 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2483 created by virtual register instantiation, register elimination, and
2484 similar optimizations. */
2485 if (GET_CODE (XEXP (x
, 0)) == MULT
&& GET_CODE (XEXP (x
, 1)) == PLUS
)
2488 x
= gen_rtx (PLUS
, Pmode
,
2489 gen_rtx (PLUS
, Pmode
, XEXP (x
, 0), XEXP (XEXP (x
, 1), 0)),
2490 XEXP (XEXP (x
, 1), 1));
2493 /* Canonicalize (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2494 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2495 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 0)) == PLUS
2496 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == MULT
2497 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == PLUS
2498 && CONSTANT_P (XEXP (x
, 1)))
2500 rtx constant
, other
;
2502 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2504 constant
= XEXP (x
, 1);
2505 other
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
2507 else if (GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 1)) == CONST_INT
)
2509 constant
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
2510 other
= XEXP (x
, 1);
2518 x
= gen_rtx (PLUS
, Pmode
,
2519 gen_rtx (PLUS
, Pmode
, XEXP (XEXP (x
, 0), 0),
2520 XEXP (XEXP (XEXP (x
, 0), 1), 0)),
2521 plus_constant (other
, INTVAL (constant
)));
2525 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
2528 if (GET_CODE (XEXP (x
, 0)) == MULT
)
2531 XEXP (x
, 0) = force_operand (XEXP (x
, 0), 0);
2534 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2537 XEXP (x
, 1) = force_operand (XEXP (x
, 1), 0);
2541 && GET_CODE (XEXP (x
, 1)) == REG
2542 && GET_CODE (XEXP (x
, 0)) == REG
)
2545 if (flag_pic
&& SYMBOLIC_CONST (XEXP (x
, 1)))
2548 x
= legitimize_pic_address (x
, 0);
2551 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
2554 if (GET_CODE (XEXP (x
, 0)) == REG
)
2556 register rtx temp
= gen_reg_rtx (Pmode
);
2557 register rtx val
= force_operand (XEXP (x
, 1), temp
);
2559 emit_move_insn (temp
, val
);
2565 else if (GET_CODE (XEXP (x
, 1)) == REG
)
2567 register rtx temp
= gen_reg_rtx (Pmode
);
2568 register rtx val
= force_operand (XEXP (x
, 0), temp
);
2570 emit_move_insn (temp
, val
);
2581 /* Print an integer constant expression in assembler syntax. Addition
2582 and subtraction are the only arithmetic that may appear in these
2583 expressions. FILE is the stdio stream to write to, X is the rtx, and
2584 CODE is the operand print code from the output string. */
2587 output_pic_addr_const (file
, x
, code
)
2594 switch (GET_CODE (x
))
2605 if (GET_CODE (x
) == SYMBOL_REF
)
2606 assemble_name (file
, XSTR (x
, 0));
2609 ASM_GENERATE_INTERNAL_LABEL (buf
, "L",
2610 CODE_LABEL_NUMBER (XEXP (x
, 0)));
2611 assemble_name (asm_out_file
, buf
);
2614 if (GET_CODE (x
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x
))
2615 fprintf (file
, "@GOTOFF(%%ebx)");
2616 else if (code
== 'P')
2617 fprintf (file
, "@PLT");
2618 else if (GET_CODE (x
) == LABEL_REF
)
2619 fprintf (file
, "@GOTOFF");
2620 else if (! SYMBOL_REF_FLAG (x
))
2621 fprintf (file
, "@GOT");
2623 fprintf (file
, "@GOTOFF");
2628 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (x
));
2629 assemble_name (asm_out_file
, buf
);
2633 fprintf (file
, "%d", INTVAL (x
));
2637 /* This used to output parentheses around the expression,
2638 but that does not work on the 386 (either ATT or BSD assembler). */
2639 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2643 if (GET_MODE (x
) == VOIDmode
)
2645 /* We can use %d if the number is <32 bits and positive. */
2646 if (CONST_DOUBLE_HIGH (x
) || CONST_DOUBLE_LOW (x
) < 0)
2647 fprintf (file
, "0x%x%08x",
2648 CONST_DOUBLE_HIGH (x
), CONST_DOUBLE_LOW (x
));
2650 fprintf (file
, "%d", CONST_DOUBLE_LOW (x
));
2653 /* We can't handle floating point constants;
2654 PRINT_OPERAND must handle them. */
2655 output_operand_lossage ("floating constant misused");
2659 /* Some assemblers need integer constants to appear last (eg masm). */
2660 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
2662 output_pic_addr_const (file
, XEXP (x
, 1), code
);
2663 if (INTVAL (XEXP (x
, 0)) >= 0)
2664 fprintf (file
, "+");
2665 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2669 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2670 if (INTVAL (XEXP (x
, 1)) >= 0)
2671 fprintf (file
, "+");
2672 output_pic_addr_const (file
, XEXP (x
, 1), code
);
2677 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2678 fprintf (file
, "-");
2679 output_pic_addr_const (file
, XEXP (x
, 1), code
);
2683 output_operand_lossage ("invalid expression as operand");
2688 /* Append the correct conditional move suffix which corresponds to CODE */
2691 put_condition_code (code
, file
)
2698 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
2704 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
2710 fputs ("ge", file
); return;
2712 fputs ("g", file
); return;
2714 fputs ("le", file
); return;
2716 fputs ("l", file
); return;
2718 fputs ("ae", file
); return;
2720 fputs ("a", file
); return;
2722 fputs ("be", file
); return;
2724 fputs ("b", file
); return;
2725 default: output_operand_lossage ("Invalid %%C operand");
2730 f -- float insn (print a CONST_DOUBLE as a float rather than in hex).
2731 D,L,W,B,Q,S -- print the opcode suffix for specified size of operand.
2732 C -- print opcode suffix for set/cmov insn.
2733 N -- like C, but print reversed condition
2734 R -- print the prefix for register names.
2735 z -- print the opcode suffix for the size of the current operand.
2736 * -- print a star (in certain assembler syntax)
2737 w -- print the operand as if it's a "word" (HImode) even if it isn't.
2738 c -- don't print special prefixes before constant operands.
2739 J -- print the appropriate jump operand.
2740 s -- print a shift double count, followed by the assemblers argument
2745 print_operand (file
, x
, code
)
2760 PUT_OP_SIZE (code
, 'l', file
);
2764 PUT_OP_SIZE (code
, 'w', file
);
2768 PUT_OP_SIZE (code
, 'b', file
);
2772 PUT_OP_SIZE (code
, 'l', file
);
2776 PUT_OP_SIZE (code
, 's', file
);
2780 PUT_OP_SIZE (code
, 't', file
);
2784 /* 387 opcodes don't get size suffixes if the operands are
2787 if (STACK_REG_P (x
))
2790 /* this is the size of op from size of operand */
2791 switch (GET_MODE_SIZE (GET_MODE (x
)))
2794 PUT_OP_SIZE ('B', 'b', file
);
2798 PUT_OP_SIZE ('W', 'w', file
);
2802 if (GET_MODE (x
) == SFmode
)
2804 PUT_OP_SIZE ('S', 's', file
);
2808 PUT_OP_SIZE ('L', 'l', file
);
2812 PUT_OP_SIZE ('T', 't', file
);
2816 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
)
2818 #ifdef GAS_MNEMONICS
2819 PUT_OP_SIZE ('Q', 'q', file
);
2822 PUT_OP_SIZE ('Q', 'l', file
); /* Fall through */
2826 PUT_OP_SIZE ('Q', 'l', file
);
2839 switch (GET_CODE (x
))
2841 /* These conditions are appropriate for testing the result
2842 of an arithmetic operation, not for a compare operation.
2843 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
2844 CC_Z_IN_NOT_C false and not floating point. */
2845 case NE
: fputs ("jne", file
); return;
2846 case EQ
: fputs ("je", file
); return;
2847 case GE
: fputs ("jns", file
); return;
2848 case LT
: fputs ("js", file
); return;
2849 case GEU
: fputs ("jmp", file
); return;
2850 case GTU
: fputs ("jne", file
); return;
2851 case LEU
: fputs ("je", file
); return;
2852 case LTU
: fputs ("#branch never", file
); return;
2854 /* no matching branches for GT nor LE */
2859 if (GET_CODE (x
) == CONST_INT
|| ! SHIFT_DOUBLE_OMITS_COUNT
)
2861 PRINT_OPERAND (file
, x
, 0);
2862 fputs (AS2C (,) + 1, file
);
2866 /* This is used by the conditional move instructions. */
2868 put_condition_code (GET_CODE (x
), file
);
2870 /* like above, but reverse condition */
2872 put_condition_code (reverse_condition (GET_CODE (x
)), file
);
2879 sprintf (str
, "invalid operand code `%c'", code
);
2880 output_operand_lossage (str
);
2884 if (GET_CODE (x
) == REG
)
2886 PRINT_REG (x
, code
, file
);
2888 else if (GET_CODE (x
) == MEM
)
2890 PRINT_PTR (x
, file
);
2891 if (CONSTANT_ADDRESS_P (XEXP (x
, 0)))
2894 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2896 output_addr_const (file
, XEXP (x
, 0));
2899 output_address (XEXP (x
, 0));
2901 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
2903 REAL_VALUE_TYPE r
; long l
;
2904 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
2905 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
2906 PRINT_IMMED_PREFIX (file
);
2907 fprintf (file
, "0x%x", l
);
2909 /* These float cases don't actually occur as immediate operands. */
2910 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
2912 REAL_VALUE_TYPE r
; char dstr
[30];
2913 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
2914 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
2915 fprintf (file
, "%s", dstr
);
2917 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == XFmode
)
2919 REAL_VALUE_TYPE r
; char dstr
[30];
2920 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
2921 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
2922 fprintf (file
, "%s", dstr
);
2928 if (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
)
2929 PRINT_IMMED_PREFIX (file
);
2930 else if (GET_CODE (x
) == CONST
|| GET_CODE (x
) == SYMBOL_REF
2931 || GET_CODE (x
) == LABEL_REF
)
2932 PRINT_OFFSET_PREFIX (file
);
2935 output_pic_addr_const (file
, x
, code
);
2937 output_addr_const (file
, x
);
2941 /* Print a memory operand whose address is ADDR. */
2944 print_operand_address (file
, addr
)
2948 register rtx reg1
, reg2
, breg
, ireg
;
2951 switch (GET_CODE (addr
))
2955 fprintf (file
, "%se", RP
);
2956 fputs (hi_reg_name
[REGNO (addr
)], file
);
2966 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0)))
2968 offset
= XEXP (addr
, 0);
2969 addr
= XEXP (addr
, 1);
2971 else if (CONSTANT_ADDRESS_P (XEXP (addr
, 1)))
2973 offset
= XEXP (addr
, 1);
2974 addr
= XEXP (addr
, 0);
2976 if (GET_CODE (addr
) != PLUS
) ;
2977 else if (GET_CODE (XEXP (addr
, 0)) == MULT
)
2979 reg1
= XEXP (addr
, 0);
2980 addr
= XEXP (addr
, 1);
2982 else if (GET_CODE (XEXP (addr
, 1)) == MULT
)
2984 reg1
= XEXP (addr
, 1);
2985 addr
= XEXP (addr
, 0);
2987 else if (GET_CODE (XEXP (addr
, 0)) == REG
)
2989 reg1
= XEXP (addr
, 0);
2990 addr
= XEXP (addr
, 1);
2992 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
2994 reg1
= XEXP (addr
, 1);
2995 addr
= XEXP (addr
, 0);
2997 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == MULT
)
2999 if (reg1
== 0) reg1
= addr
;
3005 if (addr
!= 0) abort ();
3008 if ((reg1
&& GET_CODE (reg1
) == MULT
)
3009 || (reg2
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2
))))
3014 else if (reg1
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1
)))
3020 if (ireg
!= 0 || breg
!= 0)
3027 output_pic_addr_const (file
, addr
, 0);
3029 else if (GET_CODE (addr
) == LABEL_REF
)
3030 output_asm_label (addr
);
3033 output_addr_const (file
, addr
);
3036 if (ireg
!= 0 && GET_CODE (ireg
) == MULT
)
3038 scale
= INTVAL (XEXP (ireg
, 1));
3039 ireg
= XEXP (ireg
, 0);
3042 /* The stack pointer can only appear as a base register,
3043 never an index register, so exchange the regs if it is wrong. */
3045 if (scale
== 1 && ireg
&& REGNO (ireg
) == STACK_POINTER_REGNUM
)
3054 /* output breg+ireg*scale */
3055 PRINT_B_I_S (breg
, ireg
, scale
, file
);
3062 if (GET_CODE (XEXP (addr
, 0)) == CONST_INT
)
3064 scale
= INTVAL (XEXP (addr
, 0));
3065 ireg
= XEXP (addr
, 1);
3069 scale
= INTVAL (XEXP (addr
, 1));
3070 ireg
= XEXP (addr
, 0);
3072 output_addr_const (file
, const0_rtx
);
3073 PRINT_B_I_S ((rtx
) 0, ireg
, scale
, file
);
3078 if (GET_CODE (addr
) == CONST_INT
3079 && INTVAL (addr
) < 0x8000
3080 && INTVAL (addr
) >= -0x8000)
3081 fprintf (file
, "%d", INTVAL (addr
));
3085 output_pic_addr_const (file
, addr
, 0);
3087 output_addr_const (file
, addr
);
3092 /* Set the cc_status for the results of an insn whose pattern is EXP.
3093 On the 80386, we assume that only test and compare insns, as well
3094 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3095 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3096 Also, we assume that jumps, moves and sCOND don't affect the condition
3097 codes. All else clobbers the condition codes, by assumption.
3099 We assume that ALL integer add, minus, etc. instructions effect the
3100 condition codes. This MUST be consistent with i386.md.
3102 We don't record any float test or compare - the redundant test &
3103 compare check in final.c does not handle stack-like regs correctly. */
3106 notice_update_cc (exp
)
3109 if (GET_CODE (exp
) == SET
)
3111 /* Jumps do not alter the cc's. */
3112 if (SET_DEST (exp
) == pc_rtx
)
3114 #ifdef IS_STACK_MODE
3115 /* Moving into a memory of stack_mode may have been moved
3116 in between the use and set of cc0 by loop_spl(). So
3117 old value of cc.status must be retained */
3118 if(GET_CODE(SET_DEST(exp
))==MEM
3119 && IS_STACK_MODE(GET_MODE(SET_DEST(exp
))))
3124 /* Moving register or memory into a register:
3125 it doesn't alter the cc's, but it might invalidate
3126 the RTX's which we remember the cc's came from.
3127 (Note that moving a constant 0 or 1 MAY set the cc's). */
3128 if (REG_P (SET_DEST (exp
))
3129 && (REG_P (SET_SRC (exp
)) || GET_CODE (SET_SRC (exp
)) == MEM
3130 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3132 if (cc_status
.value1
3133 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3134 cc_status
.value1
= 0;
3135 if (cc_status
.value2
3136 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3137 cc_status
.value2
= 0;
3140 /* Moving register into memory doesn't alter the cc's.
3141 It may invalidate the RTX's which we remember the cc's came from. */
3142 if (GET_CODE (SET_DEST (exp
)) == MEM
3143 && (REG_P (SET_SRC (exp
))
3144 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3146 if (cc_status
.value1
&& GET_CODE (cc_status
.value1
) == MEM
3147 || reg_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3148 cc_status
.value1
= 0;
3149 if (cc_status
.value2
&& GET_CODE (cc_status
.value2
) == MEM
3150 || reg_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3151 cc_status
.value2
= 0;
3154 /* Function calls clobber the cc's. */
3155 else if (GET_CODE (SET_SRC (exp
)) == CALL
)
3160 /* Tests and compares set the cc's in predictable ways. */
3161 else if (SET_DEST (exp
) == cc0_rtx
)
3164 cc_status
.value1
= SET_SRC (exp
);
3167 /* Certain instructions effect the condition codes. */
3168 else if (GET_MODE (SET_SRC (exp
)) == SImode
3169 || GET_MODE (SET_SRC (exp
)) == HImode
3170 || GET_MODE (SET_SRC (exp
)) == QImode
)
3171 switch (GET_CODE (SET_SRC (exp
)))
3173 case ASHIFTRT
: case LSHIFTRT
:
3175 /* Shifts on the 386 don't set the condition codes if the
3176 shift count is zero. */
3177 if (GET_CODE (XEXP (SET_SRC (exp
), 1)) != CONST_INT
)
3182 /* We assume that the CONST_INT is non-zero (this rtx would
3183 have been deleted if it were zero. */
3185 case PLUS
: case MINUS
: case NEG
:
3186 case AND
: case IOR
: case XOR
:
3187 cc_status
.flags
= CC_NO_OVERFLOW
;
3188 cc_status
.value1
= SET_SRC (exp
);
3189 cc_status
.value2
= SET_DEST (exp
);
3200 else if (GET_CODE (exp
) == PARALLEL
3201 && GET_CODE (XVECEXP (exp
, 0, 0)) == SET
)
3203 if (SET_DEST (XVECEXP (exp
, 0, 0)) == pc_rtx
)
3205 if (SET_DEST (XVECEXP (exp
, 0, 0)) == cc0_rtx
)
3208 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp
, 0, 0))))
3209 cc_status
.flags
|= CC_IN_80387
;
3211 cc_status
.value1
= SET_SRC (XVECEXP (exp
, 0, 0));
3222 /* Split one or more DImode RTL references into pairs of SImode
3223 references. The RTL can be REG, offsettable MEM, integer constant, or
3224 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3225 split and "num" is its length. lo_half and hi_half are output arrays
3226 that parallel "operands". */
3229 split_di (operands
, num
, lo_half
, hi_half
)
3232 rtx lo_half
[], hi_half
[];
3236 if (GET_CODE (operands
[num
]) == REG
)
3238 lo_half
[num
] = gen_rtx (REG
, SImode
, REGNO (operands
[num
]));
3239 hi_half
[num
] = gen_rtx (REG
, SImode
, REGNO (operands
[num
]) + 1);
3241 else if (CONSTANT_P (operands
[num
]))
3243 split_double (operands
[num
], &lo_half
[num
], &hi_half
[num
]);
3245 else if (offsettable_memref_p (operands
[num
]))
3247 lo_half
[num
] = operands
[num
];
3248 hi_half
[num
] = adj_offsettable_operand (operands
[num
], 4);
3255 /* Return 1 if this is a valid binary operation on a 387.
3256 OP is the expression matched, and MODE is its mode. */
3259 binary_387_op (op
, mode
)
3261 enum machine_mode mode
;
3263 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3266 switch (GET_CODE (op
))
3272 return GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
;
3280 /* Return 1 if this is a valid shift or rotate operation on a 386.
3281 OP is the expression matched, and MODE is its mode. */
3286 enum machine_mode mode
;
3288 rtx operand
= XEXP (op
, 0);
3290 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3293 if (GET_MODE (operand
) != GET_MODE (op
)
3294 || GET_MODE_CLASS (GET_MODE (op
)) != MODE_INT
)
3297 return (GET_CODE (op
) == ASHIFT
3298 || GET_CODE (op
) == ASHIFTRT
3299 || GET_CODE (op
) == LSHIFTRT
3300 || GET_CODE (op
) == ROTATE
3301 || GET_CODE (op
) == ROTATERT
);
3304 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3305 MODE is not used. */
3308 VOIDmode_compare_op (op
, mode
)
3310 enum machine_mode mode
;
3312 return GET_CODE (op
) == COMPARE
&& GET_MODE (op
) == VOIDmode
;
3315 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3316 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3317 is the expression of the binary operation. The output may either be
3318 emitted here, or returned to the caller, like all output_* functions.
3320 There is no guarantee that the operands are the same mode, as they
3321 might be within FLOAT or FLOAT_EXTEND expressions. */
3324 output_387_binary_op (insn
, operands
)
3330 static char buf
[100];
3332 switch (GET_CODE (operands
[3]))
3335 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3336 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3343 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3344 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3351 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3352 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3359 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3360 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3370 strcpy (buf
, base_op
);
3372 switch (GET_CODE (operands
[3]))
3376 if (REG_P (operands
[2]) && REGNO (operands
[0]) == REGNO (operands
[2]))
3379 operands
[2] = operands
[1];
3383 if (GET_CODE (operands
[2]) == MEM
)
3384 return strcat (buf
, AS1 (%z2
,%2));
3386 if (NON_STACK_REG_P (operands
[1]))
3388 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
3391 else if (NON_STACK_REG_P (operands
[2]))
3393 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3397 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3398 return strcat (buf
, AS2 (p
,%2,%0));
3400 if (STACK_TOP_P (operands
[0]))
3401 return strcat (buf
, AS2C (%y2
,%0));
3403 return strcat (buf
, AS2C (%2,%0));
3407 if (GET_CODE (operands
[1]) == MEM
)
3408 return strcat (buf
, AS1 (r
%z1
,%1));
3410 if (GET_CODE (operands
[2]) == MEM
)
3411 return strcat (buf
, AS1 (%z2
,%2));
3413 if (NON_STACK_REG_P (operands
[1]))
3415 output_op_from_reg (operands
[1], strcat (buf
, AS1 (r
%z0
,%1)));
3418 else if (NON_STACK_REG_P (operands
[2]))
3420 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3424 if (! STACK_REG_P (operands
[1]) || ! STACK_REG_P (operands
[2]))
3427 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3428 return strcat (buf
, AS2 (rp
,%2,%0));
3430 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1])))
3431 return strcat (buf
, AS2 (p
,%1,%0));
3433 if (STACK_TOP_P (operands
[0]))
3435 if (STACK_TOP_P (operands
[1]))
3436 return strcat (buf
, AS2C (%y2
,%0));
3438 return strcat (buf
, AS2 (r
,%y1
,%0));
3440 else if (STACK_TOP_P (operands
[1]))
3441 return strcat (buf
, AS2C (%1,%0));
3443 return strcat (buf
, AS2 (r
,%2,%0));
3450 /* Output code for INSN to convert a float to a signed int. OPERANDS
3451 are the insn operands. The output may be SFmode or DFmode and the
3452 input operand may be SImode or DImode. As a special case, make sure
3453 that the 387 stack top dies if the output mode is DImode, because the
3454 hardware requires this. */
3457 output_fix_trunc (insn
, operands
)
3461 int stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
3464 if (! STACK_TOP_P (operands
[1]) ||
3465 (GET_MODE (operands
[0]) == DImode
&& ! stack_top_dies
))
3468 xops
[0] = GEN_INT (12);
3469 xops
[1] = operands
[4];
3471 output_asm_insn (AS1 (fnstc
%W2
,%2), operands
);
3472 output_asm_insn (AS2 (mov
%L2
,%2,%4), operands
);
3473 output_asm_insn (AS2 (mov
%B1
,%0,%h1
), xops
);
3474 output_asm_insn (AS2 (mov
%L4
,%4,%3), operands
);
3475 output_asm_insn (AS1 (fldc
%W3
,%3), operands
);
3477 if (NON_STACK_REG_P (operands
[0]))
3478 output_to_reg (operands
[0], stack_top_dies
);
3479 else if (GET_CODE (operands
[0]) == MEM
)
3482 output_asm_insn (AS1 (fistp
%z0
,%0), operands
);
3484 output_asm_insn (AS1 (fist
%z0
,%0), operands
);
3489 return AS1 (fldc
%W2
,%2);
3492 /* Output code for INSN to compare OPERANDS. The two operands might
3493 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3494 expression. If the compare is in mode CCFPEQmode, use an opcode that
3495 will not fault if a qNaN is present. */
3498 output_float_compare (insn
, operands
)
3503 rtx body
= XVECEXP (PATTERN (insn
), 0, 0);
3504 int unordered_compare
= GET_MODE (SET_SRC (body
)) == CCFPEQmode
;
3507 if (! STACK_TOP_P (operands
[0]))
3510 operands
[0] = operands
[1];
3512 cc_status
.flags
|= CC_REVERSED
;
3515 if (! STACK_TOP_P (operands
[0]))
3518 stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
3520 if (STACK_REG_P (operands
[1])
3522 && find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1]))
3523 && REGNO (operands
[1]) != FIRST_STACK_REG
)
3525 /* If both the top of the 387 stack dies, and the other operand
3526 is also a stack register that dies, then this must be a
3527 `fcompp' float compare */
3529 if (unordered_compare
)
3530 output_asm_insn ("fucompp", operands
);
3532 output_asm_insn ("fcompp", operands
);
3536 static char buf
[100];
3538 /* Decide if this is the integer or float compare opcode, or the
3539 unordered float compare. */
3541 if (unordered_compare
)
3542 strcpy (buf
, "fucom");
3543 else if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_FLOAT
)
3544 strcpy (buf
, "fcom");
3546 strcpy (buf
, "ficom");
3548 /* Modify the opcode if the 387 stack is to be popped. */
3553 if (NON_STACK_REG_P (operands
[1]))
3554 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
3556 output_asm_insn (strcat (buf
, AS1 (%z1
,%y1
)), operands
);
3559 /* Now retrieve the condition code. */
3561 return output_fp_cc0_set (insn
);
3564 /* Output opcodes to transfer the results of FP compare or test INSN
3565 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
3566 result of the compare or test is unordered, no comparison operator
3567 succeeds except NE. Return an output template, if any. */
3570 output_fp_cc0_set (insn
)
3574 rtx unordered_label
;
3578 xops
[0] = gen_rtx (REG
, HImode
, 0);
3579 output_asm_insn (AS1 (fnsts
%W0
,%0), xops
);
3581 if (! TARGET_IEEE_FP
)
3583 if (!(cc_status
.flags
& CC_REVERSED
))
3585 next
= next_cc0_user (insn
);
3587 if (GET_CODE (next
) == JUMP_INSN
3588 && GET_CODE (PATTERN (next
)) == SET
3589 && SET_DEST (PATTERN (next
)) == pc_rtx
3590 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
3592 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
3594 else if (GET_CODE (PATTERN (next
)) == SET
)
3596 code
= GET_CODE (SET_SRC (PATTERN (next
)));
3602 if (code
== GT
|| code
== LT
|| code
== EQ
|| code
== NE
3603 || code
== LE
|| code
== GE
)
3604 { /* We will test eax directly */
3605 cc_status
.flags
|= CC_TEST_AX
;
3612 next
= next_cc0_user (insn
);
3613 if (next
== NULL_RTX
)
3616 if (GET_CODE (next
) == JUMP_INSN
3617 && GET_CODE (PATTERN (next
)) == SET
3618 && SET_DEST (PATTERN (next
)) == pc_rtx
3619 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
3621 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
3623 else if (GET_CODE (PATTERN (next
)) == SET
)
3625 code
= GET_CODE (SET_SRC (PATTERN (next
)));
3630 xops
[0] = gen_rtx (REG
, QImode
, 0);
3635 xops
[1] = GEN_INT (0x45);
3636 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3641 xops
[1] = GEN_INT (0x45);
3642 xops
[2] = GEN_INT (0x01);
3643 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3644 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
3649 xops
[1] = GEN_INT (0x05);
3650 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3655 xops
[1] = GEN_INT (0x45);
3656 xops
[2] = GEN_INT (0x40);
3657 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3658 output_asm_insn (AS1 (dec
%B0
,%h0
), xops
);
3659 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
3664 xops
[1] = GEN_INT (0x45);
3665 xops
[2] = GEN_INT (0x40);
3666 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3667 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
3672 xops
[1] = GEN_INT (0x44);
3673 xops
[2] = GEN_INT (0x40);
3674 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3675 output_asm_insn (AS2 (xor%B0
,%2,%h0
), xops
);
3689 #define MAX_386_STACK_LOCALS 2
3691 static rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
3693 /* Define the structure for the machine field in struct function. */
3694 struct machine_function
3696 rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
3699 /* Functions to save and restore i386_stack_locals.
3700 These will be called, via pointer variables,
3701 from push_function_context and pop_function_context. */
3704 save_386_machine_status (p
)
3707 p
->machine
= (struct machine_function
*) xmalloc (sizeof i386_stack_locals
);
3708 bcopy ((char *) i386_stack_locals
, (char *) p
->machine
->i386_stack_locals
,
3709 sizeof i386_stack_locals
);
3713 restore_386_machine_status (p
)
3716 bcopy ((char *) p
->machine
->i386_stack_locals
, (char *) i386_stack_locals
,
3717 sizeof i386_stack_locals
);
3721 /* Clear stack slot assignments remembered from previous functions.
3722 This is called from INIT_EXPANDERS once before RTL is emitted for each
3726 clear_386_stack_locals ()
3728 enum machine_mode mode
;
3731 for (mode
= VOIDmode
; (int) mode
< (int) MAX_MACHINE_MODE
;
3732 mode
= (enum machine_mode
) ((int) mode
+ 1))
3733 for (n
= 0; n
< MAX_386_STACK_LOCALS
; n
++)
3734 i386_stack_locals
[(int) mode
][n
] = NULL_RTX
;
3736 /* Arrange to save and restore i386_stack_locals around nested functions. */
3737 save_machine_status
= save_386_machine_status
;
3738 restore_machine_status
= restore_386_machine_status
;
3741 /* Return a MEM corresponding to a stack slot with mode MODE.
3742 Allocate a new slot if necessary.
3744 The RTL for a function can have several slots available: N is
3745 which slot to use. */
3748 assign_386_stack_local (mode
, n
)
3749 enum machine_mode mode
;
3752 if (n
< 0 || n
>= MAX_386_STACK_LOCALS
)
3755 if (i386_stack_locals
[(int) mode
][n
] == NULL_RTX
)
3756 i386_stack_locals
[(int) mode
][n
]
3757 = assign_stack_local (mode
, GET_MODE_SIZE (mode
), 0);
3759 return i386_stack_locals
[(int) mode
][n
];
3765 enum machine_mode mode
;
3767 return (GET_CODE (op
) == MULT
);
3772 enum machine_mode mode
;
3774 return (GET_CODE (op
) == DIV
);
3779 /* Create a new copy of an rtx.
3780 Recursively copies the operands of the rtx,
3781 except for those few rtx codes that are sharable.
3782 Doesn't share CONST */
3790 register RTX_CODE code
;
3791 register char *format_ptr
;
3793 code
= GET_CODE (orig
);
3806 /* SCRATCH must be shared because they represent distinct values. */
3811 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
3812 a LABEL_REF, it isn't sharable. */
3813 if (GET_CODE (XEXP (orig
, 0)) == PLUS
3814 && GET_CODE (XEXP (XEXP (orig
, 0), 0)) == SYMBOL_REF
3815 && GET_CODE (XEXP (XEXP (orig
, 0), 1)) == CONST_INT
)
3819 /* A MEM with a constant address is not sharable. The problem is that
3820 the constant address may need to be reloaded. If the mem is shared,
3821 then reloading one copy of this mem will cause all copies to appear
3822 to have been reloaded. */
3825 copy
= rtx_alloc (code
);
3826 PUT_MODE (copy
, GET_MODE (orig
));
3827 copy
->in_struct
= orig
->in_struct
;
3828 copy
->volatil
= orig
->volatil
;
3829 copy
->unchanging
= orig
->unchanging
;
3830 copy
->integrated
= orig
->integrated
;
3832 copy
->is_spill_rtx
= orig
->is_spill_rtx
;
3834 format_ptr
= GET_RTX_FORMAT (GET_CODE (copy
));
3836 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (copy
)); i
++)
3838 switch (*format_ptr
++)
3841 XEXP (copy
, i
) = XEXP (orig
, i
);
3842 if (XEXP (orig
, i
) != NULL
)
3843 XEXP (copy
, i
) = copy_rtx (XEXP (orig
, i
));
3848 XEXP (copy
, i
) = XEXP (orig
, i
);
3853 XVEC (copy
, i
) = XVEC (orig
, i
);
3854 if (XVEC (orig
, i
) != NULL
)
3856 XVEC (copy
, i
) = rtvec_alloc (XVECLEN (orig
, i
));
3857 for (j
= 0; j
< XVECLEN (copy
, i
); j
++)
3858 XVECEXP (copy
, i
, j
) = copy_rtx (XVECEXP (orig
, i
, j
));
3863 XWINT (copy
, i
) = XWINT (orig
, i
);
3867 XINT (copy
, i
) = XINT (orig
, i
);
3872 XSTR (copy
, i
) = XSTR (orig
, i
);
3883 /* try to rewrite a memory address to make it valid */
3885 rewrite_address (mem_rtx
)
3888 rtx index_rtx
, base_rtx
, offset_rtx
, scale_rtx
, ret_rtx
;
3890 int offset_adjust
= 0;
3891 int was_only_offset
= 0;
3892 rtx mem_addr
= XEXP (mem_rtx
, 0);
3893 char *storage
= (char *) oballoc (0);
3895 int is_spill_rtx
= 0;
3897 in_struct
= MEM_IN_STRUCT_P (mem_rtx
);
3898 is_spill_rtx
= RTX_IS_SPILL_P (mem_rtx
);
3900 if (GET_CODE (mem_addr
) == PLUS
&&
3901 GET_CODE (XEXP (mem_addr
, 1)) == PLUS
&&
3902 GET_CODE (XEXP (XEXP (mem_addr
, 1), 0)) == REG
)
3903 { /* this part is utilized by the combiner */
3905 gen_rtx (PLUS
, GET_MODE (mem_addr
),
3906 gen_rtx (PLUS
, GET_MODE (XEXP (mem_addr
, 1)),
3908 XEXP (XEXP (mem_addr
, 1), 0)),
3909 XEXP (XEXP (mem_addr
, 1), 1));
3910 if (memory_address_p (GET_MODE (mem_rtx
), ret_rtx
))
3912 XEXP (mem_rtx
, 0) = ret_rtx
;
3913 RTX_IS_SPILL_P (ret_rtx
) = is_spill_rtx
;
3919 /* this part is utilized by loop.c */
3920 /* If the address contains PLUS (reg,const) and this pattern is invalid
3921 in this case - try to rewrite the address to make it valid intel1
3923 storage
= (char *) oballoc (0);
3924 index_rtx
= base_rtx
= offset_rtx
= NULL
;
3925 /* find the base index and offset elements of the memory address */
3926 if (GET_CODE (mem_addr
) == PLUS
)
3928 if (GET_CODE (XEXP (mem_addr
, 0)) == REG
)
3930 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
3932 base_rtx
= XEXP (mem_addr
, 1);
3933 index_rtx
= XEXP (mem_addr
, 0);
3937 base_rtx
= XEXP (mem_addr
, 0);
3938 offset_rtx
= XEXP (mem_addr
, 1);
3941 else if (GET_CODE (XEXP (mem_addr
, 0)) == MULT
)
3943 index_rtx
= XEXP (mem_addr
, 0);
3944 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
3946 base_rtx
= XEXP (mem_addr
, 1);
3950 offset_rtx
= XEXP (mem_addr
, 1);
3953 else if (GET_CODE (XEXP (mem_addr
, 0)) == PLUS
)
3956 if (GET_CODE (XEXP (XEXP (mem_addr
, 0), 0)) == PLUS
&&
3957 GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0)) == MULT
&&
3958 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 0)) == REG
&&
3959 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 1)) == CONST_INT
&&
3960 GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1)) == CONST_INT
&&
3961 GET_CODE (XEXP (XEXP (mem_addr
, 0), 1)) == REG
&&
3962 GET_CODE (XEXP (mem_addr
, 1)) == SYMBOL_REF
)
3964 index_rtx
= XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0);
3965 offset_rtx
= XEXP (mem_addr
, 1);
3966 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
3967 offset_adjust
= INTVAL (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1));
3971 offset_rtx
= XEXP (mem_addr
, 1);
3972 index_rtx
= XEXP (XEXP (mem_addr
, 0), 0);
3973 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
3976 else if (GET_CODE (XEXP (mem_addr
, 0)) == CONST_INT
)
3978 was_only_offset
= 1;
3981 offset_rtx
= XEXP (mem_addr
, 1);
3982 offset_adjust
= INTVAL (XEXP (mem_addr
, 0));
3983 if (offset_adjust
== 0)
3985 XEXP (mem_rtx
, 0) = offset_rtx
;
3986 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
3996 else if (GET_CODE (mem_addr
) == MULT
)
3998 index_rtx
= mem_addr
;
4005 if (index_rtx
&& GET_CODE (index_rtx
) == MULT
)
4007 if (GET_CODE (XEXP (index_rtx
, 1)) != CONST_INT
)
4012 scale_rtx
= XEXP (index_rtx
, 1);
4013 scale
= INTVAL (scale_rtx
);
4014 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4016 /* now find which of the elements are invalid and try to fix them */
4017 if (index_rtx
&& GET_CODE (index_rtx
) == CONST_INT
&& base_rtx
== NULL
)
4019 offset_adjust
= INTVAL (index_rtx
) * scale
;
4020 if (offset_rtx
&& GET_CODE (offset_rtx
) == CONST
&&
4021 GET_CODE (XEXP (offset_rtx
, 0)) == PLUS
)
4023 if (GET_CODE (XEXP (XEXP (offset_rtx
, 0), 0)) == SYMBOL_REF
&&
4024 GET_CODE (XEXP (XEXP (offset_rtx
, 0), 1)) == CONST_INT
)
4026 offset_rtx
= copy_all_rtx (offset_rtx
);
4027 XEXP (XEXP (offset_rtx
, 0), 1) =
4028 gen_rtx (CONST_INT
, 0, INTVAL (XEXP (XEXP (offset_rtx
, 0), 1)) + offset_adjust
);
4029 if (!CONSTANT_P (offset_rtx
))
4036 else if (offset_rtx
&& GET_CODE (offset_rtx
) == SYMBOL_REF
)
4039 gen_rtx (CONST
, GET_MODE (offset_rtx
),
4040 gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4042 gen_rtx (CONST_INT
, 0, offset_adjust
)));
4043 if (!CONSTANT_P (offset_rtx
))
4049 else if (offset_rtx
&& GET_CODE (offset_rtx
) == CONST_INT
)
4051 offset_rtx
= gen_rtx (CONST_INT
, 0, INTVAL (offset_rtx
) + offset_adjust
);
4053 else if (!offset_rtx
)
4055 offset_rtx
= gen_rtx (CONST_INT
, 0, 0);
4057 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4058 XEXP (mem_rtx
, 0) = offset_rtx
;
4061 if (base_rtx
&& GET_CODE (base_rtx
) == PLUS
&&
4062 GET_CODE (XEXP (base_rtx
, 0)) == REG
&&
4063 GET_CODE (XEXP (base_rtx
, 1)) == CONST_INT
)
4065 offset_adjust
+= INTVAL (XEXP (base_rtx
, 1));
4066 base_rtx
= copy_all_rtx (XEXP (base_rtx
, 0));
4068 else if (base_rtx
&& GET_CODE (base_rtx
) == CONST_INT
)
4070 offset_adjust
+= INTVAL (base_rtx
);
4073 if (index_rtx
&& GET_CODE (index_rtx
) == PLUS
&&
4074 GET_CODE (XEXP (index_rtx
, 0)) == REG
&&
4075 GET_CODE (XEXP (index_rtx
, 1)) == CONST_INT
)
4077 offset_adjust
+= INTVAL (XEXP (index_rtx
, 1)) * scale
;
4078 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4082 if (!LEGITIMATE_INDEX_P (index_rtx
)
4083 && !(index_rtx
== stack_pointer_rtx
&& scale
== 1 && base_rtx
== NULL
))
4091 if (!LEGITIMATE_INDEX_P (base_rtx
) && GET_CODE (base_rtx
) != REG
)
4097 if (offset_adjust
!= 0)
4101 if (GET_CODE (offset_rtx
) == CONST
&&
4102 GET_CODE (XEXP (offset_rtx
, 0)) == PLUS
)
4104 if (GET_CODE (XEXP (XEXP (offset_rtx
, 0), 0)) == SYMBOL_REF
&&
4105 GET_CODE (XEXP (XEXP (offset_rtx
, 0), 1)) == CONST_INT
)
4107 offset_rtx
= copy_all_rtx (offset_rtx
);
4108 XEXP (XEXP (offset_rtx
, 0), 1) =
4109 gen_rtx (CONST_INT
, 0, INTVAL (XEXP (XEXP (offset_rtx
, 0), 1)) + offset_adjust
);
4110 if (!CONSTANT_P (offset_rtx
))
4117 else if (GET_CODE (offset_rtx
) == SYMBOL_REF
)
4120 gen_rtx (CONST
, GET_MODE (offset_rtx
),
4121 gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4123 gen_rtx (CONST_INT
, 0, offset_adjust
)));
4124 if (!CONSTANT_P (offset_rtx
))
4130 else if (GET_CODE (offset_rtx
) == CONST_INT
)
4132 offset_rtx
= gen_rtx (CONST_INT
, 0, INTVAL (offset_rtx
) + offset_adjust
);
4142 offset_rtx
= gen_rtx (CONST_INT
, 0, offset_adjust
);
4150 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4151 INTVAL (offset_rtx
) == 0)
4153 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
),
4154 gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
,
4160 ret_rtx
= gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4161 gen_rtx (PLUS
, GET_MODE (base_rtx
),
4162 gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
,
4170 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4171 INTVAL (offset_rtx
) == 0)
4173 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
), index_rtx
, base_rtx
);
4177 ret_rtx
= gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4178 gen_rtx (PLUS
, GET_MODE (index_rtx
), index_rtx
,
4188 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4189 INTVAL (offset_rtx
) == 0)
4191 ret_rtx
= gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
, scale_rtx
);
4196 gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4197 gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
,
4204 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4205 INTVAL (offset_rtx
) == 0)
4207 ret_rtx
= index_rtx
;
4211 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
), index_rtx
, offset_rtx
);
4220 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4221 INTVAL (offset_rtx
) == 0)
4227 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
), base_rtx
, offset_rtx
);
4230 else if (was_only_offset
)
4232 ret_rtx
= offset_rtx
;
4240 XEXP (mem_rtx
, 0) = ret_rtx
;
4241 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4253 /* return 1 if the first insn to set cc before insn also sets the register
4254 reg_rtx - otherwise return 0 */
4256 last_to_set_cc (reg_rtx
, insn
)
4259 rtx prev_insn
= PREV_INSN (insn
);
4263 if (GET_CODE (prev_insn
) == NOTE
)
4266 else if (GET_CODE (prev_insn
) == INSN
)
4268 if (GET_CODE (PATTERN (prev_insn
)) != SET
)
4271 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn
)), reg_rtx
))
4273 if (sets_condition_code (SET_SRC (PATTERN (prev_insn
))))
4279 else if (!doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn
))))
4286 prev_insn
= PREV_INSN (prev_insn
);
4294 doesnt_set_condition_code (pat
)
4297 switch (GET_CODE (pat
))
4311 sets_condition_code (pat
)
4314 switch (GET_CODE (pat
))
4338 str_immediate_operand (op
, mode
)
4340 enum machine_mode mode
;
4342 if (GET_CODE (op
) == CONST_INT
&& INTVAL (op
) <= 32 && INTVAL (op
) >= 0)
4354 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4355 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4356 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4357 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
))
4366 Return 1 if the mode of the SET_DEST of insn is floating point
4367 and it is not an fld or a move from memory to memory.
4368 Otherwise return 0 */
4373 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4374 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4375 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4376 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4377 && GET_CODE (SET_DEST (PATTERN (insn
))) == REG
4378 && REGNO (SET_DEST (PATTERN (insn
))) >= FIRST_FLOAT_REG
4379 && GET_CODE (SET_SRC (insn
)) != MEM
)
4388      Return 1 if the mode of the SET_DEST is floating point and is memory
4389      and the source is a register.
4395 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4396 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4397 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4398 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4399 && GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
4400 && GET_CODE (SET_SRC (PATTERN (insn
))) == REG
)
4410 Return 1 if dep_insn sets a register which insn uses as a base
4411 or index to reference memory.
4412 otherwise return 0 */
4415 agi_dependent (insn
, dep_insn
)
4418 if (GET_CODE (dep_insn
) == INSN
4419 && GET_CODE (PATTERN (dep_insn
)) == SET
4420 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == REG
)
4422 return (reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn
)), insn
));
4425 if (GET_CODE (dep_insn
) == INSN
&& GET_CODE (PATTERN (dep_insn
)) == SET
4426 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == MEM
4427 && push_operand (SET_DEST (PATTERN (dep_insn
)),
4428 GET_MODE (SET_DEST (PATTERN (dep_insn
)))))
4430 return (reg_mentioned_in_mem (stack_pointer_rtx
, insn
));
4438 Return 1 if reg is used in rtl as a base or index for a memory ref
4439 otherwise return 0. */
4442 reg_mentioned_in_mem (reg
, rtl
)
4447 register enum rtx_code code
;
4452 code
= GET_CODE (rtl
);
4470 if (code
== MEM
&& reg_mentioned_p (reg
, rtl
))
4473 fmt
= GET_RTX_FORMAT (code
);
4474 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4479 for (j
= XVECLEN (rtl
, i
) - 1; j
>= 0; j
--)
4481 if (reg_mentioned_in_mem (reg
, XVECEXP (rtl
, i
, j
)))
4486 else if (fmt
[i
] == 'e' && reg_mentioned_in_mem (reg
, XEXP (rtl
, i
)))
4493 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4495 operands[0] = result, initialized with the startaddress
4496 operands[1] = alignment of the address.
4497 operands[2] = scratch register, initialized with the startaddress when
4498 not aligned, otherwise undefined
4500 This is just the body. It needs the initialisations mentioned above and
4501 some address computing at the end. These things are done in i386.md. */
4504 output_strlen_unroll (operands
)
4509 xops
[0] = operands
[0]; /* Result */
4510 /* operands[1]; * Alignment */
4511 xops
[1] = operands
[2]; /* Scratch */
4512 xops
[2] = GEN_INT (0);
4513 xops
[3] = GEN_INT (2);
4514 xops
[4] = GEN_INT (3);
4515 xops
[5] = GEN_INT (4);
4516 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4517 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4518 xops
[8] = gen_label_rtx (); /* label of main loop */
4519 if(TARGET_USE_Q_REG
&& QI_REG_P (xops
[1]))
4520 xops
[9] = gen_label_rtx (); /* pentium optimisation */
4521 xops
[10] = gen_label_rtx (); /* end label 2 */
4522 xops
[11] = gen_label_rtx (); /* end label 1 */
4523 xops
[12] = gen_label_rtx (); /* end label */
4524 /* xops[13] * Temporary used */
4525 xops
[14] = GEN_INT (0xff);
4526 xops
[15] = GEN_INT (0xff00);
4527 xops
[16] = GEN_INT (0xff0000);
4528 xops
[17] = GEN_INT (0xff000000);
4530 /* Loop to check 1..3 bytes for null to get an aligned pointer */
4532 /* is there a known alignment and is it less then 4 */
4533 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) < 4)
4535 /* is there a known alignment and is it not 2 */
4536 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
4538 xops
[6] = gen_label_rtx (); /* label when aligned to 3-byte */
4539 xops
[7] = gen_label_rtx (); /* label when aligned to 2-byte */
4541 /* leave just the 3 lower bits */
4542 /* if this is a q-register, then the high part is used later */
4543 /* therefore user andl rather than andb */
4544 output_asm_insn (AS2 (and%L1
,%4,%1), xops
);
4545 /* is aligned to 4-byte adress when zero */
4546 output_asm_insn (AS1 (je
,%l8
), xops
);
4547 /* side-effect even Parity when %eax == 3 */
4548 output_asm_insn (AS1 (jp
,%6), xops
);
4550 /* is it aligned to 2 bytes ? */
4551 if (QI_REG_P (xops
[1]))
4552 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
4554 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
4555 output_asm_insn (AS1 (je
,%7), xops
);
4559 /* since the alignment is 2, we have to check 2 or 0 bytes */
4561 /* check if is aligned to 4 - byte */
4562 output_asm_insn (AS2 (and%L1
,%3,%1), xops
);
4563 /* is aligned to 4-byte adress when zero */
4564 output_asm_insn (AS1 (je
,%l8
), xops
);
4567 xops
[13] = gen_rtx (MEM
, QImode
, xops
[0]);
4568 /* now, compare the bytes */
4569 /* compare with the high part of a q-reg gives shorter code */
4570 if (QI_REG_P (xops
[1]))
4572 /* compare the first n unaligned byte on a byte per byte basis */
4573 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4574 /* when zero we reached the end */
4575 output_asm_insn (AS1 (je
,%l12
), xops
);
4576 /* increment the address */
4577 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4579 /* not needed with an alignment of 2 */
4580 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
4582 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[7]));
4583 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4584 output_asm_insn (AS1 (je
,%l12
), xops
);
4585 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4587 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[6]));
4589 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4593 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4594 output_asm_insn (AS1 (je
,%l12
), xops
);
4595 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4597 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[7]));
4598 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4599 output_asm_insn (AS1 (je
,%l12
), xops
);
4600 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4602 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[6]));
4603 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4605 output_asm_insn (AS1 (je
,%l12
), xops
);
4606 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4609 /* Generate loop to check 4 bytes at a time */
4610 /* IMHO it is not a good idea to align this loop. It gives only */
4611 /* huge programs, but does not help to speed up */
4612 /* ASM_OUTPUT_LOOP_ALIGN (asm_out_file); */
4613 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[8]));
4615 xops
[13] = gen_rtx (MEM
, SImode
, xops
[0]);
4616 output_asm_insn (AS2 (mov
%L1
,%13,%1), xops
);
4618 if (QI_REG_P (xops
[1]))
4620 /* On i586 it is faster to combine the hi- and lo- part as
4621 a kind of lookahead. If anding both yields zero, then one
4622 of both *could* be zero, otherwise none of both is zero;
4623 this saves one instruction, on i486 this is slower
4624 tested with P-90, i486DX2-66, AMD486DX2-66 */
4627 output_asm_insn (AS2 (test
%B1
,%h1
,%b1
), xops
);
4628 output_asm_insn (AS1 (jne
,%l9
), xops
);
4631 /* check first byte */
4632 output_asm_insn (AS2 (test
%B1
,%b1
,%b1
), xops
);
4633 output_asm_insn (AS1 (je
,%l12
), xops
);
4635 /* check second byte */
4636 output_asm_insn (AS2 (test
%B1
,%h1
,%h1
), xops
);
4637 output_asm_insn (AS1 (je
,%l11
), xops
);
4640 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[9]));
4644 /* check first byte */
4645 output_asm_insn (AS2 (test
%L1
,%14,%1), xops
);
4646 output_asm_insn (AS1 (je
,%l12
), xops
);
4648 /* check second byte */
4649 output_asm_insn (AS2 (test
%L1
,%15,%1), xops
);
4650 output_asm_insn (AS1 (je
,%l11
), xops
);
4653 /* check third byte */
4654 output_asm_insn (AS2 (test
%L1
,%16,%1), xops
);
4655 output_asm_insn (AS1 (je
,%l10
), xops
);
4657 /* check fourth byte and increment address */
4658 output_asm_insn (AS2 (add
%L0
,%5,%0), xops
);
4659 output_asm_insn (AS2 (test
%L1
,%17,%1), xops
);
4660 output_asm_insn (AS1 (jne
,%l8
), xops
);
4662 /* now generate fixups when the compare stops within a 4-byte word */
4663 output_asm_insn (AS2 (sub
%L0
,%4,%0), xops
);
4665 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[10]));
4666 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4668 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[11]));
4669 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4671 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[12]));