/* Subroutines for insn-output.c for Intel X86.
   Copyright (C) 1988, 1992, 1994, 1995, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "insn-attr.h"
#ifdef EXTRA_CONSTRAINT
/* If EXTRA_CONSTRAINT is defined, then the 'S'
   constraint in REG_CLASS_FROM_LETTER will no longer work, and various
   asm statements that need 'S' for class SIREG will break.  */
 error EXTRA_CONSTRAINT conflicts with S constraint letter
/* The previous line used to be #error, but some compilers barf
   even if the conditional was untrue.  */
#endif
enum reg_mem			/* Type of an operand for ix86_{binary,unary}_operator_ok */
/* Processor costs (relative to an add) */
struct processor_costs i386_cost = {	/* 386 specific costs */
  1,				/* cost of an add instruction (2 cycles) */
  1,				/* cost of a lea instruction */
  3,				/* variable shift costs */
  2,				/* constant shift costs */
  6,				/* cost of starting a multiply */
  1,				/* cost of multiply per each bit set */
  23				/* cost of a divide/mod */
};

struct processor_costs i486_cost = {	/* 486 specific costs */
  1,				/* cost of an add instruction */
  1,				/* cost of a lea instruction */
  3,				/* variable shift costs */
  2,				/* constant shift costs */
  12,				/* cost of starting a multiply */
  1,				/* cost of multiply per each bit set */
  40				/* cost of a divide/mod */
};

struct processor_costs pentium_cost = {
  1,				/* cost of an add instruction */
  1,				/* cost of a lea instruction */
  3,				/* variable shift costs */
  1,				/* constant shift costs */
  12,				/* cost of starting a multiply */
  1,				/* cost of multiply per each bit set */
  25				/* cost of a divide/mod */
};

struct processor_costs *ix86_cost = &pentium_cost;
#define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))
extern FILE *asm_out_file;
extern char *strcat ();

char *singlemove_string ();
char *output_move_const_single ();
char *output_fp_cc0_set ();

char *hi_reg_name[] = HI_REGISTER_NAMES;
char *qi_reg_name[] = QI_REGISTER_NAMES;
char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in i386.h.  */

enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{
  AREG, DREG, CREG, BREG,
  SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
  FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
  FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
/* Test and compare insns in i386.md store the information needed to
   generate branch and scc insns here.  */

struct rtx_def *i386_compare_op0 = NULL_RTX;
struct rtx_def *i386_compare_op1 = NULL_RTX;
struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();
/* which cpu are we scheduling for */
enum processor_type ix86_cpu;

/* which instruction set architecture to use.  */

/* Strings to hold which cpu and instruction set architecture to use.  */
char *ix86_cpu_string;		/* for -mcpu=<xxx> */
char *ix86_isa_string;		/* for -misa=<xxx> */
/* Register allocation order */
char *i386_reg_alloc_order;
static char regs_allocated[FIRST_PSEUDO_REGISTER];

/* # of registers to use to pass arguments.  */
char *i386_regparm_string;	/* # registers to use to pass args */
int i386_regparm;		/* i386_regparm_string as a number */

/* Alignment to use for loops and jumps */
char *i386_align_loops_string;	/* power of two alignment for loops */
char *i386_align_jumps_string;	/* power of two alignment for non-loop jumps */
char *i386_align_funcs_string;	/* power of two alignment for functions */
char *i386_branch_cost_string;	/* values 1-5: see jump.c */

int i386_align_loops;		/* power of two alignment for loops */
int i386_align_jumps;		/* power of two alignment for non-loop jumps */
int i386_align_funcs;		/* power of two alignment for functions */
int i386_branch_cost;		/* values 1-5: see jump.c */
/* Sometimes certain combinations of command options do not make
   sense on a particular target machine.  You can define a macro
   `OVERRIDE_OPTIONS' to take account of this.  This macro, if
   defined, is executed once just after all the command options have
   been parsed.

   Don't use this macro to turn on various extra optimizations for
   `-O'.  That is what `OPTIMIZATION_OPTIONS' is for.  */
struct ptt
{
  char *name;			/* Canonical processor name.  */
  enum processor_type processor; /* Processor type enum value.  */
  struct processor_costs *cost;	/* Processor costs */
  int target_enable;		/* Target flags to enable.  */
  int target_disable;		/* Target flags to disable.  */
} processor_target_table[]
  = {{PROCESSOR_COMMON_STRING, PROCESSOR_COMMON, &i486_cost, 0, 0},
     {PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
     {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
     {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
     {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
     {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0},
     {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0}};

int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
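
/* Illustrative note (added commentary, not from the original source):
   override_options () below looks this table up when handling the -misa=
   and -mcpu= strings, so e.g. "-misa=i486 -mcpu=pentium" should map to
   PROCESSOR_I486 as the architecture level and PROCESSOR_PENTIUM as the
   scheduling CPU, assuming the usual PROCESSOR_*_STRING spellings
   ("i486", "pentium").  */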
#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
  /* Validate registers in register allocation order */
  if (i386_reg_alloc_order)
    {
      for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
	{
	  switch (ch)
	    {
	    case 'a':	regno = 0;	break;
	    case 'd':	regno = 1;	break;
	    case 'c':	regno = 2;	break;
	    case 'b':	regno = 3;	break;
	    case 'S':	regno = 4;	break;
	    case 'D':	regno = 5;	break;
	    case 'B':	regno = 6;	break;

	    default:	fatal ("Register '%c' is unknown", ch);
	    }

	  if (regs_allocated[regno])
	    fatal ("Register '%c' was already specified in the allocation order", ch);

	  regs_allocated[regno] = 1;
	}
    }
  /* Get the architectural level.  */
  if (ix86_isa_string == (char *)0)
    ix86_isa_string = PROCESSOR_DEFAULT_STRING;

  for (i = 0; i < ptt_size; i++)
    if (! strcmp (ix86_isa_string, processor_target_table[i].name))
      {
	ix86_isa = processor_target_table[i].processor;
	if (ix86_cpu_string == (char *)0)
	  ix86_cpu_string = processor_target_table[i].name;
	break;
      }

  if (i == ptt_size)
    {
      error ("bad value (%s) for -misa= switch", ix86_isa_string);
      ix86_isa_string = PROCESSOR_DEFAULT_STRING;
      ix86_isa = PROCESSOR_DEFAULT;
    }

  for (j = 0; j < ptt_size; j++)
    if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
      {
	ix86_cpu = processor_target_table[j].processor;
	if (i > j && (int)ix86_isa >= (int)PROCESSOR_PENTIUMPRO)
	  error ("-mcpu=%s does not support -march=%s",
		 ix86_cpu_string, ix86_isa_string);

	target_flags |= processor_target_table[j].target_enable;
	target_flags &= ~processor_target_table[j].target_disable;
	break;
      }

  if (j == ptt_size)
    {
      error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
      ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
      ix86_cpu = PROCESSOR_DEFAULT;
    }
  /* Validate -mregparm= value */
  if (i386_regparm_string)
    {
      i386_regparm = atoi (i386_regparm_string);
      if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
	fatal ("-mregparm=%d is not between 0 and %d", i386_regparm, REGPARM_MAX);
    }
  def_align = (TARGET_386) ? 2 : 4;
  /* Validate -malign-loops= value, or provide default */
  if (i386_align_loops_string)
    {
      i386_align_loops = atoi (i386_align_loops_string);
      if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
	fatal ("-malign-loops=%d is not between 0 and %d",
	       i386_align_loops, MAX_CODE_ALIGN);
    }
  else
    i386_align_loops = 2;
  /* Validate -malign-jumps= value, or provide default */
  if (i386_align_jumps_string)
    {
      i386_align_jumps = atoi (i386_align_jumps_string);
      if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
	fatal ("-malign-jumps=%d is not between 0 and %d",
	       i386_align_jumps, MAX_CODE_ALIGN);
    }
  else
    i386_align_jumps = def_align;
  /* Validate -malign-functions= value, or provide default */
  if (i386_align_funcs_string)
    {
      i386_align_funcs = atoi (i386_align_funcs_string);
      if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
	fatal ("-malign-functions=%d is not between 0 and %d",
	       i386_align_funcs, MAX_CODE_ALIGN);
    }
  else
    i386_align_funcs = def_align;
  /* Validate -mbranch-cost= value, or provide default */
  if (i386_branch_cost_string)
    {
      i386_branch_cost = atoi (i386_branch_cost_string);
      if (i386_branch_cost < 0 || i386_branch_cost > 5)
	fatal ("-mbranch-cost=%d is not between 0 and 5",
	       i386_branch_cost);
    }
  else
    i386_branch_cost = TARGET_PENTIUMPRO ? 4 : 1;
  if (TARGET_OMIT_LEAF_FRAME_POINTER)	/* keep nonleaf frame pointers */
    flag_omit_frame_pointer = 1;

  /* pic references don't explicitly mention pic_offset_table_rtx */
  /* code threaded into the prologue may conflict with profiling */
  if (flag_pic || profile_flag || profile_block_flag)
    target_flags &= ~MASK_SCHEDULE_PROLOGUE;
/* A C statement (sans semicolon) to choose the order in which to
   allocate hard registers for pseudo-registers local to a basic
   block.

   Store the desired register order in the array `reg_alloc_order'.
   Element 0 should be the register to allocate first; element 1, the
   next register; and so on.

   The macro body should not assume anything about the contents of
   `reg_alloc_order' before execution of the macro.

   On most machines, it is not necessary to define this macro.  */
order_regs_for_local_alloc ()
{
  int i, ch, order, regno;

  /* User specified the register allocation order */
  if (i386_reg_alloc_order)
    {
      for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
	{
	  switch (ch)
	    {
	    case 'a':	regno = 0;	break;
	    case 'd':	regno = 1;	break;
	    case 'c':	regno = 2;	break;
	    case 'b':	regno = 3;	break;
	    case 'S':	regno = 4;	break;
	    case 'D':	regno = 5;	break;
	    case 'B':	regno = 6;	break;
	    }

	  reg_alloc_order[order++] = regno;
	}

      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (!regs_allocated[i])
	  reg_alloc_order[order++] = i;
    }

  /* If users did not specify a register allocation order, use natural order */
  else
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	reg_alloc_order[i] = i;
    }
}
optimization_options (level)
     int level;
{
  /* For -O2, and beyond, turn off -fschedule-insns by default.  It tends to
     make the problem with not enough registers even worse */
#ifdef INSN_SCHEDULING
  if (level > 1)
    flag_schedule_insns = 0;
#endif
}
/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
   attribute for DECL.  The attributes in ATTRIBUTES have previously been
   assigned to DECL.  */

i386_valid_decl_attribute_p (decl, attributes, identifier, args)
/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
   attribute for TYPE.  The attributes in ATTRIBUTES have previously been
   assigned to TYPE.  */

i386_valid_type_attribute_p (type, attributes, identifier, args)
  if (TREE_CODE (type) != FUNCTION_TYPE
      && TREE_CODE (type) != FIELD_DECL
      && TREE_CODE (type) != TYPE_DECL)
    return 0;

  /* Stdcall attribute says callee is responsible for popping arguments
     if they are not variable.  */
  if (is_attribute_p ("stdcall", identifier))
    return (args == NULL_TREE);

  /* Cdecl attribute says the callee is a normal C declaration */
  if (is_attribute_p ("cdecl", identifier))
    return (args == NULL_TREE);

  /* Regparm attribute specifies how many integer arguments are to be
     passed in registers */
  if (is_attribute_p ("regparm", identifier))
    {
      tree cst;

      if (!args || TREE_CODE (args) != TREE_LIST
	  || TREE_CHAIN (args) != NULL_TREE
	  || TREE_VALUE (args) == NULL_TREE)
	return 0;

      cst = TREE_VALUE (args);
      if (TREE_CODE (cst) != INTEGER_CST)
	return 0;

      if (TREE_INT_CST_HIGH (cst) != 0
	  || TREE_INT_CST_LOW (cst) < 0
	  || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
	return 0;
    }
/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */

i386_comp_type_attributes (type1, type2)
/* Value is the number of bytes of arguments automatically
   popped when returning from a subroutine call.
   FUNDECL is the declaration node of the function (as a tree),
   FUNTYPE is the data type of the function (as a tree),
   or for a library call it is an identifier node for the subroutine name.
   SIZE is the number of bytes of arguments passed on the stack.

   On the 80386, the RTD insn may be used to pop them if the number
   of args is fixed, but if the number is variable then the caller
   must pop them all.  RTD can't be used for library calls now
   because the library is compiled with the Unix compiler.
   Use of RTD is a selectable option, since it is incompatible with
   standard Unix calling sequences.  If the option is not selected,
   the caller must always pop the args.

   The attribute stdcall is equivalent to RTD on a per module basis.  */
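
/* Illustrative example (added commentary, not from the original source):
   a prototype such as

	int __attribute__ ((stdcall)) f (int a, int b);

   has a fixed argument list, so under the rule described above the callee
   would pop its 8 bytes of arguments, much as -mrtd would arrange for
   every such function in the module.  */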
i386_return_pops_args (fundecl, funtype, size)
  int rtd = TARGET_RTD;

  if (TREE_CODE (funtype) == IDENTIFIER_NODE)

  /* Cdecl functions override -mrtd, and never pop the stack */
  if (!lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {

    /* Stdcall functions will pop the stack if not variable args */
    if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
      rtd = 1;

    if (rtd
	&& (TYPE_ARG_TYPES (funtype) == NULL_TREE
	    || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype))) == void_type_node)))
      return size;
  }

  /* Lose any fake structure return argument */
  if (aggregate_value_p (TREE_TYPE (funtype)))
    return GET_MODE_SIZE (Pmode);
/* Argument support functions.  */

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.  */

init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;	/* argument info to initialize */
     tree fntype;		/* tree ptr for function decl */
     rtx libname;		/* SYMBOL_REF of library name or 0 */
{
  static CUMULATIVE_ARGS zero_cum;
  tree param, next_param;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\ninit_cumulative_args (");
      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);
	  fprintf (stderr, "fntype code = %s, ret code = %s",
		   tree_code_name[ (int)TREE_CODE (fntype) ],
		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
	}
      else
	fprintf (stderr, "no fntype");

      if (libname)
	fprintf (stderr, ", libname = %s", XSTR (libname, 0));
  /* Set up the number of registers to use for passing arguments.  */
  cum->nregs = i386_regparm;
  if (fntype)
    {
      tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));
      if (attr)
	cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
    }

  /* Determine if this function has variable arguments.  This is
     indicated by the last argument being 'void_type_node' if there
     are no variable arguments.  If there are variable arguments, then
     we won't pass anything in registers */

  for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
       param != (tree)0; param = next_param)
    {
      next_param = TREE_CHAIN (param);
      if (next_param == (tree)0 && TREE_VALUE (param) != void_type_node)
	cum->nregs = 0;
    }

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, ", nregs=%d )\n", cum->nregs);
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int named;			/* whether or not the argument was named */
{
  int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (TARGET_DEBUG_ARG)
    fprintf (stderr,
	     "function_adv( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d )\n\n",
	     words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int named;			/* != 0 for normal args, == 0 for ... args */
{
  int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

    default:			/* for now, pass fp/complex values on the stack */

      if (words <= cum->nregs)
	ret = gen_rtx (REG, mode, cum->regno);

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr,
	       "function_arg( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d",
	       words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);

      if (ret)
	fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
      else
	fprintf (stderr, ", stack");

      fprintf (stderr, " )\n");
    }
/* For an arg passed partly in registers and partly in memory,
   this is the number of registers used.
   For args passed entirely in registers or entirely in memory, zero.  */

function_arg_partial_nregs (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int named;			/* != 0 for normal args, == 0 for ... args */
/* Output an insn whose source is a 386 integer register.  SRC is the
   rtx for the register, and TEMPLATE is the op-code template.  SRC may
   be either SImode or DImode.

   The template will be output with operands[0] as SRC, and operands[1]
   as a pointer to the top of the 386 stack.  So a call from floatsidf2
   would look like this:

      output_op_from_reg (operands[1], AS1 (fild%z0,%1));

   where %z0 corresponds to the caller's operands[1], and is used to
   emit the proper size suffix.

   ??? Extend this to handle HImode - a 387 can load and store HImode
   values directly.  */
output_op_from_reg (src, template)
{
  int size = GET_MODE_SIZE (GET_MODE (src));

  xops[1] = AT_SP (Pmode);
  xops[2] = GEN_INT (size);
  xops[3] = stack_pointer_rtx;

  if (size > UNITS_PER_WORD)
    {
      if (size > 2 * UNITS_PER_WORD)
	{
	  high = gen_rtx (REG, SImode, REGNO (src) + 2);
	  output_asm_insn (AS1 (push%L0,%0), &high);
	}

      high = gen_rtx (REG, SImode, REGNO (src) + 1);
      output_asm_insn (AS1 (push%L0,%0), &high);
    }
  output_asm_insn (AS1 (push%L0,%0), &src);

  output_asm_insn (template, xops);

  output_asm_insn (AS2 (add%L3,%2,%3), xops);
/* Output an insn to pop a value from the 387 top-of-stack to 386
   register DEST.  The 387 register stack is popped if DIES is true.  If
   the mode of DEST is an integer mode, a `fist' integer store is done,
   otherwise a `fst' float store is done.  */

output_to_reg (dest, dies)
{
  int size = GET_MODE_SIZE (GET_MODE (dest));

  xops[0] = AT_SP (Pmode);
  xops[1] = stack_pointer_rtx;
  xops[2] = GEN_INT (size);

  output_asm_insn (AS2 (sub%L1,%2,%1), xops);

  if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
    {
      if (dies)
	output_asm_insn (AS1 (fistp%z3,%y0), xops);
      else
	output_asm_insn (AS1 (fist%z3,%y0), xops);
    }
  else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
    {
      if (dies)
	output_asm_insn (AS1 (fstp%z3,%y0), xops);
      else
	{
	  if (GET_MODE (dest) == XFmode)
	    {
	      output_asm_insn (AS1 (fstp%z3,%y0), xops);
	      output_asm_insn (AS1 (fld%z3,%y0), xops);
	    }
	  else
	    output_asm_insn (AS1 (fst%z3,%y0), xops);
	}
    }

  output_asm_insn (AS1 (pop%L0,%0), &dest);

  if (size > UNITS_PER_WORD)
    {
      dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
      output_asm_insn (AS1 (pop%L0,%0), &dest);

      if (size > 2 * UNITS_PER_WORD)
	{
	  dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
	  output_asm_insn (AS1 (pop%L0,%0), &dest);
	}
    }
singlemove_string (operands)
{
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
    {
      if (XEXP (x, 0) != stack_pointer_rtx)
	abort ();
    }
  else if (GET_CODE (operands[1]) == CONST_DOUBLE)
    return output_move_const_single (operands);
  else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
    return AS2 (mov%L0,%1,%0);
  else if (CONSTANT_P (operands[1]))
    return AS2 (mov%L0,%1,%0);

  output_asm_insn ("push%L1 %1", operands);
/* Return a REG that occurs in ADDR with coefficient 1.
   ADDR can be effectively incremented by incrementing REG.  */

find_addr_reg (addr)
{
  while (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == REG)
	addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 1)) == REG)
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 0)))
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 1)))
	addr = XEXP (addr, 0);
    }

  if (GET_CODE (addr) == REG)
    return addr;
/* Output an insn to add the constant N to the register X.  */

  if (n == -1)
    output_asm_insn (AS1 (dec%L0,%0), xops);
  else if (n == 1)
    output_asm_insn (AS1 (inc%L0,%0), xops);
  else if (n < 0)
    {
      xops[1] = GEN_INT (-n);
      output_asm_insn (AS2 (sub%L0,%1,%0), xops);
    }
  else
    {
      xops[1] = GEN_INT (n);
      output_asm_insn (AS2 (add%L0,%1,%0), xops);
    }
/* Output assembler code to perform a doubleword move insn
   with operands OPERANDS.  */
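
/* Reader's note (added commentary, not from the original source): the
   routine below handles 8-byte values (DImode/DFmode) as two SImode word
   moves and 12-byte XFmode values as three, using latehalf[] and, for the
   12-byte case, middlehalf[] to describe the upper words.  */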
output_move_double (operands)
{
  enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP} optype0, optype1;
  rtx addreg0 = 0, addreg1 = 0;
  int dest_overlapped_low = 0;
  int size = GET_MODE_SIZE (GET_MODE (operands[0]));
  /* First classify both operands.  */

  if (REG_P (operands[0]))
    optype0 = REGOP;
  else if (offsettable_memref_p (operands[0]))
    optype0 = OFFSOP;
  else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
    optype0 = POPOP;
  else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
    optype0 = PUSHOP;
  else if (GET_CODE (operands[0]) == MEM)
    optype0 = MEMOP;
  else
    optype0 = RNDOP;

  if (REG_P (operands[1]))
    optype1 = REGOP;
  else if (CONSTANT_P (operands[1]))
    optype1 = CNSTOP;
  else if (offsettable_memref_p (operands[1]))
    optype1 = OFFSOP;
  else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
    optype1 = POPOP;
  else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
    optype1 = PUSHOP;
  else if (GET_CODE (operands[1]) == MEM)
    optype1 = MEMOP;
  else
    optype1 = RNDOP;
  /* Check for the cases that the operand constraints are not
     supposed to allow to happen.  Abort if we get one,
     because generating code for these cases is painful.  */

  if (optype0 == RNDOP || optype1 == RNDOP)
    abort ();
  /* If one operand is decrementing and one is incrementing
     decrement the former register explicitly
     and change that operand into ordinary indexing.  */

  if (optype0 == PUSHOP && optype1 == POPOP)
    {
      /* ??? Can this ever happen on i386? */
      operands[0] = XEXP (XEXP (operands[0], 0), 0);
      asm_add (-size, operands[0]);
      if (GET_MODE (operands[1]) == XFmode)
	operands[0] = gen_rtx (MEM, XFmode, operands[0]);
      else if (GET_MODE (operands[0]) == DFmode)
	operands[0] = gen_rtx (MEM, DFmode, operands[0]);
      else
	operands[0] = gen_rtx (MEM, DImode, operands[0]);
    }

  if (optype0 == POPOP && optype1 == PUSHOP)
    {
      /* ??? Can this ever happen on i386? */
      operands[1] = XEXP (XEXP (operands[1], 0), 0);
      asm_add (-size, operands[1]);
      if (GET_MODE (operands[1]) == XFmode)
	operands[1] = gen_rtx (MEM, XFmode, operands[1]);
      else if (GET_MODE (operands[1]) == DFmode)
	operands[1] = gen_rtx (MEM, DFmode, operands[1]);
      else
	operands[1] = gen_rtx (MEM, DImode, operands[1]);
    }
  /* If an operand is an unoffsettable memory ref, find a register
     we can increment temporarily to make it refer to the second word.  */

  if (optype0 == MEMOP)
    addreg0 = find_addr_reg (XEXP (operands[0], 0));

  if (optype1 == MEMOP)
    addreg1 = find_addr_reg (XEXP (operands[1], 0));
  /* Ok, we can do one word at a time.
     Normally we do the low-numbered word first,
     but if either operand is autodecrementing then we
     do the high-numbered word first.

     In either case, set up in LATEHALF the operands to use
     for the high-numbered word and in some cases alter the
     operands in OPERANDS to be suitable for the low-numbered word.  */
  if (size == 12)
    {
      if (optype0 == REGOP)
	{
	  middlehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
	  latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 2);
	}
      else if (optype0 == OFFSOP)
	{
	  middlehalf[0] = adj_offsettable_operand (operands[0], 4);
	  latehalf[0] = adj_offsettable_operand (operands[0], 8);
	}
      else
	{
	  middlehalf[0] = operands[0];
	  latehalf[0] = operands[0];
	}

      if (optype1 == REGOP)
	{
	  middlehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
	  latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 2);
	}
      else if (optype1 == OFFSOP)
	{
	  middlehalf[1] = adj_offsettable_operand (operands[1], 4);
	  latehalf[1] = adj_offsettable_operand (operands[1], 8);
	}
      else if (optype1 == CNSTOP)
	{
	  if (GET_CODE (operands[1]) == CONST_DOUBLE)
	    {
	      REAL_VALUE_TYPE r; long l[3];

	      REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
	      REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
	      operands[1] = GEN_INT (l[0]);
	      middlehalf[1] = GEN_INT (l[1]);
	      latehalf[1] = GEN_INT (l[2]);
	    }
	  else if (CONSTANT_P (operands[1]))
	    /* No non-CONST_DOUBLE constant should ever appear here.  */
	    abort ();
	}
      else
	{
	  middlehalf[1] = operands[1];
	  latehalf[1] = operands[1];
	}
    }
  else				/* size is not 12: */
    {
      if (optype0 == REGOP)
	latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
      else if (optype0 == OFFSOP)
	latehalf[0] = adj_offsettable_operand (operands[0], 4);
      else
	latehalf[0] = operands[0];

      if (optype1 == REGOP)
	latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
      else if (optype1 == OFFSOP)
	latehalf[1] = adj_offsettable_operand (operands[1], 4);
      else if (optype1 == CNSTOP)
	split_double (operands[1], &operands[1], &latehalf[1]);
      else
	latehalf[1] = operands[1];
    }
  /* If insn is effectively movd N (sp),-(sp) then we will do the
     high word first.  We should use the adjusted operand 1
     (which is N+4 (sp) or N+8 (sp))
     for the low word and middle word as well,
     to compensate for the first decrement of sp.  */
  if (optype0 == PUSHOP
      && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
      && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
    middlehalf[1] = operands[1] = latehalf[1];
  /* For  (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
     if the upper part of reg N does not appear in the MEM, arrange to
     emit the move late-half first.  Otherwise, compute the MEM address
     into the upper part of N and use that as a pointer to the memory
     word.  */
  if (optype0 == REGOP
      && (optype1 == OFFSOP || optype1 == MEMOP))
    {
      if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
	  && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
	{
	  /* If both halves of dest are used in the src memory address,
	     compute the address into latehalf of dest.  */
	  xops[0] = latehalf[0];
	  xops[1] = XEXP (operands[1], 0);
	  output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
	  if (GET_MODE (operands[1]) == XFmode)
	    {
	      operands[1] = gen_rtx (MEM, XFmode, latehalf[0]);
	      middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
	      latehalf[1] = adj_offsettable_operand (operands[1], size-4);
	    }
	  else
	    {
	      operands[1] = gen_rtx (MEM, DImode, latehalf[0]);
	      latehalf[1] = adj_offsettable_operand (operands[1], size-4);
	    }
	}
      else if (size == 12
	       && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
	{
	  /* Check for two regs used by both source and dest.  */
	  if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
	      || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
	    abort ();

	  /* JRV says this can't happen: */
	  if (addreg0 || addreg1)
	    abort ();

	  /* Only the middle reg conflicts; simply put it last.  */
	  output_asm_insn (singlemove_string (operands), operands);
	  output_asm_insn (singlemove_string (latehalf), latehalf);
	  output_asm_insn (singlemove_string (middlehalf), middlehalf);
	}
      else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
	/* If the low half of dest is mentioned in the source memory
	   address, arrange to emit the move late half first.  */
	dest_overlapped_low = 1;
    }
  /* If one or both operands autodecrementing,
     do the two words, high-numbered first.  */

  /* Likewise, the first move would clobber the source of the second one,
     do them in the other order.  This happens only for registers;
     such overlap can't happen in memory unless the user explicitly
     sets it up, and that is an undefined circumstance.  */

#if 0
  if (optype0 == PUSHOP || optype1 == PUSHOP
      || (optype0 == REGOP && optype1 == REGOP
	  && REGNO (operands[0]) == REGNO (latehalf[1]))
      || dest_overlapped_low)
#endif

  if (optype0 == PUSHOP || optype1 == PUSHOP
      || (optype0 == REGOP && optype1 == REGOP
	  && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
	      || REGNO (operands[0]) == REGNO (latehalf[1])))
      || dest_overlapped_low)
    {
      /* Make any unoffsettable addresses point at high-numbered word.  */
      if (addreg0)
	asm_add (size-4, addreg0);
      if (addreg1)
	asm_add (size-4, addreg1);

      output_asm_insn (singlemove_string (latehalf), latehalf);

      /* Undo the adds we just did.  */
      if (addreg0)
	asm_add (-4, addreg0);
      if (addreg1)
	asm_add (-4, addreg1);

      if (size == 12)
	{
	  output_asm_insn (singlemove_string (middlehalf), middlehalf);
	  if (addreg0)
	    asm_add (-4, addreg0);
	  if (addreg1)
	    asm_add (-4, addreg1);
	}

      /* Do low-numbered word.  */
      return singlemove_string (operands);
    }
  /* Normal case: do the two words, low-numbered first.  */

  output_asm_insn (singlemove_string (operands), operands);

  /* Do the middle one of the three words for long double */
  if (size == 12)
    {
      if (addreg0)
	asm_add (4, addreg0);
      if (addreg1)
	asm_add (4, addreg1);

      output_asm_insn (singlemove_string (middlehalf), middlehalf);
    }

  /* Make any unoffsettable addresses point at high-numbered word.  */
  if (addreg0)
    asm_add (4, addreg0);
  if (addreg1)
    asm_add (4, addreg1);

  output_asm_insn (singlemove_string (latehalf), latehalf);

  /* Undo the adds we just did.  */
  if (addreg0)
    asm_add (4-size, addreg0);
  if (addreg1)
    asm_add (4-size, addreg1);
#define MAX_TMPS 2		/* max temporary registers used */

/* Output the appropriate code to push memory on the stack */
output_move_pushmem (operands, insn, length, tmp_start, n_operands)
{
    } tmp_info[MAX_TMPS];

  rtx src = operands[1];
  int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
  int stack_offset = 0;

  if (!offsettable_memref_p (src))
    fatal_insn ("Source is not offsettable", insn);

  if ((length & 3) != 0)
    fatal_insn ("Pushing non-word aligned size", insn);

  /* Figure out which temporary registers we have available */
  for (i = tmp_start; i < n_operands; i++)
    {
      if (GET_CODE (operands[i]) == REG)
	{
	  if (reg_overlap_mentioned_p (operands[i], src))
	    continue;

	  tmp_info[ max_tmps++ ].xops[1] = operands[i];
	  if (max_tmps == MAX_TMPS)
	    break;
	}
    }

  for (offset = length - 4; offset >= 0; offset -= 4)
    {
      xops[0] = adj_offsettable_operand (src, offset + stack_offset);
      output_asm_insn (AS1(push%L0,%0), xops);
    }

  for (offset = length - 4; offset >= 0; )
    {
      for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
	{
	  tmp_info[num_tmps].load    = AS2(mov%L0,%0,%1);
	  tmp_info[num_tmps].push    = AS1(push%L0,%1);
	  tmp_info[num_tmps].xops[0] = adj_offsettable_operand (src, offset + stack_offset);
	}

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].load, tmp_info[i].xops);

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].push, tmp_info[i].xops);

      stack_offset += 4*num_tmps;
    }
/* Output the appropriate code to move data between two memory locations */

output_move_memory (operands, insn, length, tmp_start, n_operands)
{
    } tmp_info[MAX_TMPS];

  rtx dest = operands[0];
  rtx src = operands[1];
  rtx qi_tmp = NULL_RTX;

  if (GET_CODE (dest) == MEM
      && GET_CODE (XEXP (dest, 0)) == PRE_INC
      && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
    return output_move_pushmem (operands, insn, length, tmp_start, n_operands);

  if (!offsettable_memref_p (src))
    fatal_insn ("Source is not offsettable", insn);

  if (!offsettable_memref_p (dest))
    fatal_insn ("Destination is not offsettable", insn);

  /* Figure out which temporary registers we have available */
  for (i = tmp_start; i < n_operands; i++)
    {
      if (GET_CODE (operands[i]) == REG)
	{
	  if ((length & 1) != 0 && !qi_tmp && QI_REG_P (operands[i]))
	    qi_tmp = operands[i];

	  if (reg_overlap_mentioned_p (operands[i], dest))
	    fatal_insn ("Temporary register overlaps the destination", insn);

	  if (reg_overlap_mentioned_p (operands[i], src))
	    fatal_insn ("Temporary register overlaps the source", insn);

	  tmp_info[ max_tmps++ ].xops[2] = operands[i];
	  if (max_tmps == MAX_TMPS)
	    break;
	}
    }

  if (max_tmps == 0)
    fatal_insn ("No scratch registers were found to do memory->memory moves", insn);

  if ((length & 1) != 0)
    {
      if (!qi_tmp)
	fatal_insn ("No byte register found when moving odd # of bytes.", insn);
    }

      for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
	{
	  if (length >= 4)
	    {
	      tmp_info[num_tmps].load    = AS2(mov%L0,%1,%2);
	      tmp_info[num_tmps].store   = AS2(mov%L0,%2,%0);
	      tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
	      tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
	    }
	  else if (length >= 2)
	    {
	      tmp_info[num_tmps].load    = AS2(mov%W0,%1,%2);
	      tmp_info[num_tmps].store   = AS2(mov%W0,%2,%0);
	      tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
	      tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
	    }
	}

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].load, tmp_info[i].xops);

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].store, tmp_info[i].xops);

      xops[0] = adj_offsettable_operand (dest, offset);
      xops[1] = adj_offsettable_operand (src, offset);

      output_asm_insn (AS2(mov%B0,%1,%2), xops);
      output_asm_insn (AS2(mov%B0,%2,%0), xops);
standard_80387_constant_p (x)
{
#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  if (setjmp (handler))

  set_float_handler (handler);
  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
  is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
  is1 = REAL_VALUES_EQUAL (d, dconst1);
  set_float_handler (NULL_PTR);

  /* Note that on the 80387, other constants, such as pi,
     are much slower to load as standard constants
     than to load from doubles in memory!  */
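
/* Inferred note (added commentary, not from the original source): callers
   such as output_move_const_single () below appear to treat a return value
   of 1 as "+0.0, loadable with fldz" and 2 as "1.0, loadable with fld1";
   any other constant is loaded from memory instead.  */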
output_move_const_single (operands)
{
  if (FP_REG_P (operands[0]))
    {
      int conval = standard_80387_constant_p (operands[1]);
    }

  if (GET_CODE (operands[1]) == CONST_DOUBLE)
    {
      REAL_VALUE_TYPE r; long l;

      if (GET_MODE (operands[1]) == XFmode)
	abort ();

      REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
      REAL_VALUE_TO_TARGET_SINGLE (r, l);
      operands[1] = GEN_INT (l);
    }

  return singlemove_string (operands);
/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.  */

symbolic_operand (op, mode)
     enum machine_mode mode;
{
  switch (GET_CODE (op))
    {
      return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    }
/* Test for a valid operand for a call instruction.
   Don't allow the arg pointer register or virtual regs
   since they may change into reg + const, which the patterns
   can't handle yet.  */

call_insn_operand (op, mode)
     enum machine_mode mode;
{
  if (GET_CODE (op) == MEM
      && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
	   /* This makes a difference for PIC.  */
	   && general_operand (XEXP (op, 0), Pmode))
	  || (GET_CODE (XEXP (op, 0)) == REG
	      && XEXP (op, 0) != arg_pointer_rtx
	      && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
		   && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
    return 1;

  return 0;
/* Like call_insn_operand but allow (mem (symbol_ref ...))  */

expander_call_insn_operand (op, mode)
     enum machine_mode mode;
{
  if (GET_CODE (op) == MEM
      && (CONSTANT_ADDRESS_P (XEXP (op, 0))
	  || (GET_CODE (XEXP (op, 0)) == REG
	      && XEXP (op, 0) != arg_pointer_rtx
	      && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
		   && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
    return 1;

  return 0;
/* Return 1 if OP is a comparison operator that can use the condition code
   generated by an arithmetic operation.  */

arithmetic_comparison_operator (op, mode)
     enum machine_mode mode;
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  code = GET_CODE (op);
  if (GET_RTX_CLASS (code) != '<')
    return 0;

  return (code != GT && code != LE);
/* Returns 1 if OP contains a symbol reference */

symbolic_reference_mentioned_p (op)
{
  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
	return 1;
    }
/* Attempt to expand a binary operator.  Make the expansion closer to the
   actual machine than just general_operand, which will allow 3 separate
   memory references (one output, two input) in a single insn.  Return
   whether the insn fails, or succeeds.  */

ix86_expand_binary_operator (code, mode, operands)
     enum machine_mode mode;
{
  /* Recognize <var1> = <value> <op> <var1> for commutative operators */
  if (GET_RTX_CLASS (code) == 'c'
      && (rtx_equal_p (operands[0], operands[2])
	  || immediate_operand (operands[1], mode)))
    {
      rtx temp = operands[1];
      operands[1] = operands[2];
      operands[2] = temp;
    }

  /* If optimizing, copy to regs to improve CSE */
  if (TARGET_PSEUDO && optimize && ((reload_in_progress | reload_completed) == 0))
    {
      if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
	operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);

      if (GET_CODE (operands[2]) == MEM)
	operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);

      if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
	{
	  rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
	  emit_move_insn (temp, operands[1]);
	  operands[1] = temp;
	}
    }

  if (!ix86_binary_operator_ok (code, mode, operands))
    {
      /* If not optimizing, try to make a valid insn (optimize code previously did
	 this above to improve chances of CSE) */

      if ((!TARGET_PSEUDO || !optimize)
	  && ((reload_in_progress | reload_completed) == 0)
	  && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
	{
	  if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
	    operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);

	  if (GET_CODE (operands[2]) == MEM)
	    operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);

	  if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
	      emit_move_insn (temp, operands[1]);
	      operands[1] = temp;
	    }
	}

      if (modified && !ix86_binary_operator_ok (code, mode, operands))
    }
/* Return TRUE or FALSE depending on whether the binary operator meets the
   appropriate constraints.  */

ix86_binary_operator_ok (code, mode, operands)
     enum machine_mode mode;
{
  return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
    && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
}
/* Attempt to expand a unary operator.  Make the expansion closer to the
   actual machine than just general_operand, which will allow 2 separate
   memory references (one output, one input) in a single insn.  Return
   whether the insn fails, or succeeds.  */

ix86_expand_unary_operator (code, mode, operands)
     enum machine_mode mode;
{
  /* If optimizing, copy to regs to improve CSE */
  if (TARGET_PSEUDO
      && optimize
      && ((reload_in_progress | reload_completed) == 0)
      && GET_CODE (operands[1]) == MEM)
    operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);

  if (!ix86_unary_operator_ok (code, mode, operands))
    {
      if ((!TARGET_PSEUDO || !optimize)
	  && ((reload_in_progress | reload_completed) == 0)
	  && GET_CODE (operands[1]) == MEM)
	{
	  operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
	  if (!ix86_unary_operator_ok (code, mode, operands))
	}
    }
/* Return TRUE or FALSE depending on whether the unary operator meets the
   appropriate constraints.  */

ix86_unary_operator_ok (code, mode, operands)
     enum machine_mode mode;
static rtx pic_label_rtx;

/* This function generates code for -fpic that loads %ebx with
   the return address of the caller and then returns.  */

asm_output_function_prefix (file, name)
{
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  xops[0] = pic_offset_table_rtx;
  xops[1] = stack_pointer_rtx;

  /* deep branch prediction favors having a return for every call */
  if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
    {
      if (pic_label_rtx == 0)
	pic_label_rtx = (rtx) gen_label_rtx ();
      ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (pic_label_rtx));
      output_asm_insn ("movl (%1),%0", xops);
      output_asm_insn ("ret", xops);
    }
/* Set up the stack and frame (if desired) for the function.  */

function_prologue (file, size)
{
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  long tsize = get_frame_size ();

  /* pic references don't explicitly mention pic_offset_table_rtx */
  if (TARGET_SCHEDULE_PROLOGUE)

  xops[0] = stack_pointer_rtx;
  xops[1] = frame_pointer_rtx;
  xops[2] = GEN_INT (tsize);
  if (frame_pointer_needed)
    {
      output_asm_insn ("push%L1 %1", xops);
      output_asm_insn (AS2 (mov%L0,%0,%1), xops);
    }

  output_asm_insn (AS2 (sub%L0,%2,%0), xops);

  /* Note: if you use enter the args are NOT reversed.
     This one is not reversed from intel!!
     I think enter is slower.  Also sdb doesn't like it.
     But if you want it the code is:
	xops[3] = const0_rtx;
	output_asm_insn ("enter %2,%3", xops);  */

  limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
  for (regno = limit - 1; regno >= 0; regno--)
    if ((regs_ever_live[regno] && ! call_used_regs[regno])
	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
      {
	xops[0] = gen_rtx (REG, SImode, regno);
	output_asm_insn ("push%L0 %0", xops);
      }

  if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
    {
      xops[0] = pic_offset_table_rtx;
      if (pic_label_rtx == 0)
	pic_label_rtx = (rtx) gen_label_rtx ();
      xops[1] = pic_label_rtx;

      output_asm_insn (AS1 (call,%P1), xops);
      output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
    }
  else if (pic_reg_used)
    {
      xops[0] = pic_offset_table_rtx;
      xops[1] = (rtx) gen_label_rtx ();

      output_asm_insn (AS1 (call,%P1), xops);
      ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (xops[1]));
      output_asm_insn (AS1 (pop%L0,%0), xops);
      output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
    }
/* This function generates the assembly code for function entry.
   FILE is an stdio stream to output the code to.
   SIZE is an int: how many units of temporary storage to allocate.  */

ix86_expand_prologue ()
{
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  long tsize = get_frame_size ();

  if (!TARGET_SCHEDULE_PROLOGUE)

  xops[0] = stack_pointer_rtx;
  xops[1] = frame_pointer_rtx;
  xops[2] = GEN_INT (tsize);
  if (frame_pointer_needed)
    {
      emit_insn (gen_rtx (SET, 0,
			  gen_rtx (MEM, SImode,
				   gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
			  frame_pointer_rtx));
      emit_move_insn (xops[1], xops[0]);
    }

  emit_insn (gen_rtx (SET, SImode,
		      xops[0],
		      gen_rtx (MINUS, SImode, xops[0], xops[2])));

  /* Note: if you use enter the args are NOT reversed.
     This one is not reversed from intel!!
     I think enter is slower.  Also sdb doesn't like it.
     But if you want it the code is:
	xops[3] = const0_rtx;
	output_asm_insn ("enter %2,%3", xops);  */

  limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
  for (regno = limit - 1; regno >= 0; regno--)
    if ((regs_ever_live[regno] && ! call_used_regs[regno])
	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
      {
	xops[0] = gen_rtx (REG, SImode, regno);
	emit_insn (gen_rtx (SET, 0,
			    gen_rtx (MEM, SImode,
				     gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
			    xops[0]));
      }

  if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
    {
      xops[0] = pic_offset_table_rtx;
      if (pic_label_rtx == 0)
	pic_label_rtx = (rtx) gen_label_rtx ();
      xops[1] = pic_label_rtx;

      emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
      emit_insn (gen_prologue_set_got (xops[0],
				       gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
				       gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
    }
  else if (pic_reg_used)
    {
      xops[0] = pic_offset_table_rtx;
      xops[1] = (rtx) gen_label_rtx ();

      emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
      emit_insn (gen_pop (xops[0]));
      emit_insn (gen_prologue_set_got (xops[0],
				       gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
				       gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER (xops[1]))));
    }
/* Restore function stack, frame, and registers.  */

function_epilogue (file, size)
/* Return 1 if it is appropriate to emit `ret' instructions in the
   body of a function.  Do this only if the epilogue is simple, needing a
   couple of insns.  Prior to reloading, we can't tell how many registers
   must be saved, so return 0 then.  Return 0 if there is no frame
   marker to de-allocate.

   If NON_SAVING_SETJMP is defined and true, then it is not possible
   for the epilogue to be simple, so return 0.  This is a special case
   since NON_SAVING_SETJMP will not cause regs_ever_live to change
   until final, but jump_optimize may need to know sooner if a
   `return' is OK.  */

ix86_can_use_return_insn_p ()
{
  int reglimit = (frame_pointer_needed
		  ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);

#ifdef NON_SAVING_SETJMP
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    return 0;
#endif

  if (! reload_completed)
    return 0;

  for (regno = reglimit - 1; regno >= 0; regno--)
    if ((regs_ever_live[regno] && ! call_used_regs[regno])
	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
      nregs++;

  return nregs == 0 || ! frame_pointer_needed;
/* This function generates the assembly code for function exit.
   FILE is an stdio stream to output the code to.
   SIZE is an int: how many units of temporary storage to deallocate.  */

ix86_expand_epilogue ()
{
  register int nregs, limit;
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  long tsize = get_frame_size ();
  /* Compute the number of registers to pop */

  limit = (frame_pointer_needed
	   ? FRAME_POINTER_REGNUM
	   : STACK_POINTER_REGNUM);

  for (regno = limit - 1; regno >= 0; regno--)
    if ((regs_ever_live[regno] && ! call_used_regs[regno])
	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
      nregs++;
  /* sp is often unreliable so we must go off the frame pointer,
     if possible.  */

  /* In reality, we may not care if sp is unreliable, because we can
     restore the register relative to the frame pointer.  In theory,
     since each move is the same speed as a pop, and we don't need the
     leal, this is faster.  For now restore multiple registers the old
     way.  */

  offset = -tsize - (nregs * UNITS_PER_WORD);

  xops[2] = stack_pointer_rtx;

  if (nregs > 1 || ! frame_pointer_needed)
    {
      if (frame_pointer_needed)
	{
	  xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
	  emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
	  /* output_asm_insn (AS2 (lea%L2,%0,%2), xops);*/
	}

      for (regno = 0; regno < limit; regno++)
	if ((regs_ever_live[regno] && ! call_used_regs[regno])
	    || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
	  {
	    xops[0] = gen_rtx (REG, SImode, regno);
	    emit_insn (gen_pop (xops[0]));
	    /* output_asm_insn ("pop%L0 %0", xops);*/
	  }
    }
  else
    for (regno = 0; regno < limit; regno++)
      if ((regs_ever_live[regno] && ! call_used_regs[regno])
	  || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
	{
	  xops[0] = gen_rtx (REG, SImode, regno);
	  xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);
	  emit_move_insn (xops[0], xops[1]);
	  /* output_asm_insn (AS2 (mov%L0,%1,%0), xops);*/
	}

  if (frame_pointer_needed)
    {
      /* If not an i386, mov & pop is faster than "leave".  */

      if (TARGET_USE_LEAVE)
	emit_insn (gen_leave());
	/* output_asm_insn ("leave", xops);*/
      else
	{
	  xops[0] = frame_pointer_rtx;
	  xops[1] = stack_pointer_rtx;
	  emit_insn (gen_epilogue_set_stack_ptr());
	  /* output_asm_insn (AS2 (mov%L2,%0,%2), xops);*/
	  emit_insn (gen_pop (xops[0]));
	  /* output_asm_insn ("pop%L0 %0", xops);*/
	}
    }
      /* If there is no frame pointer, we must still release the frame.  */

      xops[0] = GEN_INT (tsize);
      emit_insn (gen_rtx (SET, SImode,
			  xops[2],
			  gen_rtx (PLUS, SImode, xops[2], xops[0])));
      /* output_asm_insn (AS2 (add%L2,%0,%2), xops);*/
#ifdef FUNCTION_BLOCK_PROFILER_EXIT
  if (profile_block_flag == 2)
    {
      FUNCTION_BLOCK_PROFILER_EXIT(file);
    }
#endif

  if (current_function_pops_args && current_function_args_size)
    {
      xops[1] = GEN_INT (current_function_pops_args);

      /* i386 can only pop 32K bytes (maybe 64K?  Is it signed?).  If
	 asked to pop more, pop return address, do explicit add, and jump
	 indirectly to the caller.  */

      if (current_function_pops_args >= 32768)
	{
	  /* ??? Which register to use here? */
	  xops[0] = gen_rtx (REG, SImode, 2);
	  emit_insn (gen_pop (xops[0]));
	  /* output_asm_insn ("pop%L0 %0", xops);*/
	  emit_insn (gen_rtx (SET, SImode,
			      xops[2],
			      gen_rtx (PLUS, SImode, xops[1], xops[2])));
	  /* output_asm_insn (AS2 (add%L2,%1,%2), xops);*/
	  emit_jump_insn (xops[0]);
	  /* output_asm_insn ("jmp %*%0", xops);*/
	}
      else
	emit_jump_insn (gen_return_pop_internal (xops[1]));
	/* output_asm_insn ("ret %1", xops);*/
    }
  else
    /* output_asm_insn ("ret", xops);*/
    emit_jump_insn (gen_return_internal ());
/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   On x86, legitimate addresses are:
	base				movl (base),reg
	displacement			movl disp,reg
	base + displacement		movl disp(base),reg
	index + base			movl (base,index),reg
	(index + base) + displacement	movl disp(base,index),reg
	index*scale			movl (,index,scale),reg
	index*scale + disp		movl disp(,index,scale),reg
	index*scale + base 		movl (base,index,scale),reg
	(index*scale + base) + disp	movl disp(base,index,scale),reg

	In each case, scale can be 1, 2, 4, 8.  */
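
/* Worked example (added commentary, not from the original source): the
   address written in assembler as disp(base,index,scale), e.g.
   8(%ebp,%eax,4), corresponds to the canonical RTL

	(plus (plus (mult (reg:SI eax) (const_int 4)) (reg:SI ebp))
	      (const_int 8))

   which is how the recognizer below decomposes it into base, indx,
   scale and disp.  */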
/* This is exactly the same as print_operand_addr, except that
   it recognizes addresses instead of printing them.

   It only recognizes address in canonical form.  LEGITIMIZE_ADDRESS should
   convert common non-canonical forms to canonical form so that they will
   be recognized.  */

#define ADDR_INVALID(msg,insn)						\
do {									\
  if (TARGET_DEBUG_ADDR)						\
    {									\
      fprintf (stderr, msg);						\
      debug_rtx (insn);							\
    }									\
} while (0)
legitimate_address_p (mode, addr, strict)
     enum machine_mode mode;
{
  rtx base  = NULL_RTX;
  rtx indx  = NULL_RTX;
  rtx scale = NULL_RTX;
  rtx disp  = NULL_RTX;

  if (TARGET_DEBUG_ADDR)
    fprintf (stderr,
	     "\n==========\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
	     GET_MODE_NAME (mode), strict);
  if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
    base = addr;			/* base reg */

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == SUBREG)
	{
	  if (code1 == REG || code1 == SUBREG)
	    {
	      indx = op0;		/* index + base */
	      base = op1;
	    }
	  else
	    {
	      base = op0;		/* base + displacement */
	      disp = op1;
	    }
	}
      else if (code0 == MULT)
	{
	  indx  = XEXP (op0, 0);
	  scale = XEXP (op0, 1);

	  if (code1 == REG || code1 == SUBREG)
	    base = op1;			/* index*scale + base */
	  else
	    disp = op1;			/* index*scale + disp */
	}
      else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
	{
	  indx  = XEXP (XEXP (op0, 0), 0); /* index*scale + base + disp */
	  scale = XEXP (XEXP (op0, 0), 1);
	  base  = XEXP (op0, 1);
	  disp  = op1;
	}
      else if (code0 == PLUS)
	{
	  indx = XEXP (op0, 0);		/* index + base + disp */
	  base = XEXP (op0, 1);
	  disp = op1;
	}
      else
	{
	  ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
	  return FALSE;
	}
    }

  else if (GET_CODE (addr) == MULT)
    {
      indx  = XEXP (addr, 0);		/* index*scale */
      scale = XEXP (addr, 1);
    }

  else
    disp = addr;			/* displacement */
  /* Allow arg pointer and stack pointer as index if there is no scaling */
  if (base && indx && !scale
      && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
  /* Validate base register */
  /* Don't allow SUBREG's here, it can lead to spill failures when the base
     is one word out of a two word structure, which is represented internally
     as a DImode int.  */

  if (base)
    {
      if (GET_CODE (base) != REG)
	{
	  ADDR_INVALID ("Base is not a register.\n", base);
	  return FALSE;
	}

      if ((strict && !REG_OK_FOR_BASE_STRICT_P (base))
	  || (!strict && !REG_OK_FOR_BASE_NONSTRICT_P (base)))
	{
	  ADDR_INVALID ("Base is not valid.\n", base);
	  return FALSE;
	}
    }

  /* Validate index register */
  /* Don't allow SUBREG's here, it can lead to spill failures when the index
     is one word out of a two word structure, which is represented internally
     as a DImode int.  */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
	{
	  ADDR_INVALID ("Index is not a register.\n", indx);
	  return FALSE;
	}

      if ((strict && !REG_OK_FOR_INDEX_STRICT_P (indx))
	  || (!strict && !REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
	{
	  ADDR_INVALID ("Index is not valid.\n", indx);
	  return FALSE;
	}
    }
  else if (scale)
    abort ();				/* scale w/o index invalid */
  /* Validate scale factor */
  if (scale)
    {
      HOST_WIDE_INT value;

      if (GET_CODE (scale) != CONST_INT)
	{
	  ADDR_INVALID ("Scale is not valid.\n", scale);
	  return FALSE;
	}

      value = INTVAL (scale);
      if (value != 1 && value != 2 && value != 4 && value != 8)
	{
	  ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
	  return FALSE;
	}
    }
  /* Validate displacement
     Constant pool addresses must be handled special.  They are
     considered legitimate addresses, but only if not used with regs.
     When printed, the output routines know to print the reference with the
     PIC reg, even though the PIC reg doesn't appear in the RTL.  */
  if (disp)
    {
      if (GET_CODE (disp) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (disp)
	  && base == NULL_RTX
	  && indx == NULL_RTX)
	;

      else if (!CONSTANT_ADDRESS_P (disp))
	{
	  ADDR_INVALID ("Displacement is not valid.\n", disp);
	  return FALSE;
	}

      else if (GET_CODE (disp) == CONST_DOUBLE)
	{
	  ADDR_INVALID ("Displacement is a const_double.\n", disp);
	  return FALSE;
	}

      else if (flag_pic && SYMBOLIC_CONST (disp)
	       && base != pic_offset_table_rtx
	       && (indx != pic_offset_table_rtx || scale != NULL_RTX))
	{
	  ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp);
	  return FALSE;
	}

      else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp)
	       && (base != NULL_RTX || indx != NULL_RTX))
	{
	  ADDR_INVALID ("Displacement is an invalid half-pic reference.\n", disp);
	  return FALSE;
	}
    }

  if (TARGET_DEBUG_ADDR)
    fprintf (stderr, "Address is valid.\n");

  /* Everything looks valid, return true */
  return TRUE;
2306 /* Return a legitimate reference for ORIG (an address) using the
2307 register REG. If REG is 0, a new pseudo is generated.
2309 There are three types of references that must be handled:
2311 1. Global data references must load the address from the GOT, via
2312 the PIC reg. An insn is emitted to do this load, and the reg is
2315 2. Static data references must compute the address as an offset
2316 from the GOT, whose base is in the PIC reg. An insn is emitted to
2317 compute the address into a reg, and the reg is returned. Static
2318 data objects have SYMBOL_REF_FLAG set to differentiate them from
2319 global data objects.
2321 3. Constant pool addresses must be handled specially. They are
2322 considered legitimate addresses, but only if not used with regs.
2323 When printed, the output routines know to print the reference with the
2324 PIC reg, even though the PIC reg doesn't appear in the RTL.
2326 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2327 reg also appears in the address (except for constant pool references,
2330 "switch" statements also require special handling when generating
2331 PIC code. See comments by the `casesi' insn in i386.md for details. */
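/* A rough illustration of the three cases above (register names are only
   examples): a global `foo' has its address loaded as  movl foo@GOT(%ebx),%reg
   (case 1), a static `bar' is computed as  leal bar@GOTOFF(%ebx),%reg  (case 2),
   and a constant pool entry is simply printed as  .LC0@GOTOFF(%ebx)  (case 3).  */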
2334 legitimize_pic_address (orig, reg)
2341 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
2343 if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
2348 reg = gen_reg_rtx (Pmode);
2350 if ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FLAG (addr))
2351 || GET_CODE (addr) == LABEL_REF)
2352 new = gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig);
2354 new = gen_rtx (MEM, Pmode,
2355 gen_rtx (PLUS, Pmode,
2356 pic_offset_table_rtx, orig));
2358 emit_move_insn (reg, new);
2360 current_function_uses_pic_offset_table = 1;
2363 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
2367 if (GET_CODE (addr) == CONST)
2369 addr = XEXP (addr, 0);
2370 if (GET_CODE (addr) != PLUS)
2374 if (XEXP (addr, 0) == pic_offset_table_rtx)
2378 reg = gen_reg_rtx (Pmode);
2380 base = legitimize_pic_address (XEXP (addr, 0), reg);
2381 addr = legitimize_pic_address (XEXP (addr, 1),
2382 base == reg ? NULL_RTX : reg);
2384 if (GET_CODE (addr) == CONST_INT)
2385 return plus_constant (base, INTVAL (addr));
2387 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
2389 base = gen_rtx (PLUS, Pmode, base, XEXP (addr, 0));
2390 addr = XEXP (addr, 1);
2392 return gen_rtx (PLUS, Pmode, base, addr);
2398 /* Emit insns to move operands[1] into operands[0]. */
2401 emit_pic_move (operands, mode)
2403 enum machine_mode mode;
2405 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
2407 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2408 operands[1] = (rtx) force_reg (SImode, operands[1]);
2410 operands[1] = legitimize_pic_address (operands[1], temp);
2414 /* Try machine-dependent ways of modifying an illegitimate address
2415 to be legitimate. If we find one, return the new, valid address.
2416 This macro is used in only one place: `memory_address' in explow.c.
2418 OLDX is the address as it was before break_out_memory_refs was called.
2419 In some cases it is useful to look at this to decide what needs to be done.
2421 MODE and WIN are passed so that this macro can use
2422 GO_IF_LEGITIMATE_ADDRESS.
2424 It is always safe for this macro to do nothing. It exists to recognize
2425 opportunities to optimize the output.
2427 For the 80386, we handle X+REG by loading X into a register R and
2428 using R+REG. R will go in a general reg and indexing will be used.
2429 However, if REG is a broken-out memory address or multiplication,
2430 nothing needs to be done because REG can certainly go in a general reg.
2432 When -fpic is used, special handling is needed for symbolic references.
2433 See comments by legitimize_pic_address in i386.c for details. */
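/* A sketch of the X+REG case described above (pseudo register numbers are
   hypothetical): an address such as  (plus (symbol_ref "x") (reg 100))  may be
   rewritten by loading the symbol into a new pseudo R and using
   (plus (reg R) (reg 100)), which the hardware encodes directly as base+index.  */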
2436 legitimize_address (x, oldx, mode)
2439 enum machine_mode mode;
2444 if (TARGET_DEBUG_ADDR)
2446 fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n", GET_MODE_NAME (mode));
2450 if (flag_pic && SYMBOLIC_CONST (x))
2451 return legitimize_pic_address (x, 0);
2453 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2454 if (GET_CODE (x) == ASHIFT
2455 && GET_CODE (XEXP (x, 1)) == CONST_INT
2456 && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
2459 x = gen_rtx (MULT, Pmode,
2460 force_reg (Pmode, XEXP (x, 0)),
2461 GEN_INT (1 << log));
2464 if (GET_CODE (x) == PLUS)
2466 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2467 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2468 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2469 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
2472 XEXP (x, 0) = gen_rtx (MULT, Pmode,
2473 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
2474 GEN_INT (1 << log));
2477 if (GET_CODE (XEXP (x, 1)) == ASHIFT
2478 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2479 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
2482 XEXP (x, 1) = gen_rtx (MULT, Pmode,
2483 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
2484 GEN_INT (1 << log));
2487 /* Put multiply first if it isn't already */
2488 if (GET_CODE (XEXP (x, 1)) == MULT)
2490 rtx tmp = XEXP (x, 0);
2491 XEXP (x, 0) = XEXP (x, 1);
2496 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2497 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2498 created by virtual register instantiation, register elimination, and
2499 similar optimizations. */
2500 if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
2503 x = gen_rtx (PLUS, Pmode,
2504 gen_rtx (PLUS, Pmode, XEXP (x, 0), XEXP (XEXP (x, 1), 0)),
2505 XEXP (XEXP (x, 1), 1));
2508 /* Canonicalize (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2509 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2510 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
2511 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2512 && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
2513 && CONSTANT_P (XEXP (x, 1)))
2515 rtx constant, other;
2517 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2519 constant = XEXP (x, 1);
2520 other = XEXP (XEXP (XEXP (x, 0), 1), 1);
2522 else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
2524 constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
2525 other = XEXP (x, 1);
2533 x = gen_rtx (PLUS, Pmode,
2534 gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
2535 XEXP (XEXP (XEXP (x, 0), 1), 0)),
2536 plus_constant (other, INTVAL (constant)));
2540 if (changed && legitimate_address_p (mode, x, FALSE))
2543 if (GET_CODE (XEXP (x, 0)) == MULT)
2546 XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
2549 if (GET_CODE (XEXP (x, 1)) == MULT)
2552 XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
2556 && GET_CODE (XEXP (x, 1)) == REG
2557 && GET_CODE (XEXP (x, 0)) == REG)
2560 if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
2563 x = legitimize_pic_address (x, 0);
2566 if (changed && legitimate_address_p (mode, x, FALSE))
2569 if (GET_CODE (XEXP (x, 0)) == REG)
2571 register rtx temp = gen_reg_rtx (Pmode);
2572 register rtx val = force_operand (XEXP (x, 1), temp);
2574 emit_move_insn (temp, val);
2580 else if (GET_CODE (XEXP (x, 1)) == REG)
2582 register rtx temp = gen_reg_rtx (Pmode);
2583 register rtx val = force_operand (XEXP (x, 0), temp);
2585 emit_move_insn (temp, val);
2596 /* Print an integer constant expression in assembler syntax. Addition
2597 and subtraction are the only arithmetic that may appear in these
2598 expressions. FILE is the stdio stream to write to, X is the rtx, and
2599 CODE is the operand print code from the output string. */
2602 output_pic_addr_const (file
, x
, code
)
2609 switch (GET_CODE (x
))
2620 if (GET_CODE (x
) == SYMBOL_REF
)
2621 assemble_name (file
, XSTR (x
, 0));
2624 ASM_GENERATE_INTERNAL_LABEL (buf
, "L",
2625 CODE_LABEL_NUMBER (XEXP (x
, 0)));
2626 assemble_name (asm_out_file
, buf
);
2629 if (GET_CODE (x
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x
))
2630 fprintf (file
, "@GOTOFF(%%ebx)");
2631 else if (code
== 'P')
2632 fprintf (file
, "@PLT");
2633 else if (GET_CODE (x
) == LABEL_REF
)
2634 fprintf (file
, "@GOTOFF");
2635 else if (! SYMBOL_REF_FLAG (x
))
2636 fprintf (file
, "@GOT");
2638 fprintf (file
, "@GOTOFF");
2643 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (x
));
2644 assemble_name (asm_out_file
, buf
);
2648 fprintf (file
, "%d", INTVAL (x
));
2652 /* This used to output parentheses around the expression,
2653 but that does not work on the 386 (either ATT or BSD assembler). */
2654 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2658 if (GET_MODE (x
) == VOIDmode
)
2660 /* We can use %d if the number is <32 bits and positive. */
2661 if (CONST_DOUBLE_HIGH (x
) || CONST_DOUBLE_LOW (x
) < 0)
2662 fprintf (file
, "0x%x%08x",
2663 CONST_DOUBLE_HIGH (x
), CONST_DOUBLE_LOW (x
));
2665 fprintf (file
, "%d", CONST_DOUBLE_LOW (x
));
2668 /* We can't handle floating point constants;
2669 PRINT_OPERAND must handle them. */
2670 output_operand_lossage ("floating constant misused");
2674 /* Some assemblers need integer constants to appear last (eg masm). */
2675 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
2677 output_pic_addr_const (file
, XEXP (x
, 1), code
);
2678 if (INTVAL (XEXP (x
, 0)) >= 0)
2679 fprintf (file
, "+");
2680 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2684 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2685 if (INTVAL (XEXP (x
, 1)) >= 0)
2686 fprintf (file
, "+");
2687 output_pic_addr_const (file
, XEXP (x
, 1), code
);
2692 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2693 fprintf (file
, "-");
2694 output_pic_addr_const (file
, XEXP (x
, 1), code
);
2698 output_operand_lossage ("invalid expression as operand");
2702 /* Append the correct conditional move suffix which corresponds to CODE */
2705 put_condition_code (code, mode, file)
2707 enum mode_class mode;
2710 if (mode == MODE_INT)
2714 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2720 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2726 fputs ("ge", file
); return;
2728 fputs ("g", file
); return;
2730 fputs ("le", file
); return;
2732 fputs ("l", file
); return;
2734 fputs ("ae", file
); return;
2736 fputs ("a", file
); return;
2738 fputs ("be", file
); return;
2740 fputs ("b", file
); return;
2741 default: output_operand_lossage ("Invalid %%C operand");
2743 else if (mode
== MODE_FLOAT
)
2747 fputs ("ne", file
); return;
2749 fputs ("e", file
); return;
2751 fputs ("nb", file
); return;
2753 fputs ("nbe", file
); return;
2755 fputs ("be", file
); return;
2757 fputs ("b", file
); return;
2759 fputs ("nb", file
); return;
2761 fputs ("nbe", file
); return;
2763 fputs ("be", file
); return;
2765 fputs ("b", file
); return;
2766 default: output_operand_lossage ("Invalid %%C operand");
2771 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
2772 C -- print opcode suffix for set/cmov insn.
2773 c -- like C, but print reversed condition
2774 F -- print opcode suffix for fcmov insn.
2775 f -- like F, but print reversed condition
2776 R -- print the prefix for register names.
2777 z -- print the opcode suffix for the size of the current operand.
2778 * -- print a star (in certain assembler syntax)
2779 w -- print the operand as if it's a "word" (HImode) even if it isn't.
2780 c -- don't print special prefixes before constant operands.
2781 J -- print the appropriate jump operand.
2782 s -- print a shift double count, followed by the assembler's argument
2784 b -- print the QImode name of the register for the indicated operand.
2785 %b0 would print %al if operands[0] is reg 0.
2786 w -- likewise, print the HImode name of the register.
2787 k -- likewise, print the SImode name of the register.
2788 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
2789 y -- print "st(0)" instead of "st" as a register.
2790 P -- print as a PIC constant
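/* A few illustrative examples of the codes above (assuming operands[0] is
   hard register 0, i.e. %eax):  %b0 prints %al, %w0 prints %ax, %k0 prints
   %eax, %h0 prints %ah, and %y0 prints st(0) when operands[0] is the 387
   stack top.  */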
2794 print_operand (file
, x
, code
)
2809 PUT_OP_SIZE (code
, 'l', file
);
2813 PUT_OP_SIZE (code
, 'w', file
);
2817 PUT_OP_SIZE (code
, 'b', file
);
2821 PUT_OP_SIZE (code
, 'l', file
);
2825 PUT_OP_SIZE (code
, 's', file
);
2829 PUT_OP_SIZE (code
, 't', file
);
2833 /* 387 opcodes don't get size suffixes if the operands are
2836 if (STACK_REG_P (x
))
2839 /* this is the size of op from size of operand */
2840 switch (GET_MODE_SIZE (GET_MODE (x
)))
2843 PUT_OP_SIZE ('B', 'b', file
);
2847 PUT_OP_SIZE ('W', 'w', file
);
2851 if (GET_MODE (x
) == SFmode
)
2853 PUT_OP_SIZE ('S', 's', file
);
2857 PUT_OP_SIZE ('L', 'l', file
);
2861 PUT_OP_SIZE ('T', 't', file
);
2865 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
)
2867 #ifdef GAS_MNEMONICS
2868 PUT_OP_SIZE ('Q', 'q', file
);
2871 PUT_OP_SIZE ('Q', 'l', file
); /* Fall through */
2875 PUT_OP_SIZE ('Q', 'l', file
);
2888 switch (GET_CODE (x
))
2890 /* These conditions are appropriate for testing the result
2891 of an arithmetic operation, not for a compare operation.
2892 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
2893 CC_Z_IN_NOT_C false and not floating point. */
2894 case NE
: fputs ("jne", file
); return;
2895 case EQ
: fputs ("je", file
); return;
2896 case GE
: fputs ("jns", file
); return;
2897 case LT
: fputs ("js", file
); return;
2898 case GEU
: fputs ("jmp", file
); return;
2899 case GTU
: fputs ("jne", file
); return;
2900 case LEU
: fputs ("je", file
); return;
2901 case LTU
: fputs ("#branch never", file
); return;
2903 /* no matching branches for GT nor LE */
2908 if (GET_CODE (x
) == CONST_INT
|| ! SHIFT_DOUBLE_OMITS_COUNT
)
2910 PRINT_OPERAND (file
, x
, 0);
2911 fputs (AS2C (,) + 1, file
);
2915 /* This is used by the conditional move instructions. */
2917 put_condition_code (GET_CODE (x
), MODE_INT
, file
);
2920 /* like above, but reverse condition */
2922 put_condition_code (reverse_condition (GET_CODE (x
)), MODE_INT
, file
);
2926 put_condition_code (GET_CODE (x
), MODE_FLOAT
, file
);
2929 /* like above, but reverse condition */
2931 put_condition_code (reverse_condition (GET_CODE (x
)),
2939 sprintf (str
, "invalid operand code `%c'", code
);
2940 output_operand_lossage (str
);
2944 if (GET_CODE (x
) == REG
)
2946 PRINT_REG (x
, code
, file
);
2948 else if (GET_CODE (x
) == MEM
)
2950 PRINT_PTR (x
, file
);
2951 if (CONSTANT_ADDRESS_P (XEXP (x
, 0)))
2954 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2956 output_addr_const (file
, XEXP (x
, 0));
2959 output_address (XEXP (x
, 0));
2961 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
2963 REAL_VALUE_TYPE r
; long l
;
2964 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
2965 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
2966 PRINT_IMMED_PREFIX (file
);
2967 fprintf (file
, "0x%x", l
);
2969 /* These float cases don't actually occur as immediate operands. */
2970 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
2972 REAL_VALUE_TYPE r
; char dstr
[30];
2973 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
2974 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
2975 fprintf (file
, "%s", dstr
);
2977 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == XFmode
)
2979 REAL_VALUE_TYPE r
; char dstr
[30];
2980 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
2981 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
2982 fprintf (file
, "%s", dstr
);
2988 if (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
)
2989 PRINT_IMMED_PREFIX (file
);
2990 else if (GET_CODE (x
) == CONST
|| GET_CODE (x
) == SYMBOL_REF
2991 || GET_CODE (x
) == LABEL_REF
)
2992 PRINT_OFFSET_PREFIX (file
);
2995 output_pic_addr_const (file
, x
, code
);
2997 output_addr_const (file
, x
);
3001 /* Print a memory operand whose address is ADDR. */
3004 print_operand_address (file, addr)
3008 register rtx reg1, reg2, breg, ireg;
3011 switch (GET_CODE (addr))
3015 fprintf (file, "%se", RP);
3016 fputs (hi_reg_name[REGNO (addr)], file);
3026 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0)))
3028 offset
= XEXP (addr
, 0);
3029 addr
= XEXP (addr
, 1);
3031 else if (CONSTANT_ADDRESS_P (XEXP (addr
, 1)))
3033 offset
= XEXP (addr
, 1);
3034 addr
= XEXP (addr
, 0);
3036 if (GET_CODE (addr
) != PLUS
) ;
3037 else if (GET_CODE (XEXP (addr
, 0)) == MULT
)
3039 reg1
= XEXP (addr
, 0);
3040 addr
= XEXP (addr
, 1);
3042 else if (GET_CODE (XEXP (addr
, 1)) == MULT
)
3044 reg1
= XEXP (addr
, 1);
3045 addr
= XEXP (addr
, 0);
3047 else if (GET_CODE (XEXP (addr
, 0)) == REG
)
3049 reg1
= XEXP (addr
, 0);
3050 addr
= XEXP (addr
, 1);
3052 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
3054 reg1
= XEXP (addr
, 1);
3055 addr
= XEXP (addr
, 0);
3057 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == MULT
)
3059 if (reg1
== 0) reg1
= addr
;
3065 if (addr
!= 0) abort ();
3068 if ((reg1
&& GET_CODE (reg1
) == MULT
)
3069 || (reg2
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2
))))
3074 else if (reg1
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1
)))
3080 if (ireg
!= 0 || breg
!= 0)
3087 output_pic_addr_const (file
, addr
, 0);
3089 else if (GET_CODE (addr
) == LABEL_REF
)
3090 output_asm_label (addr
);
3093 output_addr_const (file
, addr
);
3096 if (ireg
!= 0 && GET_CODE (ireg
) == MULT
)
3098 scale
= INTVAL (XEXP (ireg
, 1));
3099 ireg
= XEXP (ireg
, 0);
3102 /* The stack pointer can only appear as a base register,
3103 never an index register, so exchange the regs if it is wrong. */
3105 if (scale
== 1 && ireg
&& REGNO (ireg
) == STACK_POINTER_REGNUM
)
3114 /* output breg+ireg*scale */
3115 PRINT_B_I_S (breg
, ireg
, scale
, file
);
3122 if (GET_CODE (XEXP (addr
, 0)) == CONST_INT
)
3124 scale
= INTVAL (XEXP (addr
, 0));
3125 ireg
= XEXP (addr
, 1);
3129 scale
= INTVAL (XEXP (addr
, 1));
3130 ireg
= XEXP (addr
, 0);
3132 output_addr_const (file
, const0_rtx
);
3133 PRINT_B_I_S ((rtx
) 0, ireg
, scale
, file
);
3138 if (GET_CODE (addr
) == CONST_INT
3139 && INTVAL (addr
) < 0x8000
3140 && INTVAL (addr
) >= -0x8000)
3141 fprintf (file
, "%d", INTVAL (addr
));
3145 output_pic_addr_const (file
, addr
, 0);
3147 output_addr_const (file
, addr
);
3152 /* Set the cc_status for the results of an insn whose pattern is EXP.
3153 On the 80386, we assume that only test and compare insns, as well
3154 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3155 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3156 Also, we assume that jumps, moves and sCOND don't affect the condition
3157 codes. All else clobbers the condition codes, by assumption.
3159 We assume that ALL integer add, minus, etc. instructions affect the
3160 condition codes. This MUST be consistent with i386.md.
3162 We don't record any float test or compare - the redundant test &
3163 compare check in final.c does not handle stack-like regs correctly. */
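/* For instance (a sketch): after  (set (reg:SI 0) (plus:SI (reg:SI 0) (reg:SI 1)))
   the code below records the PLUS in cc_status.value1 and the destination in
   cc_status.value2, so a following compare of reg 0 against zero can reuse the
   flags instead of emitting a separate test.  */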
3166 notice_update_cc (exp)
3169 if (GET_CODE (exp) == SET)
3171 /* Jumps do not alter the cc's. */
3172 if (SET_DEST (exp
) == pc_rtx
)
3174 #ifdef IS_STACK_MODE
3175 /* A move into stack-mode memory may have been moved in between
3176 the use and set of cc0 by loop_spl(), so the old value of
3177 cc_status must be retained */
3178 if(GET_CODE(SET_DEST(exp
))==MEM
3179 && IS_STACK_MODE(GET_MODE(SET_DEST(exp
))))
3184 /* Moving register or memory into a register:
3185 it doesn't alter the cc's, but it might invalidate
3186 the RTX's which we remember the cc's came from.
3187 (Note that moving a constant 0 or 1 MAY set the cc's). */
3188 if (REG_P (SET_DEST (exp
))
3189 && (REG_P (SET_SRC (exp
)) || GET_CODE (SET_SRC (exp
)) == MEM
3190 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3192 if (cc_status
.value1
3193 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3194 cc_status
.value1
= 0;
3195 if (cc_status
.value2
3196 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3197 cc_status
.value2
= 0;
3200 /* Moving register into memory doesn't alter the cc's.
3201 It may invalidate the RTX's which we remember the cc's came from. */
3202 if (GET_CODE (SET_DEST (exp
)) == MEM
3203 && (REG_P (SET_SRC (exp
))
3204 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3206 if (cc_status
.value1
&& GET_CODE (cc_status
.value1
) == MEM
3207 || reg_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3208 cc_status
.value1
= 0;
3209 if (cc_status
.value2
&& GET_CODE (cc_status
.value2
) == MEM
3210 || reg_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3211 cc_status
.value2
= 0;
3214 /* Function calls clobber the cc's. */
3215 else if (GET_CODE (SET_SRC (exp
)) == CALL
)
3220 /* Tests and compares set the cc's in predictable ways. */
3221 else if (SET_DEST (exp
) == cc0_rtx
)
3224 cc_status
.value1
= SET_SRC (exp
);
3227 /* Certain instructions affect the condition codes. */
3228 else if (GET_MODE (SET_SRC (exp
)) == SImode
3229 || GET_MODE (SET_SRC (exp
)) == HImode
3230 || GET_MODE (SET_SRC (exp
)) == QImode
)
3231 switch (GET_CODE (SET_SRC (exp
)))
3233 case ASHIFTRT
: case LSHIFTRT
:
3235 /* Shifts on the 386 don't set the condition codes if the
3236 shift count is zero. */
3237 if (GET_CODE (XEXP (SET_SRC (exp
), 1)) != CONST_INT
)
3242 /* We assume that the CONST_INT is non-zero (this rtx would
3243 have been deleted if it were zero).
3245 case PLUS
: case MINUS
: case NEG
:
3246 case AND
: case IOR
: case XOR
:
3247 cc_status
.flags
= CC_NO_OVERFLOW
;
3248 cc_status
.value1
= SET_SRC (exp
);
3249 cc_status
.value2
= SET_DEST (exp
);
3260 else if (GET_CODE (exp
) == PARALLEL
3261 && GET_CODE (XVECEXP (exp
, 0, 0)) == SET
)
3263 if (SET_DEST (XVECEXP (exp
, 0, 0)) == pc_rtx
)
3265 if (SET_DEST (XVECEXP (exp
, 0, 0)) == cc0_rtx
)
3268 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp
, 0, 0))))
3269 cc_status
.flags
|= CC_IN_80387
;
3271 cc_status
.value1
= SET_SRC (XVECEXP (exp
, 0, 0));
3282 /* Split one or more DImode RTL references into pairs of SImode
3283 references. The RTL can be REG, offsettable MEM, integer constant, or
3284 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3285 split and "num" is its length. lo_half and hi_half are output arrays
3286 that parallel "operands". */
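/* A small example of the splitting done below (hypothetical operands): a
   DImode hard register pair starting at reg 0 splits into lo = (reg:SI 0) and
   hi = (reg:SI 1); an offsettable MEM splits into the MEM itself for the low
   word and the same MEM offset by 4 bytes for the high word.  */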
3289 split_di (operands, num, lo_half, hi_half)
3292 rtx lo_half[], hi_half[];
3296 if (GET_CODE (operands[num]) == REG)
3298 lo_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]));
3299 hi_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]) + 1);
3301 else if (CONSTANT_P (operands[num]))
3303 split_double (operands[num], &lo_half[num], &hi_half[num]);
3305 else if (offsettable_memref_p (operands[num]))
3307 lo_half[num] = operands[num];
3308 hi_half[num] = adj_offsettable_operand (operands[num], 4);
3315 /* Return 1 if this is a valid binary operation on a 387.
3316 OP is the expression matched, and MODE is its mode. */
3319 binary_387_op (op
, mode
)
3321 enum machine_mode mode
;
3323 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3326 switch (GET_CODE (op
))
3332 return GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
;
3340 /* Return 1 if this is a valid shift or rotate operation on a 386.
3341 OP is the expression matched, and MODE is its mode. */
3346 enum machine_mode mode
;
3348 rtx operand
= XEXP (op
, 0);
3350 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3353 if (GET_MODE (operand
) != GET_MODE (op
)
3354 || GET_MODE_CLASS (GET_MODE (op
)) != MODE_INT
)
3357 return (GET_CODE (op
) == ASHIFT
3358 || GET_CODE (op
) == ASHIFTRT
3359 || GET_CODE (op
) == LSHIFTRT
3360 || GET_CODE (op
) == ROTATE
3361 || GET_CODE (op
) == ROTATERT
);
3364 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3365 MODE is not used. */
3368 VOIDmode_compare_op (op
, mode
)
3370 enum machine_mode mode
;
3372 return GET_CODE (op
) == COMPARE
&& GET_MODE (op
) == VOIDmode
;
3375 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3376 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3377 is the expression of the binary operation. The output may either be
3378 emitted here, or returned to the caller, like all output_* functions.
3380 There is no guarantee that the operands are the same mode, as they
3381 might be within FLOAT or FLOAT_EXTEND expressions. */
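/* As an informal example (the exact templates depend on the operands): a
   DFmode PLUS with both operands on the 387 stack where operands[2] dies
   typically produces a popping "faddp", while a MEM operand produces
   something like "faddl mem"; MULT and DIV use their "fmul"/"fdiv" bases in
   the same way.  */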
3384 output_387_binary_op (insn
, operands
)
3390 static char buf
[100];
3392 switch (GET_CODE (operands
[3]))
3395 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3396 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3403 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3404 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3411 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3412 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3419 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3420 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3430 strcpy (buf
, base_op
);
3432 switch (GET_CODE (operands
[3]))
3436 if (REG_P (operands
[2]) && REGNO (operands
[0]) == REGNO (operands
[2]))
3439 operands
[2] = operands
[1];
3443 if (GET_CODE (operands
[2]) == MEM
)
3444 return strcat (buf
, AS1 (%z2
,%2));
3446 if (NON_STACK_REG_P (operands
[1]))
3448 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
3451 else if (NON_STACK_REG_P (operands
[2]))
3453 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3457 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3458 return strcat (buf
, AS2 (p
,%2,%0));
3460 if (STACK_TOP_P (operands
[0]))
3461 return strcat (buf
, AS2C (%y2
,%0));
3463 return strcat (buf
, AS2C (%2,%0));
3467 if (GET_CODE (operands
[1]) == MEM
)
3468 return strcat (buf
, AS1 (r
%z1
,%1));
3470 if (GET_CODE (operands
[2]) == MEM
)
3471 return strcat (buf
, AS1 (%z2
,%2));
3473 if (NON_STACK_REG_P (operands
[1]))
3475 output_op_from_reg (operands
[1], strcat (buf
, AS1 (r
%z0
,%1)));
3478 else if (NON_STACK_REG_P (operands
[2]))
3480 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3484 if (! STACK_REG_P (operands
[1]) || ! STACK_REG_P (operands
[2]))
3487 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3488 return strcat (buf
, AS2 (rp
,%2,%0));
3490 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1])))
3491 return strcat (buf
, AS2 (p
,%1,%0));
3493 if (STACK_TOP_P (operands
[0]))
3495 if (STACK_TOP_P (operands
[1]))
3496 return strcat (buf
, AS2C (%y2
,%0));
3498 return strcat (buf
, AS2 (r
,%y1
,%0));
3500 else if (STACK_TOP_P (operands
[1]))
3501 return strcat (buf
, AS2C (%1,%0));
3503 return strcat (buf
, AS2 (r
,%2,%0));
3510 /* Output code for INSN to convert a float to a signed int. OPERANDS
3511 are the insn operands. The input may be SFmode, DFmode, or XFmode and the
3512 output operand may be SImode or DImode. As a special case, make sure
3513 that the 387 stack top dies if the output mode is DImode, because the
3514 hardware requires this. */
3517 output_fix_trunc (insn
, operands
)
3521 int stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
3524 if (! STACK_TOP_P (operands
[1]) ||
3525 (GET_MODE (operands
[0]) == DImode
&& ! stack_top_dies
))
3528 xops
[0] = GEN_INT (12);
3529 xops
[1] = operands
[4];
3531 output_asm_insn (AS1 (fnstc
%W2
,%2), operands
);
3532 output_asm_insn (AS2 (mov
%L2
,%2,%4), operands
);
3533 output_asm_insn (AS2 (mov
%B1
,%0,%h1
), xops
);
3534 output_asm_insn (AS2 (mov
%L4
,%4,%3), operands
);
3535 output_asm_insn (AS1 (fldc
%W3
,%3), operands
);
3537 if (NON_STACK_REG_P (operands
[0]))
3538 output_to_reg (operands
[0], stack_top_dies
);
3539 else if (GET_CODE (operands
[0]) == MEM
)
3542 output_asm_insn (AS1 (fistp
%z0
,%0), operands
);
3544 output_asm_insn (AS1 (fist
%z0
,%0), operands
);
3549 return AS1 (fldc
%W2
,%2);
3552 /* Output code for INSN to compare OPERANDS. The two operands might
3553 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3554 expression. If the compare is in mode CCFPEQmode, use an opcode that
3555 will not fault if a qNaN is present. */
3558 output_float_compare (insn
, operands
)
3563 rtx body
= XVECEXP (PATTERN (insn
), 0, 0);
3564 int unordered_compare
= GET_MODE (SET_SRC (body
)) == CCFPEQmode
;
3565 int target_fcomi
= TARGET_CMOVE
&& STACK_REG_P (operands
[1]);
3568 if (! STACK_TOP_P (operands
[0]))
3571 operands
[0] = operands
[1];
3573 cc_status
.flags
|= CC_REVERSED
;
3576 if (! STACK_TOP_P (operands
[0]))
3579 stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
3581 if (STACK_REG_P (operands
[1])
3583 && find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1]))
3584 && REGNO (operands
[1]) != FIRST_STACK_REG
)
3586 /* If both the top of the 387 stack dies, and the other operand
3587 is also a stack register that dies, then this must be a
3588 `fcompp' float compare */
3590 if (unordered_compare
)
3591 output_asm_insn ("fucompp", operands
);
3593 output_asm_insn ("fcompp", operands
);
3597 static char buf
[100];
3599 /* Decide if this is the integer or float compare opcode, or the
3600 unordered float compare. */
3602 if (unordered_compare
)
3603 strcpy (buf
, target_fcomi
? "fucomi" : "fucom");
3604 else if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_FLOAT
)
3605 strcpy (buf
, target_fcomi
? "fcomi" : "fcom");
3607 strcpy (buf
, "ficom");
3609 /* Modify the opcode if the 387 stack is to be popped. */
3614 if (NON_STACK_REG_P (operands
[1]))
3615 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
3616 else if (target_fcomi
)
3620 xops
[0] = operands
[0];
3621 xops
[1] = operands
[1];
3622 xops
[2] = operands
[0];
3624 output_asm_insn (strcat (buf
, AS2 (%z1
,%y1
,%2)), xops
);
3628 output_asm_insn (strcat (buf
, AS1 (%z1
,%y1
)), operands
);
3631 /* Now retrieve the condition code. */
3633 return output_fp_cc0_set (insn
);
3636 /* Output opcodes to transfer the results of FP compare or test INSN
3637 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
3638 result of the compare or test is unordered, no comparison operator
3639 succeeds except NE. Return an output template, if any. */
3642 output_fp_cc0_set (insn
)
3646 rtx unordered_label
;
3650 xops
[0] = gen_rtx (REG
, HImode
, 0);
3651 output_asm_insn (AS1 (fnsts
%W0
,%0), xops
);
3653 if (! TARGET_IEEE_FP
)
3655 if (!(cc_status
.flags
& CC_REVERSED
))
3657 next
= next_cc0_user (insn
);
3659 if (GET_CODE (next
) == JUMP_INSN
3660 && GET_CODE (PATTERN (next
)) == SET
3661 && SET_DEST (PATTERN (next
)) == pc_rtx
3662 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
3664 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
3666 else if (GET_CODE (PATTERN (next
)) == SET
)
3668 code
= GET_CODE (SET_SRC (PATTERN (next
)));
3674 if (code
== GT
|| code
== LT
|| code
== EQ
|| code
== NE
3675 || code
== LE
|| code
== GE
)
3676 { /* We will test eax directly */
3677 cc_status
.flags
|= CC_TEST_AX
;
3684 next
= next_cc0_user (insn
);
3685 if (next
== NULL_RTX
)
3688 if (GET_CODE (next
) == JUMP_INSN
3689 && GET_CODE (PATTERN (next
)) == SET
3690 && SET_DEST (PATTERN (next
)) == pc_rtx
3691 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
3693 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
3695 else if (GET_CODE (PATTERN (next
)) == SET
)
3697 if (GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
3698 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
3699 else code
= GET_CODE (SET_SRC (PATTERN (next
)));
3704 xops
[0] = gen_rtx (REG
, QImode
, 0);
3709 xops
[1] = GEN_INT (0x45);
3710 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3715 xops
[1] = GEN_INT (0x45);
3716 xops
[2] = GEN_INT (0x01);
3717 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3718 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
3723 xops
[1] = GEN_INT (0x05);
3724 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3729 xops
[1] = GEN_INT (0x45);
3730 xops
[2] = GEN_INT (0x40);
3731 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3732 output_asm_insn (AS1 (dec
%B0
,%h0
), xops
);
3733 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
3738 xops
[1] = GEN_INT (0x45);
3739 xops
[2] = GEN_INT (0x40);
3740 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3741 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
3746 xops
[1] = GEN_INT (0x44);
3747 xops
[2] = GEN_INT (0x40);
3748 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3749 output_asm_insn (AS2 (xor%B0
,%2,%h0
), xops
);
3763 #define MAX_386_STACK_LOCALS 2
3765 static rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
3767 /* Define the structure for the machine field in struct function. */
3768 struct machine_function
3770 rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
3773 /* Functions to save and restore i386_stack_locals.
3774 These will be called, via pointer variables,
3775 from push_function_context and pop_function_context. */
3778 save_386_machine_status (p)
3781 p->machine = (struct machine_function *) xmalloc (sizeof i386_stack_locals);
3782 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
3783 sizeof i386_stack_locals);
3787 restore_386_machine_status (p)
3790 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
3791 sizeof i386_stack_locals);
3795 /* Clear stack slot assignments remembered from previous functions.
3796 This is called from INIT_EXPANDERS once before RTL is emitted for each
3800 clear_386_stack_locals ()
3802 enum machine_mode mode
;
3805 for (mode
= VOIDmode
; (int) mode
< (int) MAX_MACHINE_MODE
;
3806 mode
= (enum machine_mode
) ((int) mode
+ 1))
3807 for (n
= 0; n
< MAX_386_STACK_LOCALS
; n
++)
3808 i386_stack_locals
[(int) mode
][n
] = NULL_RTX
;
3810 /* Arrange to save and restore i386_stack_locals around nested functions. */
3811 save_machine_status
= save_386_machine_status
;
3812 restore_machine_status
= restore_386_machine_status
;
3815 /* Return a MEM corresponding to a stack slot with mode MODE.
3816 Allocate a new slot if necessary.
3818 The RTL for a function can have several slots available: N is
3819 which slot to use. */
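/* A minimal usage sketch (hypothetical caller; `value' is assumed to be an
   SImode rtx the caller already has):  */
#if 0
  {
    rtx slot = assign_386_stack_local (SImode, 0);  /* MEM for scratch slot 0 */
    emit_move_insn (slot, value);                   /* spill value to the slot */
  }
#endif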
3822 assign_386_stack_local (mode, n)
3823 enum machine_mode mode;
3826 if (n < 0 || n >= MAX_386_STACK_LOCALS)
3829 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
3830 i386_stack_locals[(int) mode][n]
3831 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
3833 return i386_stack_locals[(int) mode][n];
3839 enum machine_mode mode
;
3841 return (GET_CODE (op
) == MULT
);
3846 enum machine_mode mode
;
3848 return (GET_CODE (op
) == DIV
);
3853 /* Create a new copy of an rtx.
3854 Recursively copies the operands of the rtx,
3855 except for those few rtx codes that are sharable.
3856 Doesn't share CONST */
3864 register RTX_CODE code
;
3865 register char *format_ptr
;
3867 code
= GET_CODE (orig
);
3880 /* SCRATCH must be shared because they represent distinct values. */
3885 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
3886 a LABEL_REF, it isn't sharable. */
3887 if (GET_CODE (XEXP (orig
, 0)) == PLUS
3888 && GET_CODE (XEXP (XEXP (orig
, 0), 0)) == SYMBOL_REF
3889 && GET_CODE (XEXP (XEXP (orig
, 0), 1)) == CONST_INT
)
3893 /* A MEM with a constant address is not sharable. The problem is that
3894 the constant address may need to be reloaded. If the mem is shared,
3895 then reloading one copy of this mem will cause all copies to appear
3896 to have been reloaded. */
3899 copy
= rtx_alloc (code
);
3900 PUT_MODE (copy
, GET_MODE (orig
));
3901 copy
->in_struct
= orig
->in_struct
;
3902 copy
->volatil
= orig
->volatil
;
3903 copy
->unchanging
= orig
->unchanging
;
3904 copy
->integrated
= orig
->integrated
;
3906 copy
->is_spill_rtx
= orig
->is_spill_rtx
;
3908 format_ptr
= GET_RTX_FORMAT (GET_CODE (copy
));
3910 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (copy
)); i
++)
3912 switch (*format_ptr
++)
3915 XEXP (copy
, i
) = XEXP (orig
, i
);
3916 if (XEXP (orig
, i
) != NULL
)
3917 XEXP (copy
, i
) = copy_rtx (XEXP (orig
, i
));
3922 XEXP (copy
, i
) = XEXP (orig
, i
);
3927 XVEC (copy
, i
) = XVEC (orig
, i
);
3928 if (XVEC (orig
, i
) != NULL
)
3930 XVEC (copy
, i
) = rtvec_alloc (XVECLEN (orig
, i
));
3931 for (j
= 0; j
< XVECLEN (copy
, i
); j
++)
3932 XVECEXP (copy
, i
, j
) = copy_rtx (XVECEXP (orig
, i
, j
));
3937 XWINT (copy
, i
) = XWINT (orig
, i
);
3941 XINT (copy
, i
) = XINT (orig
, i
);
3946 XSTR (copy
, i
) = XSTR (orig
, i
);
3957 /* try to rewrite a memory address to make it valid */
3959 rewrite_address (mem_rtx
)
3962 rtx index_rtx
, base_rtx
, offset_rtx
, scale_rtx
, ret_rtx
;
3964 int offset_adjust
= 0;
3965 int was_only_offset
= 0;
3966 rtx mem_addr
= XEXP (mem_rtx
, 0);
3967 char *storage
= (char *) oballoc (0);
3969 int is_spill_rtx
= 0;
3971 in_struct
= MEM_IN_STRUCT_P (mem_rtx
);
3972 is_spill_rtx
= RTX_IS_SPILL_P (mem_rtx
);
3974 if (GET_CODE (mem_addr
) == PLUS
&&
3975 GET_CODE (XEXP (mem_addr
, 1)) == PLUS
&&
3976 GET_CODE (XEXP (XEXP (mem_addr
, 1), 0)) == REG
)
3977 { /* this part is utilized by the combiner */
3979 gen_rtx (PLUS
, GET_MODE (mem_addr
),
3980 gen_rtx (PLUS
, GET_MODE (XEXP (mem_addr
, 1)),
3982 XEXP (XEXP (mem_addr
, 1), 0)),
3983 XEXP (XEXP (mem_addr
, 1), 1));
3984 if (memory_address_p (GET_MODE (mem_rtx
), ret_rtx
))
3986 XEXP (mem_rtx
, 0) = ret_rtx
;
3987 RTX_IS_SPILL_P (ret_rtx
) = is_spill_rtx
;
3993 /* this part is utilized by loop.c */
3994 /* If the address contains PLUS (reg,const) and this pattern is invalid
3995 in this context, try to rewrite the address to make it valid.
3997 storage
= (char *) oballoc (0);
3998 index_rtx
= base_rtx
= offset_rtx
= NULL
;
3999 /* find the base index and offset elements of the memory address */
4000 if (GET_CODE (mem_addr
) == PLUS
)
4002 if (GET_CODE (XEXP (mem_addr
, 0)) == REG
)
4004 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4006 base_rtx
= XEXP (mem_addr
, 1);
4007 index_rtx
= XEXP (mem_addr
, 0);
4011 base_rtx
= XEXP (mem_addr
, 0);
4012 offset_rtx
= XEXP (mem_addr
, 1);
4015 else if (GET_CODE (XEXP (mem_addr
, 0)) == MULT
)
4017 index_rtx
= XEXP (mem_addr
, 0);
4018 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4020 base_rtx
= XEXP (mem_addr
, 1);
4024 offset_rtx
= XEXP (mem_addr
, 1);
4027 else if (GET_CODE (XEXP (mem_addr
, 0)) == PLUS
)
4030 if (GET_CODE (XEXP (XEXP (mem_addr
, 0), 0)) == PLUS
&&
4031 GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0)) == MULT
&&
4032 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 0)) == REG
&&
4033 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 1)) == CONST_INT
&&
4034 GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1)) == CONST_INT
&&
4035 GET_CODE (XEXP (XEXP (mem_addr
, 0), 1)) == REG
&&
4036 GET_CODE (XEXP (mem_addr
, 1)) == SYMBOL_REF
)
4038 index_rtx
= XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0);
4039 offset_rtx
= XEXP (mem_addr
, 1);
4040 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4041 offset_adjust
= INTVAL (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1));
4045 offset_rtx
= XEXP (mem_addr
, 1);
4046 index_rtx
= XEXP (XEXP (mem_addr
, 0), 0);
4047 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4050 else if (GET_CODE (XEXP (mem_addr
, 0)) == CONST_INT
)
4052 was_only_offset
= 1;
4055 offset_rtx
= XEXP (mem_addr
, 1);
4056 offset_adjust
= INTVAL (XEXP (mem_addr
, 0));
4057 if (offset_adjust
== 0)
4059 XEXP (mem_rtx
, 0) = offset_rtx
;
4060 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4070 else if (GET_CODE (mem_addr
) == MULT
)
4072 index_rtx
= mem_addr
;
4079 if (index_rtx
&& GET_CODE (index_rtx
) == MULT
)
4081 if (GET_CODE (XEXP (index_rtx
, 1)) != CONST_INT
)
4086 scale_rtx
= XEXP (index_rtx
, 1);
4087 scale
= INTVAL (scale_rtx
);
4088 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4090 /* now find which of the elements are invalid and try to fix them */
4091 if (index_rtx
&& GET_CODE (index_rtx
) == CONST_INT
&& base_rtx
== NULL
)
4093 offset_adjust
= INTVAL (index_rtx
) * scale
;
4094 if (offset_rtx
&& GET_CODE (offset_rtx
) == CONST
&&
4095 GET_CODE (XEXP (offset_rtx
, 0)) == PLUS
)
4097 if (GET_CODE (XEXP (XEXP (offset_rtx
, 0), 0)) == SYMBOL_REF
&&
4098 GET_CODE (XEXP (XEXP (offset_rtx
, 0), 1)) == CONST_INT
)
4100 offset_rtx
= copy_all_rtx (offset_rtx
);
4101 XEXP (XEXP (offset_rtx
, 0), 1) =
4102 gen_rtx (CONST_INT
, 0, INTVAL (XEXP (XEXP (offset_rtx
, 0), 1)) + offset_adjust
);
4103 if (!CONSTANT_P (offset_rtx
))
4110 else if (offset_rtx
&& GET_CODE (offset_rtx
) == SYMBOL_REF
)
4113 gen_rtx (CONST
, GET_MODE (offset_rtx
),
4114 gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4116 gen_rtx (CONST_INT
, 0, offset_adjust
)));
4117 if (!CONSTANT_P (offset_rtx
))
4123 else if (offset_rtx
&& GET_CODE (offset_rtx
) == CONST_INT
)
4125 offset_rtx
= gen_rtx (CONST_INT
, 0, INTVAL (offset_rtx
) + offset_adjust
);
4127 else if (!offset_rtx
)
4129 offset_rtx
= gen_rtx (CONST_INT
, 0, 0);
4131 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4132 XEXP (mem_rtx
, 0) = offset_rtx
;
4135 if (base_rtx
&& GET_CODE (base_rtx
) == PLUS
&&
4136 GET_CODE (XEXP (base_rtx
, 0)) == REG
&&
4137 GET_CODE (XEXP (base_rtx
, 1)) == CONST_INT
)
4139 offset_adjust
+= INTVAL (XEXP (base_rtx
, 1));
4140 base_rtx
= copy_all_rtx (XEXP (base_rtx
, 0));
4142 else if (base_rtx
&& GET_CODE (base_rtx
) == CONST_INT
)
4144 offset_adjust
+= INTVAL (base_rtx
);
4147 if (index_rtx
&& GET_CODE (index_rtx
) == PLUS
&&
4148 GET_CODE (XEXP (index_rtx
, 0)) == REG
&&
4149 GET_CODE (XEXP (index_rtx
, 1)) == CONST_INT
)
4151 offset_adjust
+= INTVAL (XEXP (index_rtx
, 1)) * scale
;
4152 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4156 if (!LEGITIMATE_INDEX_P (index_rtx
)
4157 && !(index_rtx
== stack_pointer_rtx
&& scale
== 1 && base_rtx
== NULL
))
4165 if (!LEGITIMATE_INDEX_P (base_rtx
) && GET_CODE (base_rtx
) != REG
)
4171 if (offset_adjust
!= 0)
4175 if (GET_CODE (offset_rtx
) == CONST
&&
4176 GET_CODE (XEXP (offset_rtx
, 0)) == PLUS
)
4178 if (GET_CODE (XEXP (XEXP (offset_rtx
, 0), 0)) == SYMBOL_REF
&&
4179 GET_CODE (XEXP (XEXP (offset_rtx
, 0), 1)) == CONST_INT
)
4181 offset_rtx
= copy_all_rtx (offset_rtx
);
4182 XEXP (XEXP (offset_rtx
, 0), 1) =
4183 gen_rtx (CONST_INT
, 0, INTVAL (XEXP (XEXP (offset_rtx
, 0), 1)) + offset_adjust
);
4184 if (!CONSTANT_P (offset_rtx
))
4191 else if (GET_CODE (offset_rtx
) == SYMBOL_REF
)
4194 gen_rtx (CONST
, GET_MODE (offset_rtx
),
4195 gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4197 gen_rtx (CONST_INT
, 0, offset_adjust
)));
4198 if (!CONSTANT_P (offset_rtx
))
4204 else if (GET_CODE (offset_rtx
) == CONST_INT
)
4206 offset_rtx
= gen_rtx (CONST_INT
, 0, INTVAL (offset_rtx
) + offset_adjust
);
4216 offset_rtx
= gen_rtx (CONST_INT
, 0, offset_adjust
);
4224 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4225 INTVAL (offset_rtx
) == 0)
4227 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
),
4228 gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
,
4234 ret_rtx
= gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4235 gen_rtx (PLUS
, GET_MODE (base_rtx
),
4236 gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
,
4244 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4245 INTVAL (offset_rtx
) == 0)
4247 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
), index_rtx
, base_rtx
);
4251 ret_rtx
= gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4252 gen_rtx (PLUS
, GET_MODE (index_rtx
), index_rtx
,
4262 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4263 INTVAL (offset_rtx
) == 0)
4265 ret_rtx
= gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
, scale_rtx
);
4270 gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4271 gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
,
4278 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4279 INTVAL (offset_rtx
) == 0)
4281 ret_rtx
= index_rtx
;
4285 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
), index_rtx
, offset_rtx
);
4294 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4295 INTVAL (offset_rtx
) == 0)
4301 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
), base_rtx
, offset_rtx
);
4304 else if (was_only_offset
)
4306 ret_rtx
= offset_rtx
;
4314 XEXP (mem_rtx
, 0) = ret_rtx
;
4315 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4327 /* return 1 if the first insn to set cc before insn also sets the register
4328 reg_rtx - otherwise return 0 */
4330 last_to_set_cc (reg_rtx
, insn
)
4333 rtx prev_insn
= PREV_INSN (insn
);
4337 if (GET_CODE (prev_insn
) == NOTE
)
4340 else if (GET_CODE (prev_insn
) == INSN
)
4342 if (GET_CODE (PATTERN (prev_insn
)) != SET
)
4345 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn
)), reg_rtx
))
4347 if (sets_condition_code (SET_SRC (PATTERN (prev_insn
))))
4353 else if (!doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn
))))
4360 prev_insn
= PREV_INSN (prev_insn
);
4368 doesnt_set_condition_code (pat
)
4371 switch (GET_CODE (pat
))
4385 sets_condition_code (pat
)
4388 switch (GET_CODE (pat
))
4412 str_immediate_operand (op
, mode
)
4414 enum machine_mode mode
;
4416 if (GET_CODE (op
) == CONST_INT
&& INTVAL (op
) <= 32 && INTVAL (op
) >= 0)
4428 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4429 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4430 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4431 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
))
4440 Return 1 if the mode of the SET_DEST of insn is floating point
4441 and it is not an fld or a move from memory to memory.
4442 Otherwise return 0 */
4447 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4448 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4449 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4450 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4451 && GET_CODE (SET_DEST (PATTERN (insn
))) == REG
4452 && REGNO (SET_DEST (PATTERN (insn
))) >= FIRST_FLOAT_REG
4453 && GET_CODE (SET_SRC (insn
)) != MEM
)
4462 Return 1 if the mode of the SET_DEST is floating point, the SET_DEST
4463 is a MEM, and the source is a register.
4469 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4470 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4471 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4472 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4473 && GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
4474 && GET_CODE (SET_SRC (PATTERN (insn
))) == REG
)
4484 Return 1 if dep_insn sets a register which insn uses as a base
4485 or index to reference memory.
4486 otherwise return 0 */
4489 agi_dependent (insn
, dep_insn
)
4492 if (GET_CODE (dep_insn
) == INSN
4493 && GET_CODE (PATTERN (dep_insn
)) == SET
4494 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == REG
)
4496 return (reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn
)), insn
));
4499 if (GET_CODE (dep_insn
) == INSN
&& GET_CODE (PATTERN (dep_insn
)) == SET
4500 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == MEM
4501 && push_operand (SET_DEST (PATTERN (dep_insn
)),
4502 GET_MODE (SET_DEST (PATTERN (dep_insn
)))))
4504 return (reg_mentioned_in_mem (stack_pointer_rtx
, insn
));
4512 Return 1 if reg is used in rtl as a base or index for a memory ref
4513 otherwise return 0. */
4516 reg_mentioned_in_mem (reg
, rtl
)
4521 register enum rtx_code code
;
4526 code
= GET_CODE (rtl
);
4544 if (code
== MEM
&& reg_mentioned_p (reg
, rtl
))
4547 fmt
= GET_RTX_FORMAT (code
);
4548 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4553 for (j
= XVECLEN (rtl
, i
) - 1; j
>= 0; j
--)
4555 if (reg_mentioned_in_mem (reg
, XVECEXP (rtl
, i
, j
)))
4560 else if (fmt
[i
] == 'e' && reg_mentioned_in_mem (reg
, XEXP (rtl
, i
)))
4567 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4569 operands[0] = result, initialized with the start address
4570 operands[1] = alignment of the address.
4571 operands[2] = scratch register, initialized with the start address when
4572 not aligned, otherwise undefined
4574 This is just the body. It needs the initialisations mentioned above and
4575 some address computing at the end. These things are done in i386.md. */
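/* Roughly, the code emitted below has this shape (the description is a
   sketch, not exact output): first a byte-at-a-time loop that stops at a null
   byte or at a 4-byte boundary, then a main loop that loads 4 bytes at a time
   and tests each byte for zero, and finally fixup increments that adjust the
   result when the null byte was not the last byte of the word.  */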
4578 output_strlen_unroll (operands
)
4583 xops
[0] = operands
[0]; /* Result */
4584 /* operands[1]; * Alignment */
4585 xops
[1] = operands
[2]; /* Scratch */
4586 xops
[2] = GEN_INT (0);
4587 xops
[3] = GEN_INT (2);
4588 xops
[4] = GEN_INT (3);
4589 xops
[5] = GEN_INT (4);
4590 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4591 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4592 xops
[8] = gen_label_rtx (); /* label of main loop */
4593 if(TARGET_USE_Q_REG
&& QI_REG_P (xops
[1]))
4594 xops
[9] = gen_label_rtx (); /* pentium optimisation */
4595 xops
[10] = gen_label_rtx (); /* end label 2 */
4596 xops
[11] = gen_label_rtx (); /* end label 1 */
4597 xops
[12] = gen_label_rtx (); /* end label */
4598 /* xops[13] * Temporary used */
4599 xops
[14] = GEN_INT (0xff);
4600 xops
[15] = GEN_INT (0xff00);
4601 xops
[16] = GEN_INT (0xff0000);
4602 xops
[17] = GEN_INT (0xff000000);
4604 /* Loop to check 1..3 bytes for null to get an aligned pointer */
4606 /* is there a known alignment and is it less than 4 */
4607 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) < 4)
4609 /* is there a known alignment and is it not 2 */
4610 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
4612 xops
[6] = gen_label_rtx (); /* label when aligned to 3-byte */
4613 xops
[7] = gen_label_rtx (); /* label when aligned to 2-byte */
4615 /* leave just the 3 lower bits */
4616 /* if this is a q-register, then the high part is used later */
4617 /* therefore use andl rather than andb */
4618 output_asm_insn (AS2 (and%L1
,%4,%1), xops
);
4619 /* is aligned to a 4-byte address when zero */
4620 output_asm_insn (AS1 (je
,%l8
), xops
);
4621 /* as a side effect, the AND sets even parity when the value is 3 */
4622 output_asm_insn (AS1 (jp
,%6), xops
);
4624 /* is it aligned to 2 bytes ? */
4625 if (QI_REG_P (xops
[1]))
4626 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
4628 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
4629 output_asm_insn (AS1 (je
,%7), xops
);
4633 /* since the alignment is 2, we have to check 2 or 0 bytes */
4635 /* check if is aligned to 4 - byte */
4636 output_asm_insn (AS2 (and%L1
,%3,%1), xops
);
4637 /* is aligned to a 4-byte address when zero */
4638 output_asm_insn (AS1 (je
,%l8
), xops
);
4641 xops
[13] = gen_rtx (MEM
, QImode
, xops
[0]);
4642 /* now, compare the bytes */
4643 /* comparing with the high part of a q-reg gives shorter code */
4644 if (QI_REG_P (xops[1]))
4646 /* compare the first 1..3 unaligned bytes on a byte-by-byte basis */
4647 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4648 /* when zero we reached the end */
4649 output_asm_insn (AS1 (je
,%l12
), xops
);
4650 /* increment the address */
4651 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4653 /* not needed with an alignment of 2 */
4654 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
4656 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[7]));
4657 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4658 output_asm_insn (AS1 (je
,%l12
), xops
);
4659 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4661 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[6]));
4663 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4667 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4668 output_asm_insn (AS1 (je
,%l12
), xops
);
4669 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4671 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[7]));
4672 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4673 output_asm_insn (AS1 (je
,%l12
), xops
);
4674 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4676 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[6]));
4677 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4679 output_asm_insn (AS1 (je
,%l12
), xops
);
4680 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4683 /* Generate loop to check 4 bytes at a time */
4684 /* IMHO it is not a good idea to align this loop. It only makes */
4685 /* the program larger and does not speed it up */
4686 /* ASM_OUTPUT_LOOP_ALIGN (asm_out_file); */
4687 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[8]));
4689 xops
[13] = gen_rtx (MEM
, SImode
, xops
[0]);
4690 output_asm_insn (AS2 (mov
%L1
,%13,%1), xops
);
4692 if (QI_REG_P (xops
[1]))
4694 /* On i586 it is faster to combine the high and low parts as
4695 a kind of lookahead. If ANDing both yields zero, then one
4696 of them *could* be zero; otherwise neither of them is zero.
4697 This saves one instruction; on the i486 it is slower
4698 (tested with P-90, i486DX2-66, AMD486DX2-66) */
4701 output_asm_insn (AS2 (test
%B1
,%h1
,%b1
), xops
);
4702 output_asm_insn (AS1 (jne
,%l9
), xops
);
4705 /* check first byte */
4706 output_asm_insn (AS2 (test
%B1
,%b1
,%b1
), xops
);
4707 output_asm_insn (AS1 (je
,%l12
), xops
);
4709 /* check second byte */
4710 output_asm_insn (AS2 (test
%B1
,%h1
,%h1
), xops
);
4711 output_asm_insn (AS1 (je
,%l11
), xops
);
4714 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[9]));
4718 /* check first byte */
4719 output_asm_insn (AS2 (test
%L1
,%14,%1), xops
);
4720 output_asm_insn (AS1 (je
,%l12
), xops
);
4722 /* check second byte */
4723 output_asm_insn (AS2 (test
%L1
,%15,%1), xops
);
4724 output_asm_insn (AS1 (je
,%l11
), xops
);
4727 /* check third byte */
4728 output_asm_insn (AS2 (test
%L1
,%16,%1), xops
);
4729 output_asm_insn (AS1 (je
,%l10
), xops
);
4731 /* check fourth byte and increment address */
4732 output_asm_insn (AS2 (add
%L0
,%5,%0), xops
);
4733 output_asm_insn (AS2 (test
%L1
,%17,%1), xops
);
4734 output_asm_insn (AS1 (jne
,%l8
), xops
);
4736 /* now generate fixups when the compare stops within a 4-byte word */
4737 output_asm_insn (AS2 (sub
%L0
,%4,%0), xops
);
4739 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[10]));
4740 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4742 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[11]));
4743 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4745 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[12]));