1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 1992, 1994, 1995, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
39 #ifdef EXTRA_CONSTRAINT
40 /* If EXTRA_CONSTRAINT is defined, then the 'S'
41 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
42 asm statements that need 'S' for class SIREG will break. */
43 error EXTRA_CONSTRAINT conflicts with S constraint letter
44 /* The previous line used to be #error, but some compilers barf
45 even if the conditional was untrue. */
48 #ifndef CHECK_STACK_LIMIT
49 #define CHECK_STACK_LIMIT -1
52 enum reg_mem
/* Type of an operand for ix86_{binary,unary}_operator_ok */
59 /* Processor costs (relative to an add) */
60 struct processor_costs i386_cost
= { /* 386 specific costs */
61 1, /* cost of an add instruction (2 cycles) */
62 1, /* cost of a lea instruction */
63 3, /* variable shift costs */
64 2, /* constant shift costs */
65 6, /* cost of starting a multiply */
66 1, /* cost of multiply per each bit set */
67 23 /* cost of a divide/mod */
70 struct processor_costs i486_cost
= { /* 486 specific costs */
71 1, /* cost of an add instruction */
72 1, /* cost of a lea instruction */
73 3, /* variable shift costs */
74 2, /* constant shift costs */
75 12, /* cost of starting a multiply */
76 1, /* cost of multiply per each bit set */
77 40 /* cost of a divide/mod */
80 struct processor_costs pentium_cost
= {
81 1, /* cost of an add instruction */
82 1, /* cost of a lea instruction */
83 4, /* variable shift costs */
84 1, /* constant shift costs */
85 11, /* cost of starting a multiply */
86 0, /* cost of multiply per each bit set */
87 25 /* cost of a divide/mod */
90 struct processor_costs pentiumpro_cost
= {
91 1, /* cost of an add instruction */
92 1, /* cost of a lea instruction */
93 3, /* variable shift costs */
94 1, /* constant shift costs */
95 4, /* cost of starting a multiply */
96 0, /* cost of multiply per each bit set */
97 17 /* cost of a divide/mod */
/* Cost table for the CPU we are compiling for.  Starts out pointing at
   the Pentium costs; when -mcpu= matches an entry in
   processor_target_table this is replaced with that entry's cost table.  */
100 struct processor_costs
*ix86_cost
= &pentium_cost
;
/* Build a MEM rtx of mode MODE whose address is the frame pointer,
   i.e. a reference to the word at (%ebp).  */
102 #define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))
/* Stream that assembler output is written to (defined elsewhere).  */
104 extern FILE *asm_out_file
;
105 extern char *strcat ();
/* Forward declarations of assembler-output helpers defined later in
   this file.  */
107 char *singlemove_string ();
108 char *output_move_const_single ();
109 char *output_fp_cc0_set ();
/* Register-name tables, indexed by hard register number; the
   initializer macros come from the target headers.  hi_reg_name gives
   the full-width names, qi_reg_name the low-byte names, and
   qi_high_reg_name the high-byte names.  */
111 char *hi_reg_name
[] = HI_REGISTER_NAMES
;
112 char *qi_reg_name
[] = QI_REGISTER_NAMES
;
113 char *qi_high_reg_name
[] = QI_HIGH_REGISTER_NAMES
;
115 /* Array of the smallest class containing reg number REGNO, indexed by
116 REGNO. Used by REGNO_REG_CLASS in i386.h. */
118 enum reg_class regclass_map
[FIRST_PSEUDO_REGISTER
] =
121 AREG
, DREG
, CREG
, BREG
,
123 SIREG
, DIREG
, INDEX_REGS
, GENERAL_REGS
,
125 FP_TOP_REG
, FP_SECOND_REG
, FLOAT_REGS
, FLOAT_REGS
,
126 FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
,
131 /* Test and compare insns in i386.md store the information needed to
132 generate branch and scc insns here. */
/* Operands of the pending compare: op0 and op1 are the two values
   compared, cleared to NULL_RTX initially.  */
134 struct rtx_def
*i386_compare_op0
= NULL_RTX
;
135 struct rtx_def
*i386_compare_op1
= NULL_RTX
;
/* Generator functions used to emit the branch/scc rtl for the pending
   compare; presumably _gen_eq is the equality variant — set by the
   compare patterns in i386.md (TODO: confirm against i386.md).  */
136 struct rtx_def
*(*i386_compare_gen
)(), *(*i386_compare_gen_eq
)();
138 /* which cpu are we scheduling for */
139 enum processor_type ix86_cpu
;
141 /* which instruction set architecture to use. */
144 /* Strings to hold which cpu and instruction set architecture to use. */
145 char *ix86_cpu_string
; /* for -mcpu=<xxx> */
146 char *ix86_arch_string
; /* for -march=<xxx> */
148 /* Register allocation order */
/* Raw text of -mreg-alloc=; each character names one hard register.  */
149 char *i386_reg_alloc_order
;
/* Per-hard-register flags: set while validating -mreg-alloc= (to catch
   duplicate letters) and later read by order_regs_for_local_alloc to
   append the registers the user did not mention.  */
150 static char regs_allocated
[FIRST_PSEUDO_REGISTER
];
/* For each of the option pairs below, the *_string variable holds the
   raw text of the -m option and is parsed (via atoi) into the int
   variable during option override processing.  */
152 /* # of registers to use to pass arguments. */
153 char *i386_regparm_string
; /* # registers to use to pass args */
154 int i386_regparm
; /* i386_regparm_string as a number */
156 /* Alignment to use for loops and jumps */
157 char *i386_align_loops_string
; /* power of two alignment for loops */
158 char *i386_align_jumps_string
; /* power of two alignment for non-loop jumps */
159 char *i386_align_funcs_string
; /* power of two alignment for functions */
160 char *i386_branch_cost_string
; /* values 1-5: see jump.c */
162 int i386_align_loops
; /* power of two alignment for loops */
163 int i386_align_jumps
; /* power of two alignment for non-loop jumps */
164 int i386_align_funcs
; /* power of two alignment for functions */
165 int i386_branch_cost
; /* values 1-5: see jump.c */
167 /* Sometimes certain combinations of command options do not make
168 sense on a particular target machine. You can define a macro
169 `OVERRIDE_OPTIONS' to take account of this. This macro, if
170 defined, is executed once just after all the command options have
173 Don't use this macro to turn on various extra optimizations for
174 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
185 char *name
; /* Canonical processor name. */
186 enum processor_type processor
; /* Processor type enum value. */
187 struct processor_costs
*cost
; /* Processor costs */
188 int target_enable
; /* Target flags to enable. */
189 int target_disable
; /* Target flags to disable. */
190 } processor_target_table
[]
191 = {{PROCESSOR_I386_STRING
, PROCESSOR_I386
, &i386_cost
, 0, 0},
192 {PROCESSOR_I486_STRING
, PROCESSOR_I486
, &i486_cost
, 0, 0},
193 {PROCESSOR_I586_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
194 {PROCESSOR_PENTIUM_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
195 {PROCESSOR_I686_STRING
, PROCESSOR_PENTIUMPRO
, &pentiumpro_cost
, 0, 0},
196 {PROCESSOR_PENTIUMPRO_STRING
, PROCESSOR_PENTIUMPRO
, &pentiumpro_cost
, 0, 0}};
/* Number of entries in processor_target_table, used to bound the
   -march=/-mcpu= lookup loops.  */
198 int ptt_size
= sizeof (processor_target_table
) / sizeof (struct ptt
);
200 #ifdef SUBTARGET_OVERRIDE_OPTIONS
201 SUBTARGET_OVERRIDE_OPTIONS
;
204 /* Validate registers in register allocation order */
205 if (i386_reg_alloc_order
)
207 for (i
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
211 case 'a': regno
= 0; break;
212 case 'd': regno
= 1; break;
213 case 'c': regno
= 2; break;
214 case 'b': regno
= 3; break;
215 case 'S': regno
= 4; break;
216 case 'D': regno
= 5; break;
217 case 'B': regno
= 6; break;
219 default: fatal ("Register '%c' is unknown", ch
);
222 if (regs_allocated
[regno
])
223 fatal ("Register '%c' was already specified in the allocation order", ch
);
225 regs_allocated
[regno
] = 1;
229 if (ix86_arch_string
== (char *)0)
231 ix86_arch_string
= PROCESSOR_PENTIUM_STRING
;
232 if (ix86_cpu_string
== (char *)0)
233 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
236 for (i
= 0; i
< ptt_size
; i
++)
237 if (! strcmp (ix86_arch_string
, processor_target_table
[i
].name
))
239 ix86_arch
= processor_target_table
[i
].processor
;
240 if (ix86_cpu_string
== (char *)0)
241 ix86_cpu_string
= processor_target_table
[i
].name
;
247 error ("bad value (%s) for -march= switch", ix86_arch_string
);
248 ix86_arch_string
= PROCESSOR_PENTIUM_STRING
;
249 ix86_arch
= PROCESSOR_DEFAULT
;
252 if (ix86_cpu_string
== (char *)0)
253 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
255 for (j
= 0; j
< ptt_size
; j
++)
256 if (! strcmp (ix86_cpu_string
, processor_target_table
[j
].name
))
258 ix86_cpu
= processor_target_table
[j
].processor
;
259 ix86_cost
= processor_target_table
[j
].cost
;
260 if (i
> j
&& (int)ix86_arch
>= (int)PROCESSOR_PENTIUMPRO
)
261 error ("-mcpu=%s does not support -march=%s", ix86_cpu_string
, ix86_arch_string
);
263 target_flags
|= processor_target_table
[j
].target_enable
;
264 target_flags
&= ~processor_target_table
[j
].target_disable
;
270 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string
);
271 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
272 ix86_cpu
= PROCESSOR_DEFAULT
;
274 /* Validate -mregparm= value */
275 if (i386_regparm_string
)
277 i386_regparm
= atoi (i386_regparm_string
);
278 if (i386_regparm
< 0 || i386_regparm
> REGPARM_MAX
)
279 fatal ("-mregparm=%d is not between 0 and %d", i386_regparm
, REGPARM_MAX
);
282 /* The 486 suffers more from non-aligned cache line fills, and the larger code
283 size results in a larger cache foot-print and more misses. The 486 has a
284 16 byte cache line, pentium and pentiumpro have a 32 byte cache line */
285 def_align
= (TARGET_486
) ? 4 : 2;
287 /* Validate -malign-loops= value, or provide default */
288 if (i386_align_loops_string
)
290 i386_align_loops
= atoi (i386_align_loops_string
);
291 if (i386_align_loops
< 0 || i386_align_loops
> MAX_CODE_ALIGN
)
292 fatal ("-malign-loops=%d is not between 0 and %d",
293 i386_align_loops
, MAX_CODE_ALIGN
);
296 i386_align_loops
= 2;
298 /* Validate -malign-jumps= value, or provide default */
299 if (i386_align_jumps_string
)
301 i386_align_jumps
= atoi (i386_align_jumps_string
);
302 if (i386_align_jumps
< 0 || i386_align_jumps
> MAX_CODE_ALIGN
)
303 fatal ("-malign-jumps=%d is not between 0 and %d",
304 i386_align_jumps
, MAX_CODE_ALIGN
);
307 i386_align_jumps
= def_align
;
309 /* Validate -malign-functions= value, or provide default */
310 if (i386_align_funcs_string
)
312 i386_align_funcs
= atoi (i386_align_funcs_string
);
313 if (i386_align_funcs
< 0 || i386_align_funcs
> MAX_CODE_ALIGN
)
314 fatal ("-malign-functions=%d is not between 0 and %d",
315 i386_align_funcs
, MAX_CODE_ALIGN
);
318 i386_align_funcs
= def_align
;
320 /* Validate -mbranch-cost= value, or provide default */
321 if (i386_branch_cost_string
)
323 i386_branch_cost
= atoi (i386_branch_cost_string
);
324 if (i386_branch_cost
< 0 || i386_branch_cost
> 5)
325 fatal ("-mbranch-cost=%d is not between 0 and 5",
329 i386_branch_cost
= 1;
331 if (TARGET_OMIT_LEAF_FRAME_POINTER
) /* keep nonleaf frame pointers */
332 flag_omit_frame_pointer
= 1;
334 /* pic references don't explicitly mention pic_offset_table_rtx */
335 /* code threaded into the prologue may conflict with profiling */
336 if (flag_pic
|| profile_flag
|| profile_block_flag
)
337 target_flags
&= ~MASK_SCHEDULE_PROLOGUE
;
340 /* A C statement (sans semicolon) to choose the order in which to
341 allocate hard registers for pseudo-registers local to a basic
344 Store the desired register order in the array `reg_alloc_order'.
345 Element 0 should be the register to allocate first; element 1, the
346 next register; and so on.
348 The macro body should not assume anything about the contents of
349 `reg_alloc_order' before execution of the macro.
351 On most machines, it is not necessary to define this macro. */
354 order_regs_for_local_alloc ()
356 int i
, ch
, order
, regno
;
358 /* User specified the register allocation order */
359 if (i386_reg_alloc_order
)
361 for (i
= order
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
365 case 'a': regno
= 0; break;
366 case 'd': regno
= 1; break;
367 case 'c': regno
= 2; break;
368 case 'b': regno
= 3; break;
369 case 'S': regno
= 4; break;
370 case 'D': regno
= 5; break;
371 case 'B': regno
= 6; break;
374 reg_alloc_order
[order
++] = regno
;
377 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
379 if (!regs_allocated
[i
])
380 reg_alloc_order
[order
++] = i
;
384 /* If users did not specify a register allocation order, use natural order */
387 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
388 reg_alloc_order
[i
] = i
;
394 optimization_options (level
)
397 /* For -O2, and beyond, turn off -fschedule-insns by default. It tends to
398 make the problem with not enough registers even worse */
399 #ifdef INSN_SCHEDULING
401 flag_schedule_insns
= 0;
405 /* Sign-extend a 16-bit constant */
408 i386_sext16_if_const (op
)
411 if (GET_CODE (op
) == CONST_INT
)
413 HOST_WIDE_INT val
= INTVAL (op
);
414 HOST_WIDE_INT sext_val
;
416 sext_val
= val
| ~0xffff;
418 sext_val
= val
& 0xffff;
420 op
= GEN_INT (sext_val
);
425 /* Return nonzero if the rtx is aligned */
428 i386_aligned_reg_p (regno
)
431 return (regno
== STACK_POINTER_REGNUM
432 || (!flag_omit_frame_pointer
433 && regno
== FRAME_POINTER_REGNUM
));
440 /* registers and immediate operands are always "aligned" */
441 if (GET_CODE (op
) != MEM
)
444 /* Don't even try to do any aligned optimizations with volatiles */
445 if (MEM_VOLATILE_P (op
))
448 /* Get address of memory operand */
451 switch (GET_CODE (op
))
458 /* match "reg + offset" */
460 if (GET_CODE (XEXP (op
, 1)) != CONST_INT
)
462 if (INTVAL (XEXP (op
, 1)) & 3)
465 if (GET_CODE (op
) != REG
)
469 return i386_aligned_reg_p (REGNO (op
));
474 /* Return nonzero if INSN looks like it won't compute useful cc bits
475 as a side effect. This information is only a hint. */
478 i386_cc_probably_useless_p (insn
)
481 return !next_cc0_user (insn
);
484 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
485 attribute for DECL. The attributes in ATTRIBUTES have previously been
489 i386_valid_decl_attribute_p (decl
, attributes
, identifier
, args
)
498 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
499 attribute for TYPE. The attributes in ATTRIBUTES have previously been
503 i386_valid_type_attribute_p (type
, attributes
, identifier
, args
)
509 if (TREE_CODE (type
) != FUNCTION_TYPE
510 && TREE_CODE (type
) != FIELD_DECL
511 && TREE_CODE (type
) != TYPE_DECL
)
514 /* Stdcall attribute says callee is responsible for popping arguments
515 if they are not variable. */
516 if (is_attribute_p ("stdcall", identifier
))
517 return (args
== NULL_TREE
);
519 /* Cdecl attribute says the callee is a normal C declaration */
520 if (is_attribute_p ("cdecl", identifier
))
521 return (args
== NULL_TREE
);
523 /* Regparm attribute specifies how many integer arguments are to be
524 passed in registers */
525 if (is_attribute_p ("regparm", identifier
))
529 if (!args
|| TREE_CODE (args
) != TREE_LIST
530 || TREE_CHAIN (args
) != NULL_TREE
531 || TREE_VALUE (args
) == NULL_TREE
)
534 cst
= TREE_VALUE (args
);
535 if (TREE_CODE (cst
) != INTEGER_CST
)
538 if (TREE_INT_CST_HIGH (cst
) != 0
539 || TREE_INT_CST_LOW (cst
) < 0
540 || TREE_INT_CST_LOW (cst
) > REGPARM_MAX
)
549 /* Return 0 if the attributes for two types are incompatible, 1 if they
550 are compatible, and 2 if they are nearly compatible (which causes a
551 warning to be generated). */
554 i386_comp_type_attributes (type1
, type2
)
562 /* Value is the number of bytes of arguments automatically
563 popped when returning from a subroutine call.
564 FUNDECL is the declaration node of the function (as a tree),
565 FUNTYPE is the data type of the function (as a tree),
566 or for a library call it is an identifier node for the subroutine name.
567 SIZE is the number of bytes of arguments passed on the stack.
569 On the 80386, the RTD insn may be used to pop them if the number
570 of args is fixed, but if the number is variable then the caller
571 must pop them all. RTD can't be used for library calls now
572 because the library is compiled with the Unix compiler.
573 Use of RTD is a selectable option, since it is incompatible with
574 standard Unix calling sequences. If the option is not selected,
575 the caller must always pop the args.
577 The attribute stdcall is equivalent to RTD on a per module basis. */
580 i386_return_pops_args (fundecl
, funtype
, size
)
585 int rtd
= TARGET_RTD
&& (!fundecl
|| TREE_CODE (fundecl
) != IDENTIFIER_NODE
);
587 /* Cdecl functions override -mrtd, and never pop the stack */
588 if (!lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype
))) {
590 /* Stdcall functions will pop the stack if not variable args */
591 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype
)))
595 && (TYPE_ARG_TYPES (funtype
) == NULL_TREE
596 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype
))) == void_type_node
)))
600 /* Lose any fake structure return argument */
601 if (aggregate_value_p (TREE_TYPE (funtype
)))
602 return GET_MODE_SIZE (Pmode
);
608 /* Argument support functions. */
610 /* Initialize a variable CUM of type CUMULATIVE_ARGS
611 for a call to a function whose data type is FNTYPE.
612 For a library call, FNTYPE is 0. */
615 init_cumulative_args (cum
, fntype
, libname
)
616 CUMULATIVE_ARGS
*cum
; /* argument info to initialize */
617 tree fntype
; /* tree ptr for function decl */
618 rtx libname
; /* SYMBOL_REF of library name or 0 */
620 static CUMULATIVE_ARGS zero_cum
;
621 tree param
, next_param
;
623 if (TARGET_DEBUG_ARG
)
625 fprintf (stderr
, "\ninit_cumulative_args (");
628 tree ret_type
= TREE_TYPE (fntype
);
629 fprintf (stderr
, "fntype code = %s, ret code = %s",
630 tree_code_name
[ (int)TREE_CODE (fntype
) ],
631 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
634 fprintf (stderr
, "no fntype");
637 fprintf (stderr
, ", libname = %s", XSTR (libname
, 0));
642 /* Set up the number of registers to use for passing arguments. */
643 cum
->nregs
= i386_regparm
;
646 tree attr
= lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype
));
648 cum
->nregs
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr
)));
651 /* Determine if this function has variable arguments. This is
652 indicated by the last argument being 'void_type_mode' if there
653 are no variable arguments. If there are variable arguments, then
654 we won't pass anything in registers */
658 for (param
= (fntype
) ? TYPE_ARG_TYPES (fntype
) : 0;
662 next_param
= TREE_CHAIN (param
);
663 if (next_param
== (tree
)0 && TREE_VALUE (param
) != void_type_node
)
668 if (TARGET_DEBUG_ARG
)
669 fprintf (stderr
, ", nregs=%d )\n", cum
->nregs
);
674 /* Update the data in CUM to advance over an argument
675 of mode MODE and data type TYPE.
676 (TYPE is null for libcalls where that information may not be available.) */
679 function_arg_advance (cum
, mode
, type
, named
)
680 CUMULATIVE_ARGS
*cum
; /* current arg information */
681 enum machine_mode mode
; /* current arg mode */
682 tree type
; /* type of the argument or 0 if lib support */
683 int named
; /* whether or not the argument was named */
685 int bytes
= (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
686 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
688 if (TARGET_DEBUG_ARG
)
690 "function_adv( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d )\n\n",
691 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
706 /* Define where to put the arguments to a function.
707 Value is zero to push the argument on the stack,
708 or a hard register in which to store the argument.
710 MODE is the argument's machine mode.
711 TYPE is the data type of the argument (as a tree).
712 This is null for libcalls where that information may
714 CUM is a variable of type CUMULATIVE_ARGS which gives info about
715 the preceding args and about the function being called.
716 NAMED is nonzero if this argument is a named parameter
717 (otherwise it is an extra parameter matching an ellipsis). */
720 function_arg (cum
, mode
, type
, named
)
721 CUMULATIVE_ARGS
*cum
; /* current arg information */
722 enum machine_mode mode
; /* current arg mode */
723 tree type
; /* type of the argument or 0 if lib support */
724 int named
; /* != 0 for normal args, == 0 for ... args */
727 int bytes
= (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
728 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
732 default: /* for now, pass fp/complex values on the stack */
740 if (words
<= cum
->nregs
)
741 ret
= gen_rtx (REG
, mode
, cum
->regno
);
745 if (TARGET_DEBUG_ARG
)
748 "function_arg( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d",
749 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
752 fprintf (stderr
, ", reg=%%e%s", reg_names
[ REGNO(ret
) ]);
754 fprintf (stderr
, ", stack");
756 fprintf (stderr
, " )\n");
762 /* For an arg passed partly in registers and partly in memory,
763 this is the number of registers used.
764 For args passed entirely in registers or entirely in memory, zero. */
767 function_arg_partial_nregs (cum
, mode
, type
, named
)
768 CUMULATIVE_ARGS
*cum
; /* current arg information */
769 enum machine_mode mode
; /* current arg mode */
770 tree type
; /* type of the argument or 0 if lib support */
771 int named
; /* != 0 for normal args, == 0 for ... args */
777 /* Output an insn whose source is a 386 integer register. SRC is the
778 rtx for the register, and TEMPLATE is the op-code template. SRC may
779 be either SImode or DImode.
781 The template will be output with operands[0] as SRC, and operands[1]
782 as a pointer to the top of the 386 stack. So a call from floatsidf2
783 would look like this:
785 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
787 where %z0 corresponds to the caller's operands[1], and is used to
788 emit the proper size suffix.
790 ??? Extend this to handle HImode - a 387 can load and store HImode
794 output_op_from_reg (src
, template)
799 int size
= GET_MODE_SIZE (GET_MODE (src
));
802 xops
[1] = AT_SP (Pmode
);
803 xops
[2] = GEN_INT (size
);
804 xops
[3] = stack_pointer_rtx
;
806 if (size
> UNITS_PER_WORD
)
809 if (size
> 2 * UNITS_PER_WORD
)
811 high
= gen_rtx (REG
, SImode
, REGNO (src
) + 2);
812 output_asm_insn (AS1 (push
%L0
,%0), &high
);
814 high
= gen_rtx (REG
, SImode
, REGNO (src
) + 1);
815 output_asm_insn (AS1 (push
%L0
,%0), &high
);
817 output_asm_insn (AS1 (push
%L0
,%0), &src
);
819 output_asm_insn (template, xops
);
821 output_asm_insn (AS2 (add
%L3
,%2,%3), xops
);
824 /* Output an insn to pop an value from the 387 top-of-stack to 386
825 register DEST. The 387 register stack is popped if DIES is true. If
826 the mode of DEST is an integer mode, a `fist' integer store is done,
827 otherwise a `fst' float store is done. */
830 output_to_reg (dest
, dies
, scratch_mem
)
836 int size
= GET_MODE_SIZE (GET_MODE (dest
));
839 xops
[0] = AT_SP (Pmode
);
841 xops
[0] = scratch_mem
;
842 xops
[1] = stack_pointer_rtx
;
843 xops
[2] = GEN_INT (size
);
847 output_asm_insn (AS2 (sub
%L1
,%2,%1), xops
);
849 if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_INT
)
852 output_asm_insn (AS1 (fistp
%z3
,%y0
), xops
);
854 output_asm_insn (AS1 (fist
%z3
,%y0
), xops
);
856 else if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_FLOAT
)
859 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
862 if (GET_MODE (dest
) == XFmode
)
864 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
865 output_asm_insn (AS1 (fld
%z3
,%y0
), xops
);
868 output_asm_insn (AS1 (fst
%z3
,%y0
), xops
);
875 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
877 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
880 if (size
> UNITS_PER_WORD
)
882 dest
= gen_rtx (REG
, SImode
, REGNO (dest
) + 1);
884 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
887 xops
[0] = adj_offsettable_operand (xops
[0], 4);
889 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
891 if (size
> 2 * UNITS_PER_WORD
)
893 dest
= gen_rtx (REG
, SImode
, REGNO (dest
) + 1);
895 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
898 xops
[0] = adj_offsettable_operand (xops
[0], 4);
899 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
906 singlemove_string (operands
)
910 if (GET_CODE (operands
[0]) == MEM
911 && GET_CODE (x
= XEXP (operands
[0], 0)) == PRE_DEC
)
913 if (XEXP (x
, 0) != stack_pointer_rtx
)
917 else if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
919 return output_move_const_single (operands
);
921 else if (GET_CODE (operands
[0]) == REG
|| GET_CODE (operands
[1]) == REG
)
922 return AS2 (mov
%L0
,%1,%0);
923 else if (CONSTANT_P (operands
[1]))
924 return AS2 (mov
%L0
,%1,%0);
927 output_asm_insn ("push%L1 %1", operands
);
932 /* Return a REG that occurs in ADDR with coefficient 1.
933 ADDR can be effectively incremented by incrementing REG. */
939 while (GET_CODE (addr
) == PLUS
)
941 if (GET_CODE (XEXP (addr
, 0)) == REG
)
942 addr
= XEXP (addr
, 0);
943 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
944 addr
= XEXP (addr
, 1);
945 else if (CONSTANT_P (XEXP (addr
, 0)))
946 addr
= XEXP (addr
, 1);
947 else if (CONSTANT_P (XEXP (addr
, 1)))
948 addr
= XEXP (addr
, 0);
952 if (GET_CODE (addr
) == REG
)
958 /* Output an insn to add the constant N to the register X. */
969 output_asm_insn (AS1 (dec
%L0
,%0), xops
);
971 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
972 else if (n
< 0 || n
== 128)
974 xops
[1] = GEN_INT (-n
);
975 output_asm_insn (AS2 (sub
%L0
,%1,%0), xops
);
979 xops
[1] = GEN_INT (n
);
980 output_asm_insn (AS2 (add
%L0
,%1,%0), xops
);
985 /* Output assembler code to perform a doubleword move insn
986 with operands OPERANDS. */
989 output_move_double (operands
)
992 enum {REGOP
, OFFSOP
, MEMOP
, PUSHOP
, POPOP
, CNSTOP
, RNDOP
} optype0
, optype1
;
996 rtx addreg0
= 0, addreg1
= 0;
997 int dest_overlapped_low
= 0;
998 int size
= GET_MODE_SIZE (GET_MODE (operands
[0]));
1003 /* First classify both operands. */
1005 if (REG_P (operands
[0]))
1007 else if (offsettable_memref_p (operands
[0]))
1009 else if (GET_CODE (XEXP (operands
[0], 0)) == POST_INC
)
1011 else if (GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
)
1013 else if (GET_CODE (operands
[0]) == MEM
)
1018 if (REG_P (operands
[1]))
1020 else if (CONSTANT_P (operands
[1]))
1022 else if (offsettable_memref_p (operands
[1]))
1024 else if (GET_CODE (XEXP (operands
[1], 0)) == POST_INC
)
1026 else if (GET_CODE (XEXP (operands
[1], 0)) == PRE_DEC
)
1028 else if (GET_CODE (operands
[1]) == MEM
)
1033 /* Check for the cases that the operand constraints are not
1034 supposed to allow to happen. Abort if we get one,
1035 because generating code for these cases is painful. */
1037 if (optype0
== RNDOP
|| optype1
== RNDOP
)
1040 /* If one operand is decrementing and one is incrementing
1041 decrement the former register explicitly
1042 and change that operand into ordinary indexing. */
1044 if (optype0
== PUSHOP
&& optype1
== POPOP
)
1046 /* ??? Can this ever happen on i386? */
1047 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
1048 asm_add (-size
, operands
[0]);
1049 if (GET_MODE (operands
[1]) == XFmode
)
1050 operands
[0] = gen_rtx (MEM
, XFmode
, operands
[0]);
1051 else if (GET_MODE (operands
[0]) == DFmode
)
1052 operands
[0] = gen_rtx (MEM
, DFmode
, operands
[0]);
1054 operands
[0] = gen_rtx (MEM
, DImode
, operands
[0]);
1058 if (optype0
== POPOP
&& optype1
== PUSHOP
)
1060 /* ??? Can this ever happen on i386? */
1061 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
1062 asm_add (-size
, operands
[1]);
1063 if (GET_MODE (operands
[1]) == XFmode
)
1064 operands
[1] = gen_rtx (MEM
, XFmode
, operands
[1]);
1065 else if (GET_MODE (operands
[1]) == DFmode
)
1066 operands
[1] = gen_rtx (MEM
, DFmode
, operands
[1]);
1068 operands
[1] = gen_rtx (MEM
, DImode
, operands
[1]);
1072 /* If an operand is an unoffsettable memory ref, find a register
1073 we can increment temporarily to make it refer to the second word. */
1075 if (optype0
== MEMOP
)
1076 addreg0
= find_addr_reg (XEXP (operands
[0], 0));
1078 if (optype1
== MEMOP
)
1079 addreg1
= find_addr_reg (XEXP (operands
[1], 0));
1081 /* Ok, we can do one word at a time.
1082 Normally we do the low-numbered word first,
1083 but if either operand is autodecrementing then we
1084 do the high-numbered word first.
1086 In either case, set up in LATEHALF the operands to use
1087 for the high-numbered word and in some cases alter the
1088 operands in OPERANDS to be suitable for the low-numbered word. */
1092 if (optype0
== REGOP
)
1094 middlehalf
[0] = gen_rtx (REG
, SImode
, REGNO (operands
[0]) + 1);
1095 latehalf
[0] = gen_rtx (REG
, SImode
, REGNO (operands
[0]) + 2);
1097 else if (optype0
== OFFSOP
)
1099 middlehalf
[0] = adj_offsettable_operand (operands
[0], 4);
1100 latehalf
[0] = adj_offsettable_operand (operands
[0], 8);
1104 middlehalf
[0] = operands
[0];
1105 latehalf
[0] = operands
[0];
1108 if (optype1
== REGOP
)
1110 middlehalf
[1] = gen_rtx (REG
, SImode
, REGNO (operands
[1]) + 1);
1111 latehalf
[1] = gen_rtx (REG
, SImode
, REGNO (operands
[1]) + 2);
1113 else if (optype1
== OFFSOP
)
1115 middlehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1116 latehalf
[1] = adj_offsettable_operand (operands
[1], 8);
1118 else if (optype1
== CNSTOP
)
1120 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1122 REAL_VALUE_TYPE r
; long l
[3];
1124 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1125 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r
, l
);
1126 operands
[1] = GEN_INT (l
[0]);
1127 middlehalf
[1] = GEN_INT (l
[1]);
1128 latehalf
[1] = GEN_INT (l
[2]);
1130 else if (CONSTANT_P (operands
[1]))
1131 /* No non-CONST_DOUBLE constant should ever appear here. */
1136 middlehalf
[1] = operands
[1];
1137 latehalf
[1] = operands
[1];
1140 else /* size is not 12: */
1142 if (optype0
== REGOP
)
1143 latehalf
[0] = gen_rtx (REG
, SImode
, REGNO (operands
[0]) + 1);
1144 else if (optype0
== OFFSOP
)
1145 latehalf
[0] = adj_offsettable_operand (operands
[0], 4);
1147 latehalf
[0] = operands
[0];
1149 if (optype1
== REGOP
)
1150 latehalf
[1] = gen_rtx (REG
, SImode
, REGNO (operands
[1]) + 1);
1151 else if (optype1
== OFFSOP
)
1152 latehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1153 else if (optype1
== CNSTOP
)
1154 split_double (operands
[1], &operands
[1], &latehalf
[1]);
1156 latehalf
[1] = operands
[1];
1159 /* If insn is effectively movd N (sp),-(sp) then we will do the
1160 high word first. We should use the adjusted operand 1
1161 (which is N+4 (sp) or N+8 (sp))
1162 for the low word and middle word as well,
1163 to compensate for the first decrement of sp. */
1164 if (optype0
== PUSHOP
1165 && REGNO (XEXP (XEXP (operands
[0], 0), 0)) == STACK_POINTER_REGNUM
1166 && reg_overlap_mentioned_p (stack_pointer_rtx
, operands
[1]))
1167 middlehalf
[1] = operands
[1] = latehalf
[1];
1169 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1170 if the upper part of reg N does not appear in the MEM, arrange to
1171 emit the move late-half first. Otherwise, compute the MEM address
1172 into the upper part of N and use that as a pointer to the memory
1174 if (optype0
== REGOP
1175 && (optype1
== OFFSOP
|| optype1
== MEMOP
))
1177 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1178 && reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1180 /* If both halves of dest are used in the src memory address,
1181 compute the address into latehalf of dest. */
1183 xops
[0] = latehalf
[0];
1184 xops
[1] = XEXP (operands
[1], 0);
1185 output_asm_insn (AS2 (lea
%L0
,%a1
,%0), xops
);
1186 if( GET_MODE (operands
[1]) == XFmode
)
1189 operands
[1] = gen_rtx (MEM
, XFmode
, latehalf
[0]);
1190 middlehalf
[1] = adj_offsettable_operand (operands
[1], size
-8);
1191 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1195 operands
[1] = gen_rtx (MEM
, DImode
, latehalf
[0]);
1196 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1200 && reg_mentioned_p (middlehalf
[0], XEXP (operands
[1], 0)))
1202 /* Check for two regs used by both source and dest. */
1203 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1204 || reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1207 /* JRV says this can't happen: */
1208 if (addreg0
|| addreg1
)
1211 /* Only the middle reg conflicts; simply put it last. */
1212 output_asm_insn (singlemove_string (operands
), operands
);
1213 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1214 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1217 else if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0)))
1218 /* If the low half of dest is mentioned in the source memory
1219 address, the arrange to emit the move late half first. */
1220 dest_overlapped_low
= 1;
1223 /* If one or both operands autodecrementing,
1224 do the two words, high-numbered first. */
1226 /* Likewise, the first move would clobber the source of the second one,
1227 do them in the other order. This happens only for registers;
1228 such overlap can't happen in memory unless the user explicitly
1229 sets it up, and that is an undefined circumstance. */
1232 if (optype0 == PUSHOP || optype1 == PUSHOP
1233 || (optype0 == REGOP && optype1 == REGOP
1234 && REGNO (operands[0]) == REGNO (latehalf[1]))
1235 || dest_overlapped_low)
1237 if (optype0
== PUSHOP
|| optype1
== PUSHOP
1238 || (optype0
== REGOP
&& optype1
== REGOP
1239 && ((middlehalf
[1] && REGNO (operands
[0]) == REGNO (middlehalf
[1]))
1240 || REGNO (operands
[0]) == REGNO (latehalf
[1])))
1241 || dest_overlapped_low
)
1243 /* Make any unoffsettable addresses point at high-numbered word. */
1245 asm_add (size
-4, addreg0
);
1247 asm_add (size
-4, addreg1
);
1250 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1252 /* Undo the adds we just did. */
1254 asm_add (-4, addreg0
);
1256 asm_add (-4, addreg1
);
1260 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1262 asm_add (-4, addreg0
);
1264 asm_add (-4, addreg1
);
1267 /* Do low-numbered word. */
1268 return singlemove_string (operands
);
1271 /* Normal case: do the two words, low-numbered first. */
1273 output_asm_insn (singlemove_string (operands
), operands
);
1275 /* Do the middle one of the three words for long double */
1279 asm_add (4, addreg0
);
1281 asm_add (4, addreg1
);
1283 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1286 /* Make any unoffsettable addresses point at high-numbered word. */
1288 asm_add (4, addreg0
);
1290 asm_add (4, addreg1
);
1293 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1295 /* Undo the adds we just did. */
1297 asm_add (4-size
, addreg0
);
1299 asm_add (4-size
, addreg1
);
1305 #define MAX_TMPS 2 /* max temporary registers used */
1307 /* Output the appropriate code to move push memory on the stack */
1310 output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
)
1322 } tmp_info
[MAX_TMPS
];
1324 rtx src
= operands
[1];
1327 int stack_p
= reg_overlap_mentioned_p (stack_pointer_rtx
, src
);
1328 int stack_offset
= 0;
1332 if (!offsettable_memref_p (src
))
1333 fatal_insn ("Source is not offsettable", insn
);
1335 if ((length
& 3) != 0)
1336 fatal_insn ("Pushing non-word aligned size", insn
);
1338 /* Figure out which temporary registers we have available */
1339 for (i
= tmp_start
; i
< n_operands
; i
++)
1341 if (GET_CODE (operands
[i
]) == REG
)
1343 if (reg_overlap_mentioned_p (operands
[i
], src
))
1346 tmp_info
[ max_tmps
++ ].xops
[1] = operands
[i
];
1347 if (max_tmps
== MAX_TMPS
)
1353 for (offset
= length
- 4; offset
>= 0; offset
-= 4)
1355 xops
[0] = adj_offsettable_operand (src
, offset
+ stack_offset
);
1356 output_asm_insn (AS1(push
%L0
,%0), xops
);
1362 for (offset
= length
- 4; offset
>= 0; )
1364 for (num_tmps
= 0; num_tmps
< max_tmps
&& offset
>= 0; num_tmps
++)
1366 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%0,%1);
1367 tmp_info
[num_tmps
].push
= AS1(push
%L0
,%1);
1368 tmp_info
[num_tmps
].xops
[0] = adj_offsettable_operand (src
, offset
+ stack_offset
);
1372 for (i
= 0; i
< num_tmps
; i
++)
1373 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1375 for (i
= 0; i
< num_tmps
; i
++)
1376 output_asm_insn (tmp_info
[i
].push
, tmp_info
[i
].xops
);
1379 stack_offset
+= 4*num_tmps
;
1387 /* Output the appropriate code to move data between two memory locations */
1390 output_move_memory (operands
, insn
, length
, tmp_start
, n_operands
)
1401 } tmp_info
[MAX_TMPS
];
1403 rtx dest
= operands
[0];
1404 rtx src
= operands
[1];
1405 rtx qi_tmp
= NULL_RTX
;
1411 if (GET_CODE (dest
) == MEM
1412 && GET_CODE (XEXP (dest
, 0)) == PRE_INC
1413 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
)
1414 return output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
);
1416 if (!offsettable_memref_p (src
))
1417 fatal_insn ("Source is not offsettable", insn
);
1419 if (!offsettable_memref_p (dest
))
1420 fatal_insn ("Destination is not offsettable", insn
);
1422 /* Figure out which temporary registers we have available */
1423 for (i
= tmp_start
; i
< n_operands
; i
++)
1425 if (GET_CODE (operands
[i
]) == REG
)
1427 if ((length
& 1) != 0 && !qi_tmp
&& QI_REG_P (operands
[i
]))
1428 qi_tmp
= operands
[i
];
1430 if (reg_overlap_mentioned_p (operands
[i
], dest
))
1431 fatal_insn ("Temporary register overlaps the destination", insn
);
1433 if (reg_overlap_mentioned_p (operands
[i
], src
))
1434 fatal_insn ("Temporary register overlaps the source", insn
);
1436 tmp_info
[ max_tmps
++ ].xops
[2] = operands
[i
];
1437 if (max_tmps
== MAX_TMPS
)
1443 fatal_insn ("No scratch registers were found to do memory->memory moves", insn
);
1445 if ((length
& 1) != 0)
1448 fatal_insn ("No byte register found when moving odd # of bytes.", insn
);
1453 for (num_tmps
= 0; num_tmps
< max_tmps
; num_tmps
++)
1457 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%1,%2);
1458 tmp_info
[num_tmps
].store
= AS2(mov
%L0
,%2,%0);
1459 tmp_info
[num_tmps
].xops
[0] = adj_offsettable_operand (dest
, offset
);
1460 tmp_info
[num_tmps
].xops
[1] = adj_offsettable_operand (src
, offset
);
1464 else if (length
>= 2)
1466 tmp_info
[num_tmps
].load
= AS2(mov
%W0
,%1,%2);
1467 tmp_info
[num_tmps
].store
= AS2(mov
%W0
,%2,%0);
1468 tmp_info
[num_tmps
].xops
[0] = adj_offsettable_operand (dest
, offset
);
1469 tmp_info
[num_tmps
].xops
[1] = adj_offsettable_operand (src
, offset
);
1477 for (i
= 0; i
< num_tmps
; i
++)
1478 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1480 for (i
= 0; i
< num_tmps
; i
++)
1481 output_asm_insn (tmp_info
[i
].store
, tmp_info
[i
].xops
);
1486 xops
[0] = adj_offsettable_operand (dest
, offset
);
1487 xops
[1] = adj_offsettable_operand (src
, offset
);
1489 output_asm_insn (AS2(mov
%B0
,%1,%2), xops
);
1490 output_asm_insn (AS2(mov
%B0
,%2,%0), xops
);
1498 standard_80387_constant_p (x
)
1501 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1506 if (setjmp (handler
))
1509 set_float_handler (handler
);
1510 REAL_VALUE_FROM_CONST_DOUBLE (d
, x
);
1511 is0
= REAL_VALUES_EQUAL (d
, dconst0
) && !REAL_VALUE_MINUS_ZERO (d
);
1512 is1
= REAL_VALUES_EQUAL (d
, dconst1
);
1513 set_float_handler (NULL_PTR
);
1521 /* Note that on the 80387, other constants, such as pi,
1522 are much slower to load as standard constants
1523 than to load from doubles in memory! */
1530 output_move_const_single (operands
)
1533 if (FP_REG_P (operands
[0]))
1535 int conval
= standard_80387_constant_p (operands
[1]);
1543 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1545 REAL_VALUE_TYPE r
; long l
;
1547 if (GET_MODE (operands
[1]) == XFmode
)
1550 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1551 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
1552 operands
[1] = GEN_INT (l
);
1554 return singlemove_string (operands
);
1557 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1558 reference and a constant. */
1561 symbolic_operand (op
, mode
)
1563 enum machine_mode mode
;
1565 switch (GET_CODE (op
))
1572 return ((GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
1573 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
1574 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
1580 /* Test for a valid operand for a call instruction.
1581 Don't allow the arg pointer register or virtual regs
1582 since they may change into reg + const, which the patterns
1583 can't handle yet. */
1586 call_insn_operand (op
, mode
)
1588 enum machine_mode mode
;
1590 if (GET_CODE (op
) == MEM
1591 && ((CONSTANT_ADDRESS_P (XEXP (op
, 0))
1592 /* This makes a difference for PIC. */
1593 && general_operand (XEXP (op
, 0), Pmode
))
1594 || (GET_CODE (XEXP (op
, 0)) == REG
1595 && XEXP (op
, 0) != arg_pointer_rtx
1596 && !(REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1597 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
1602 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1606 expander_call_insn_operand (op
, mode
)
1608 enum machine_mode mode
;
1610 if (GET_CODE (op
) == MEM
1611 && (CONSTANT_ADDRESS_P (XEXP (op
, 0))
1612 || (GET_CODE (XEXP (op
, 0)) == REG
1613 && XEXP (op
, 0) != arg_pointer_rtx
1614 && !(REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1615 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
1620 /* Return 1 if OP is a comparison operator that can use the condition code
1621 generated by an arithmetic operation. */
1624 arithmetic_comparison_operator (op
, mode
)
1626 enum machine_mode mode
;
1630 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
1632 code
= GET_CODE (op
);
1633 if (GET_RTX_CLASS (code
) != '<')
1636 return (code
!= GT
&& code
!= LE
);
1639 /* Returns 1 if OP contains a symbol reference */
1642 symbolic_reference_mentioned_p (op
)
1648 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1651 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1652 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1658 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1659 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1662 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1669 /* Attempt to expand a binary operator. Make the expansion closer to the
1670 actual machine, then just general_operand, which will allow 3 separate
1671 memory references (one output, two input) in a single insn. Return
1672 whether the insn fails, or succeeds. */
1675 ix86_expand_binary_operator (code
, mode
, operands
)
1677 enum machine_mode mode
;
1684 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1685 if (GET_RTX_CLASS (code
) == 'c'
1686 && (rtx_equal_p (operands
[0], operands
[2])
1687 || immediate_operand (operands
[1], mode
)))
1689 rtx temp
= operands
[1];
1690 operands
[1] = operands
[2];
1694 /* If optimizing, copy to regs to improve CSE */
1695 if (TARGET_PSEUDO
&& optimize
&& ((reload_in_progress
| reload_completed
) == 0))
1697 if (GET_CODE (operands
[1]) == MEM
&& !rtx_equal_p (operands
[0], operands
[1]))
1698 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1700 if (GET_CODE (operands
[2]) == MEM
)
1701 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1703 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1705 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1706 emit_move_insn (temp
, operands
[1]);
1712 if (!ix86_binary_operator_ok (code
, mode
, operands
))
1714 /* If not optimizing, try to make a valid insn (optimize code previously did
1715 this above to improve chances of CSE) */
1717 if ((!TARGET_PSEUDO
|| !optimize
)
1718 && ((reload_in_progress
| reload_completed
) == 0)
1719 && (GET_CODE (operands
[1]) == MEM
|| GET_CODE (operands
[2]) == MEM
))
1722 if (GET_CODE (operands
[1]) == MEM
&& !rtx_equal_p (operands
[0], operands
[1]))
1724 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1728 if (GET_CODE (operands
[2]) == MEM
)
1730 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1734 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1736 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1737 emit_move_insn (temp
, operands
[1]);
1742 if (modified
&& !ix86_binary_operator_ok (code
, mode
, operands
))
1752 /* Return TRUE or FALSE depending on whether the binary operator meets the
1753 appropriate constraints. */
1756 ix86_binary_operator_ok (code
, mode
, operands
)
1758 enum machine_mode mode
;
1761 return (GET_CODE (operands
[1]) != MEM
|| GET_CODE (operands
[2]) != MEM
)
1762 && (GET_CODE (operands
[1]) != CONST_INT
|| GET_RTX_CLASS (code
) == 'c');
1765 /* Attempt to expand a unary operator. Make the expansion closer to the
1766 actual machine, then just general_operand, which will allow 2 separate
1767 memory references (one output, one input) in a single insn. Return
1768 whether the insn fails, or succeeds. */
1771 ix86_expand_unary_operator (code
, mode
, operands
)
1773 enum machine_mode mode
;
1778 /* If optimizing, copy to regs to improve CSE */
1781 && ((reload_in_progress
| reload_completed
) == 0)
1782 && GET_CODE (operands
[1]) == MEM
)
1784 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1787 if (!ix86_unary_operator_ok (code
, mode
, operands
))
1789 if ((!TARGET_PSEUDO
|| !optimize
)
1790 && ((reload_in_progress
| reload_completed
) == 0)
1791 && GET_CODE (operands
[1]) == MEM
)
1793 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1794 if (!ix86_unary_operator_ok (code
, mode
, operands
))
1804 /* Return TRUE or FALSE depending on whether the unary operator meets the
1805 appropriate constraints. */
1808 ix86_unary_operator_ok (code
, mode
, operands
)
1810 enum machine_mode mode
;
1818 static rtx pic_label_rtx
;
1819 static char pic_label_name
[256];
1820 static int pic_label_no
= 0;
1822 /* This function generates code for -fpic that loads %ebx with
1823 with the return address of the caller and then returns. */
1825 asm_output_function_prefix (file
, name
)
1830 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1831 || current_function_uses_const_pool
);
1832 xops
[0] = pic_offset_table_rtx
;
1833 xops
[1] = stack_pointer_rtx
;
1835 /* deep branch prediction favors having a return for every call */
1836 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
1840 if (pic_label_rtx
== 0)
1842 pic_label_rtx
= (rtx
) gen_label_rtx ();
1843 sprintf (pic_label_name
, "LPR%d", pic_label_no
++);
1844 LABEL_NAME (pic_label_rtx
) = pic_label_name
;
1846 prologue_node
= make_node (FUNCTION_DECL
);
1847 DECL_RESULT (prologue_node
) = 0;
1848 #ifdef ASM_DECLARE_FUNCTION_NAME
1849 ASM_DECLARE_FUNCTION_NAME (file
, pic_label_name
, prologue_node
);
1851 output_asm_insn ("movl (%1),%0", xops
);
1852 output_asm_insn ("ret", xops
);
1856 /* Set up the stack and frame (if desired) for the function. */
1859 function_prologue (file
, size
)
1866 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1867 || current_function_uses_const_pool
);
1868 long tsize
= get_frame_size ();
1870 /* pic references don't explicitly mention pic_offset_table_rtx */
1871 if (TARGET_SCHEDULE_PROLOGUE
)
1877 xops
[0] = stack_pointer_rtx
;
1878 xops
[1] = frame_pointer_rtx
;
1879 xops
[2] = GEN_INT (tsize
);
1881 if (frame_pointer_needed
)
1883 output_asm_insn ("push%L1 %1", xops
);
1884 output_asm_insn (AS2 (mov
%L0
,%0,%1), xops
);
1889 else if (! TARGET_STACK_PROBE
|| tsize
< CHECK_STACK_LIMIT
)
1890 output_asm_insn (AS2 (sub
%L0
,%2,%0), xops
);
1893 xops
[3] = gen_rtx (REG
, SImode
, 0);
1894 output_asm_insn (AS2 (mov
%L0
,%2,%3), xops
);
1896 xops
[3] = gen_rtx (SYMBOL_REF
, Pmode
, "_alloca");
1897 output_asm_insn (AS1 (call
,%P3
), xops
);
1900 /* Note If use enter it is NOT reversed args.
1901 This one is not reversed from intel!!
1902 I think enter is slower. Also sdb doesn't like it.
1903 But if you want it the code is:
1905 xops[3] = const0_rtx;
1906 output_asm_insn ("enter %2,%3", xops);
1909 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
1910 for (regno
= limit
- 1; regno
>= 0; regno
--)
1911 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1912 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
1914 xops
[0] = gen_rtx (REG
, SImode
, regno
);
1915 output_asm_insn ("push%L0 %0", xops
);
1918 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
1920 xops
[0] = pic_offset_table_rtx
;
1921 xops
[1] = gen_rtx (SYMBOL_REF
, Pmode
, LABEL_NAME (pic_label_rtx
));
1923 output_asm_insn (AS1 (call
,%P1
), xops
);
1924 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops
);
1927 else if (pic_reg_used
)
1929 xops
[0] = pic_offset_table_rtx
;
1930 xops
[1] = (rtx
) gen_label_rtx ();
1932 output_asm_insn (AS1 (call
,%P1
), xops
);
1933 ASM_OUTPUT_INTERNAL_LABEL (file
, "L", CODE_LABEL_NUMBER (xops
[1]));
1934 output_asm_insn (AS1 (pop
%L0
,%0), xops
);
1935 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops
);
1939 /* This function generates the assembly code for function entry.
1940 FILE is an stdio stream to output the code to.
1941 SIZE is an int: how many units of temporary storage to allocate. */
1944 ix86_expand_prologue ()
1949 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1950 || current_function_uses_const_pool
);
1951 long tsize
= get_frame_size ();
1954 if (!TARGET_SCHEDULE_PROLOGUE
)
1957 xops
[0] = stack_pointer_rtx
;
1958 xops
[1] = frame_pointer_rtx
;
1959 xops
[2] = GEN_INT (tsize
);
1960 if (frame_pointer_needed
)
1964 gen_rtx (MEM
, SImode
,
1965 gen_rtx (PRE_DEC
, SImode
, stack_pointer_rtx
)),
1966 frame_pointer_rtx
));
1967 RTX_FRAME_RELATED_P (insn
) = 1;
1968 insn
= emit_move_insn (xops
[1], xops
[0]);
1969 RTX_FRAME_RELATED_P (insn
) = 1;
1974 else if (! TARGET_STACK_PROBE
|| tsize
< CHECK_STACK_LIMIT
)
1976 insn
= emit_insn (gen_prologue_set_stack_ptr (xops
[2]));
1977 RTX_FRAME_RELATED_P (insn
) = 1;
1981 xops
[3] = gen_rtx (REG
, SImode
, 0);
1982 emit_move_insn (xops
[3], xops
[2]);
1983 xops
[3] = gen_rtx (MEM
, FUNCTION_MODE
,
1984 gen_rtx (SYMBOL_REF
, Pmode
, "_alloca"));
1985 emit_call_insn (gen_rtx (CALL
, VOIDmode
,
1986 xops
[3], const0_rtx
));
1989 /* Note If use enter it is NOT reversed args.
1990 This one is not reversed from intel!!
1991 I think enter is slower. Also sdb doesn't like it.
1992 But if you want it the code is:
1994 xops[3] = const0_rtx;
1995 output_asm_insn ("enter %2,%3", xops);
1998 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
1999 for (regno
= limit
- 1; regno
>= 0; regno
--)
2000 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2001 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2003 xops
[0] = gen_rtx (REG
, SImode
, regno
);
2006 gen_rtx (MEM
, SImode
,
2007 gen_rtx (PRE_DEC
, SImode
, stack_pointer_rtx
)),
2010 RTX_FRAME_RELATED_P (insn
) = 1;
2013 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
2015 xops
[0] = pic_offset_table_rtx
;
2016 if (pic_label_rtx
== 0)
2018 pic_label_rtx
= (rtx
) gen_label_rtx ();
2019 sprintf (pic_label_name
, "LPR%d", pic_label_no
++);
2020 LABEL_NAME (pic_label_rtx
) = pic_label_name
;
2022 xops
[1] = gen_rtx (MEM
, QImode
, gen_rtx (SYMBOL_REF
, Pmode
, LABEL_NAME (pic_label_rtx
)));
2024 emit_insn (gen_prologue_get_pc (xops
[0], xops
[1]));
2025 emit_insn (gen_prologue_set_got (xops
[0],
2026 gen_rtx (SYMBOL_REF
, Pmode
, "$_GLOBAL_OFFSET_TABLE_"),
2027 gen_rtx (CONST_INT
, Pmode
, CODE_LABEL_NUMBER(xops
[1]))));
2029 else if (pic_reg_used
)
2031 xops
[0] = pic_offset_table_rtx
;
2032 xops
[1] = (rtx
) gen_label_rtx ();
2034 emit_insn (gen_prologue_get_pc (xops
[0], gen_rtx (CONST_INT
, Pmode
, CODE_LABEL_NUMBER(xops
[1]))));
2035 emit_insn (gen_pop (xops
[0]));
2036 emit_insn (gen_prologue_set_got (xops
[0],
2037 gen_rtx (SYMBOL_REF
, Pmode
, "$_GLOBAL_OFFSET_TABLE_"),
2038 gen_rtx (CONST_INT
, Pmode
, CODE_LABEL_NUMBER (xops
[1]))));
/* Restore function stack, frame, and registers.
   Intentionally empty: all epilogue work is emitted as RTL by
   ix86_expand_epilogue.  */

void
function_epilogue (file, size)
     FILE *file;
     int size;
{
}
2051 /* Return 1 if it is appropriate to emit `ret' instructions in the
2052 body of a function. Do this only if the epilogue is simple, needing a
2053 couple of insns. Prior to reloading, we can't tell how many registers
2054 must be saved, so return 0 then. Return 0 if there is no frame
2055 marker to de-allocate.
2057 If NON_SAVING_SETJMP is defined and true, then it is not possible
2058 for the epilogue to be simple, so return 0. This is a special case
2059 since NON_SAVING_SETJMP will not cause regs_ever_live to change
2060 until final, but jump_optimize may need to know sooner if a
2064 ix86_can_use_return_insn_p ()
2068 int reglimit
= (frame_pointer_needed
2069 ? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
2070 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2071 || current_function_uses_const_pool
);
2073 #ifdef NON_SAVING_SETJMP
2074 if (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
2078 if (! reload_completed
)
2081 for (regno
= reglimit
- 1; regno
>= 0; regno
--)
2082 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2083 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2086 return nregs
== 0 || ! frame_pointer_needed
;
2090 /* This function generates the assembly code for function exit.
2091 FILE is an stdio stream to output the code to.
2092 SIZE is an int: how many units of temporary storage to deallocate. */
2095 ix86_expand_epilogue ()
2098 register int nregs
, limit
;
2101 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2102 || current_function_uses_const_pool
);
2103 long tsize
= get_frame_size ();
2105 /* Compute the number of registers to pop */
2107 limit
= (frame_pointer_needed
2108 ? FRAME_POINTER_REGNUM
2109 : STACK_POINTER_REGNUM
);
2113 for (regno
= limit
- 1; regno
>= 0; regno
--)
2114 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2115 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2118 /* sp is often unreliable so we must go off the frame pointer,
2121 /* In reality, we may not care if sp is unreliable, because we can
2122 restore the register relative to the frame pointer. In theory,
2123 since each move is the same speed as a pop, and we don't need the
2124 leal, this is faster. For now restore multiple registers the old
2127 offset
= -tsize
- (nregs
* UNITS_PER_WORD
);
2129 xops
[2] = stack_pointer_rtx
;
2131 /* When -fpic, we must emit a scheduling barrier, so that the instruction
2132 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
2133 moved before any instruction which implicitly uses the got. This
2134 includes any instruction which uses a SYMBOL_REF or a LABEL_REF.
2136 Alternatively, this could be fixed by making the dependence on the
2137 PIC_OFFSET_TABLE_REGNUM explicit in the RTL. */
2139 emit_insn (gen_blockage ());
2141 if (nregs
> 1 || ! frame_pointer_needed
)
2143 if (frame_pointer_needed
)
2145 xops
[0] = adj_offsettable_operand (AT_BP (QImode
), offset
);
2146 emit_insn (gen_movsi_lea (xops
[2], XEXP (xops
[0], 0)));
2147 /* output_asm_insn (AS2 (lea%L2,%0,%2), xops);*/
2150 for (regno
= 0; regno
< limit
; regno
++)
2151 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2152 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2154 xops
[0] = gen_rtx (REG
, SImode
, regno
);
2155 emit_insn (gen_pop (xops
[0]));
2156 /* output_asm_insn ("pop%L0 %0", xops);*/
2160 for (regno
= 0; regno
< limit
; regno
++)
2161 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2162 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2164 xops
[0] = gen_rtx (REG
, SImode
, regno
);
2165 xops
[1] = adj_offsettable_operand (AT_BP (Pmode
), offset
);
2166 emit_move_insn (xops
[0], xops
[1]);
2167 /* output_asm_insn (AS2 (mov%L0,%1,%0), xops);*/
2171 if (frame_pointer_needed
)
2173 /* If not an i386, mov & pop is faster than "leave". */
2175 if (TARGET_USE_LEAVE
)
2176 emit_insn (gen_leave());
2177 /* output_asm_insn ("leave", xops);*/
2180 xops
[0] = frame_pointer_rtx
;
2181 xops
[1] = stack_pointer_rtx
;
2182 emit_insn (gen_epilogue_set_stack_ptr());
2183 /* output_asm_insn (AS2 (mov%L2,%0,%2), xops);*/
2184 emit_insn (gen_pop (xops
[0]));
2185 /* output_asm_insn ("pop%L0 %0", xops);*/
2190 /* If there is no frame pointer, we must still release the frame. */
2192 xops
[0] = GEN_INT (tsize
);
2193 emit_insn (gen_rtx (SET
, SImode
,
2195 gen_rtx (PLUS
, SImode
,
2198 /* output_asm_insn (AS2 (add%L2,%0,%2), xops);*/
2201 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2202 if (profile_block_flag
== 2)
2204 FUNCTION_BLOCK_PROFILER_EXIT(file
);
2208 if (current_function_pops_args
&& current_function_args_size
)
2210 xops
[1] = GEN_INT (current_function_pops_args
);
2212 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2213 asked to pop more, pop return address, do explicit add, and jump
2214 indirectly to the caller. */
2216 if (current_function_pops_args
>= 32768)
2218 /* ??? Which register to use here? */
2219 xops
[0] = gen_rtx (REG
, SImode
, 2);
2220 emit_insn (gen_pop (xops
[0]));
2221 /* output_asm_insn ("pop%L0 %0", xops);*/
2222 emit_insn (gen_rtx (SET
, SImode
,
2224 gen_rtx (PLUS
, SImode
,
2227 /* output_asm_insn (AS2 (add%L2,%1,%2), xops);*/
2228 emit_jump_insn (xops
[0]);
2229 /* output_asm_insn ("jmp %*%0", xops);*/
2232 emit_jump_insn (gen_return_pop_internal (xops
[1]));
2233 /* output_asm_insn ("ret %1", xops);*/
2236 /* output_asm_insn ("ret", xops);*/
2237 emit_jump_insn (gen_return_internal ());
2241 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2242 that is a valid memory address for an instruction.
2243 The MODE argument is the machine mode for the MEM expression
2244 that wants to use this address.
2246 On x86, legitimate addresses are:
2247 base movl (base),reg
2248 displacement movl disp,reg
2249 base + displacement movl disp(base),reg
2250 index + base movl (base,index),reg
2251 (index + base) + displacement movl disp(base,index),reg
2252 index*scale movl (,index,scale),reg
2253 index*scale + disp movl disp(,index,scale),reg
2254 index*scale + base movl (base,index,scale),reg
2255 (index*scale + base) + disp movl disp(base,index,scale),reg
2257 In each case, scale can be 1, 2, 4, 8. */
2259 /* This is exactly the same as print_operand_addr, except that
2260 it recognizes addresses instead of printing them.
2262 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2263 convert common non-canonical forms to canonical form so that they will
2266 #define ADDR_INVALID(msg,insn) \
2268 if (TARGET_DEBUG_ADDR) \
2270 fprintf (stderr, msg); \
2276 legitimate_address_p (mode
, addr
, strict
)
2277 enum machine_mode mode
;
2281 rtx base
= NULL_RTX
;
2282 rtx indx
= NULL_RTX
;
2283 rtx scale
= NULL_RTX
;
2284 rtx disp
= NULL_RTX
;
2286 if (TARGET_DEBUG_ADDR
)
2289 "\n==========\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2290 GET_MODE_NAME (mode
), strict
);
2295 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == SUBREG
)
2296 base
= addr
; /* base reg */
2298 else if (GET_CODE (addr
) == PLUS
)
2300 rtx op0
= XEXP (addr
, 0);
2301 rtx op1
= XEXP (addr
, 1);
2302 enum rtx_code code0
= GET_CODE (op0
);
2303 enum rtx_code code1
= GET_CODE (op1
);
2305 if (code0
== REG
|| code0
== SUBREG
)
2307 if (code1
== REG
|| code1
== SUBREG
)
2309 indx
= op0
; /* index + base */
2315 base
= op0
; /* base + displacement */
2320 else if (code0
== MULT
)
2322 indx
= XEXP (op0
, 0);
2323 scale
= XEXP (op0
, 1);
2325 if (code1
== REG
|| code1
== SUBREG
)
2326 base
= op1
; /* index*scale + base */
2329 disp
= op1
; /* index*scale + disp */
2332 else if (code0
== PLUS
&& GET_CODE (XEXP (op0
, 0)) == MULT
)
2334 indx
= XEXP (XEXP (op0
, 0), 0); /* index*scale + base + disp */
2335 scale
= XEXP (XEXP (op0
, 0), 1);
2336 base
= XEXP (op0
, 1);
2340 else if (code0
== PLUS
)
2342 indx
= XEXP (op0
, 0); /* index + base + disp */
2343 base
= XEXP (op0
, 1);
2349 ADDR_INVALID ("PLUS subcode is not valid.\n", op0
);
2354 else if (GET_CODE (addr
) == MULT
)
2356 indx
= XEXP (addr
, 0); /* index*scale */
2357 scale
= XEXP (addr
, 1);
2361 disp
= addr
; /* displacement */
2363 /* Allow arg pointer and stack pointer as index if there is not scaling */
2364 if (base
&& indx
&& !scale
2365 && (indx
== arg_pointer_rtx
|| indx
== stack_pointer_rtx
))
2372 /* Validate base register */
2373 /* Don't allow SUBREG's here, it can lead to spill failures when the base
2374 is one word out of a two word structure, which is represented internally
2378 if (GET_CODE (base
) != REG
)
2380 ADDR_INVALID ("Base is not a register.\n", base
);
2384 if ((strict
&& !REG_OK_FOR_BASE_STRICT_P (base
))
2385 || (!strict
&& !REG_OK_FOR_BASE_NONSTRICT_P (base
)))
2387 ADDR_INVALID ("Base is not valid.\n", base
);
2392 /* Validate index register */
2393 /* Don't allow SUBREG's here, it can lead to spill failures when the index
2394 is one word out of a two word structure, which is represented internally
2398 if (GET_CODE (indx
) != REG
)
2400 ADDR_INVALID ("Index is not a register.\n", indx
);
2404 if ((strict
&& !REG_OK_FOR_INDEX_STRICT_P (indx
))
2405 || (!strict
&& !REG_OK_FOR_INDEX_NONSTRICT_P (indx
)))
2407 ADDR_INVALID ("Index is not valid.\n", indx
);
2412 abort (); /* scale w/o index invalid */
2414 /* Validate scale factor */
2417 HOST_WIDE_INT value
;
2419 if (GET_CODE (scale
) != CONST_INT
)
2421 ADDR_INVALID ("Scale is not valid.\n", scale
);
2425 value
= INTVAL (scale
);
2426 if (value
!= 1 && value
!= 2 && value
!= 4 && value
!= 8)
2428 ADDR_INVALID ("Scale is not a good multiplier.\n", scale
);
2433 /* Validate displacement
2434 Constant pool addresses must be handled special. They are
2435 considered legitimate addresses, but only if not used with regs.
2436 When printed, the output routines know to print the reference with the
2437 PIC reg, even though the PIC reg doesn't appear in the RTL. */
2440 if (GET_CODE (disp
) == SYMBOL_REF
2441 && CONSTANT_POOL_ADDRESS_P (disp
)
2446 else if (!CONSTANT_ADDRESS_P (disp
))
2448 ADDR_INVALID ("Displacement is not valid.\n", disp
);
2452 else if (GET_CODE (disp
) == CONST_DOUBLE
)
2454 ADDR_INVALID ("Displacement is a const_double.\n", disp
);
2458 else if (flag_pic
&& SYMBOLIC_CONST (disp
)
2459 && base
!= pic_offset_table_rtx
2460 && (indx
!= pic_offset_table_rtx
|| scale
!= NULL_RTX
))
2462 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp
);
2466 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp
)
2467 && (base
!= NULL_RTX
|| indx
!= NULL_RTX
))
2469 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n", disp
);
2474 if (TARGET_DEBUG_ADDR
)
2475 fprintf (stderr
, "Address is valid.\n");
2477 /* Everything looks valid, return true */
2482 /* Return a legitimate reference for ORIG (an address) using the
2483 register REG. If REG is 0, a new pseudo is generated.
2485 There are three types of references that must be handled:
2487 1. Global data references must load the address from the GOT, via
2488 the PIC reg. An insn is emitted to do this load, and the reg is
2491 2. Static data references must compute the address as an offset
2492 from the GOT, whose base is in the PIC reg. An insn is emitted to
2493 compute the address into a reg, and the reg is returned. Static
2494 data objects have SYMBOL_REF_FLAG set to differentiate them from
2495 global data objects.
2497 3. Constant pool addresses must be handled special. They are
2498 considered legitimate addresses, but only if not used with regs.
2499 When printed, the output routines know to print the reference with the
2500 PIC reg, even though the PIC reg doesn't appear in the RTL.
2502 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2503 reg also appears in the address (except for constant pool references,
2506 "switch" statements also require special handling when generating
2507 PIC code. See comments by the `casesi' insn in i386.md for details. */
2510 legitimize_pic_address (orig
, reg
)
2517 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
2519 if (GET_CODE (addr
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (addr
))
2524 reg
= gen_reg_rtx (Pmode
);
2526 if ((GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_FLAG (addr
))
2527 || GET_CODE (addr
) == LABEL_REF
)
2528 new = gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
, orig
);
2530 new = gen_rtx (MEM
, Pmode
,
2531 gen_rtx (PLUS
, Pmode
,
2532 pic_offset_table_rtx
, orig
));
2534 emit_move_insn (reg
, new);
2536 current_function_uses_pic_offset_table
= 1;
2539 else if (GET_CODE (addr
) == CONST
|| GET_CODE (addr
) == PLUS
)
2543 if (GET_CODE (addr
) == CONST
)
2545 addr
= XEXP (addr
, 0);
2546 if (GET_CODE (addr
) != PLUS
)
2550 if (XEXP (addr
, 0) == pic_offset_table_rtx
)
2554 reg
= gen_reg_rtx (Pmode
);
2556 base
= legitimize_pic_address (XEXP (addr
, 0), reg
);
2557 addr
= legitimize_pic_address (XEXP (addr
, 1),
2558 base
== reg
? NULL_RTX
: reg
);
2560 if (GET_CODE (addr
) == CONST_INT
)
2561 return plus_constant (base
, INTVAL (addr
));
2563 if (GET_CODE (addr
) == PLUS
&& CONSTANT_P (XEXP (addr
, 1)))
2565 base
= gen_rtx (PLUS
, Pmode
, base
, XEXP (addr
, 0));
2566 addr
= XEXP (addr
, 1);
2568 return gen_rtx (PLUS
, Pmode
, base
, addr
);
2574 /* Emit insns to move operands[1] into operands[0]. */
2577 emit_pic_move (operands
, mode
)
2579 enum machine_mode mode
;
2581 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
2583 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
2584 operands
[1] = (rtx
) force_reg (SImode
, operands
[1]);
2586 operands
[1] = legitimize_pic_address (operands
[1], temp
);
2590 /* Try machine-dependent ways of modifying an illegitimate address
2591 to be legitimate. If we find one, return the new, valid address.
2592 This macro is used in only one place: `memory_address' in explow.c.
2594 OLDX is the address as it was before break_out_memory_refs was called.
2595 In some cases it is useful to look at this to decide what needs to be done.
2597 MODE and WIN are passed so that this macro can use
2598 GO_IF_LEGITIMATE_ADDRESS.
2600 It is always safe for this macro to do nothing. It exists to recognize
2601 opportunities to optimize the output.
2603 For the 80386, we handle X+REG by loading X into a register R and
2604 using R+REG. R will go in a general reg and indexing will be used.
2605 However, if REG is a broken-out memory address or multiplication,
2606 nothing needs to be done because REG can certainly go in a general reg.
2608 When -fpic is used, special handling is needed for symbolic references.
2609 See comments by legitimize_pic_address in i386.c for details. */
2612 legitimize_address (x
, oldx
, mode
)
2615 enum machine_mode mode
;
2620 if (TARGET_DEBUG_ADDR
)
2622 fprintf (stderr
, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n", GET_MODE_NAME (mode
));
2626 if (flag_pic
&& SYMBOLIC_CONST (x
))
2627 return legitimize_pic_address (x
, 0);
2629 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2630 if (GET_CODE (x
) == ASHIFT
2631 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2632 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (x
, 1)))) < 4)
2635 x
= gen_rtx (MULT
, Pmode
,
2636 force_reg (Pmode
, XEXP (x
, 0)),
2637 GEN_INT (1 << log
));
2640 if (GET_CODE (x
) == PLUS
)
2642 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2643 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
2644 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2645 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 0), 1)))) < 4)
2648 XEXP (x
, 0) = gen_rtx (MULT
, Pmode
,
2649 force_reg (Pmode
, XEXP (XEXP (x
, 0), 0)),
2650 GEN_INT (1 << log
));
2653 if (GET_CODE (XEXP (x
, 1)) == ASHIFT
2654 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
2655 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 1), 1)))) < 4)
2658 XEXP (x
, 1) = gen_rtx (MULT
, Pmode
,
2659 force_reg (Pmode
, XEXP (XEXP (x
, 1), 0)),
2660 GEN_INT (1 << log
));
2663 /* Put multiply first if it isn't already */
2664 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2666 rtx tmp
= XEXP (x
, 0);
2667 XEXP (x
, 0) = XEXP (x
, 1);
2672 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2673 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2674 created by virtual register instantiation, register elimination, and
2675 similar optimizations. */
2676 if (GET_CODE (XEXP (x
, 0)) == MULT
&& GET_CODE (XEXP (x
, 1)) == PLUS
)
2679 x
= gen_rtx (PLUS
, Pmode
,
2680 gen_rtx (PLUS
, Pmode
, XEXP (x
, 0), XEXP (XEXP (x
, 1), 0)),
2681 XEXP (XEXP (x
, 1), 1));
2684 /* Canonicalize (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2685 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2686 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 0)) == PLUS
2687 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == MULT
2688 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == PLUS
2689 && CONSTANT_P (XEXP (x
, 1)))
2691 rtx constant
, other
;
2693 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2695 constant
= XEXP (x
, 1);
2696 other
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
2698 else if (GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 1)) == CONST_INT
)
2700 constant
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
2701 other
= XEXP (x
, 1);
2709 x
= gen_rtx (PLUS
, Pmode
,
2710 gen_rtx (PLUS
, Pmode
, XEXP (XEXP (x
, 0), 0),
2711 XEXP (XEXP (XEXP (x
, 0), 1), 0)),
2712 plus_constant (other
, INTVAL (constant
)));
2716 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
2719 if (GET_CODE (XEXP (x
, 0)) == MULT
)
2722 XEXP (x
, 0) = force_operand (XEXP (x
, 0), 0);
2725 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2728 XEXP (x
, 1) = force_operand (XEXP (x
, 1), 0);
2732 && GET_CODE (XEXP (x
, 1)) == REG
2733 && GET_CODE (XEXP (x
, 0)) == REG
)
2736 if (flag_pic
&& SYMBOLIC_CONST (XEXP (x
, 1)))
2739 x
= legitimize_pic_address (x
, 0);
2742 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
2745 if (GET_CODE (XEXP (x
, 0)) == REG
)
2747 register rtx temp
= gen_reg_rtx (Pmode
);
2748 register rtx val
= force_operand (XEXP (x
, 1), temp
);
2750 emit_move_insn (temp
, val
);
2756 else if (GET_CODE (XEXP (x
, 1)) == REG
)
2758 register rtx temp
= gen_reg_rtx (Pmode
);
2759 register rtx val
= force_operand (XEXP (x
, 0), temp
);
2761 emit_move_insn (temp
, val
);
2772 /* Print an integer constant expression in assembler syntax. Addition
2773 and subtraction are the only arithmetic that may appear in these
2774 expressions. FILE is the stdio stream to write to, X is the rtx, and
2775 CODE is the operand print code from the output string. */
2778 output_pic_addr_const (file
, x
, code
)
2785 switch (GET_CODE (x
))
2796 if (GET_CODE (x
) == SYMBOL_REF
)
2797 assemble_name (file
, XSTR (x
, 0));
2800 ASM_GENERATE_INTERNAL_LABEL (buf
, "L",
2801 CODE_LABEL_NUMBER (XEXP (x
, 0)));
2802 assemble_name (asm_out_file
, buf
);
2805 if (GET_CODE (x
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x
))
2806 fprintf (file
, "@GOTOFF(%%ebx)");
2807 else if (code
== 'P')
2808 fprintf (file
, "@PLT");
2809 else if (GET_CODE (x
) == LABEL_REF
)
2810 fprintf (file
, "@GOTOFF");
2811 else if (! SYMBOL_REF_FLAG (x
))
2812 fprintf (file
, "@GOT");
2814 fprintf (file
, "@GOTOFF");
2819 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (x
));
2820 assemble_name (asm_out_file
, buf
);
2824 fprintf (file
, "%d", INTVAL (x
));
2828 /* This used to output parentheses around the expression,
2829 but that does not work on the 386 (either ATT or BSD assembler). */
2830 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2834 if (GET_MODE (x
) == VOIDmode
)
2836 /* We can use %d if the number is <32 bits and positive. */
2837 if (CONST_DOUBLE_HIGH (x
) || CONST_DOUBLE_LOW (x
) < 0)
2838 fprintf (file
, "0x%x%08x",
2839 CONST_DOUBLE_HIGH (x
), CONST_DOUBLE_LOW (x
));
2841 fprintf (file
, "%d", CONST_DOUBLE_LOW (x
));
2844 /* We can't handle floating point constants;
2845 PRINT_OPERAND must handle them. */
2846 output_operand_lossage ("floating constant misused");
2850 /* Some assemblers need integer constants to appear last (eg masm). */
2851 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
2853 output_pic_addr_const (file
, XEXP (x
, 1), code
);
2854 if (INTVAL (XEXP (x
, 0)) >= 0)
2855 fprintf (file
, "+");
2856 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2860 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2861 if (INTVAL (XEXP (x
, 1)) >= 0)
2862 fprintf (file
, "+");
2863 output_pic_addr_const (file
, XEXP (x
, 1), code
);
2868 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2869 fprintf (file
, "-");
2870 output_pic_addr_const (file
, XEXP (x
, 1), code
);
2874 output_operand_lossage ("invalid expression as operand");
2878 /* Append the correct conditional move suffix which corresponds to CODE */
2881 put_condition_code (code
, reverse_cc
, mode
, file
)
2884 enum mode_class mode
;
2888 ieee
= (TARGET_IEEE_FP
&& (cc_prev_status
.flags
& CC_IN_80387
)
2889 && ! (cc_prev_status
.flags
& CC_FCOMI
));
2890 if (reverse_cc
&& ! ieee
)
2891 code
= reverse_condition (code
);
2893 if (mode
== MODE_INT
)
2897 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
2903 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
2909 fputs ("ge", file
); return;
2911 fputs ("g", file
); return;
2913 fputs ("le", file
); return;
2915 fputs ("l", file
); return;
2917 fputs ("ae", file
); return;
2919 fputs ("a", file
); return;
2921 fputs ("be", file
); return;
2923 fputs ("b", file
); return;
2924 default: output_operand_lossage ("Invalid %%C operand");
2926 else if (mode
== MODE_FLOAT
)
2930 fputs (ieee
? (reverse_cc
? "ne" : "e") : "ne", file
); return;
2932 fputs (ieee
? (reverse_cc
? "ne" : "e") : "e", file
); return;
2934 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nb", file
); return;
2936 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nbe", file
); return;
2938 fputs (ieee
? (reverse_cc
? "nb" : "b") : "be", file
); return;
2940 fputs (ieee
? (reverse_cc
? "ne" : "e") : "b", file
); return;
2942 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nb", file
); return;
2944 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nbe", file
); return;
2946 fputs (ieee
? (reverse_cc
? "nb" : "b") : "be", file
); return;
2948 fputs (ieee
? (reverse_cc
? "ne" : "e") : "b", file
); return;
2949 default: output_operand_lossage ("Invalid %%C operand");
2954 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
2955 C -- print opcode suffix for set/cmov insn.
2956 c -- like C, but print reversed condition
2957 F -- print opcode suffix for fcmov insn.
2958 f -- like C, but print reversed condition
2959 R -- print the prefix for register names.
2960 z -- print the opcode suffix for the size of the current operand.
2961 * -- print a star (in certain assembler syntax)
2962 w -- print the operand as if it's a "word" (HImode) even if it isn't.
2963 c -- don't print special prefixes before constant operands.
2964 J -- print the appropriate jump operand.
2965 s -- print a shift double count, followed by the assemblers argument
2967 b -- print the QImode name of the register for the indicated operand.
2968 %b0 would print %al if operands[0] is reg 0.
2969 w -- likewise, print the HImode name of the register.
2970 k -- likewise, print the SImode name of the register.
2971 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
2972 y -- print "st(0)" instead of "st" as a register.
2973 P -- print as a PIC constant
2977 print_operand (file
, x
, code
)
2992 PUT_OP_SIZE (code
, 'l', file
);
2996 PUT_OP_SIZE (code
, 'w', file
);
3000 PUT_OP_SIZE (code
, 'b', file
);
3004 PUT_OP_SIZE (code
, 'l', file
);
3008 PUT_OP_SIZE (code
, 's', file
);
3012 PUT_OP_SIZE (code
, 't', file
);
3016 /* 387 opcodes don't get size suffixes if the operands are
3019 if (STACK_REG_P (x
))
3022 /* this is the size of op from size of operand */
3023 switch (GET_MODE_SIZE (GET_MODE (x
)))
3026 PUT_OP_SIZE ('B', 'b', file
);
3030 PUT_OP_SIZE ('W', 'w', file
);
3034 if (GET_MODE (x
) == SFmode
)
3036 PUT_OP_SIZE ('S', 's', file
);
3040 PUT_OP_SIZE ('L', 'l', file
);
3044 PUT_OP_SIZE ('T', 't', file
);
3048 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
)
3050 #ifdef GAS_MNEMONICS
3051 PUT_OP_SIZE ('Q', 'q', file
);
3054 PUT_OP_SIZE ('Q', 'l', file
); /* Fall through */
3058 PUT_OP_SIZE ('Q', 'l', file
);
3071 switch (GET_CODE (x
))
3073 /* These conditions are appropriate for testing the result
3074 of an arithmetic operation, not for a compare operation.
3075 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
3076 CC_Z_IN_NOT_C false and not floating point. */
3077 case NE
: fputs ("jne", file
); return;
3078 case EQ
: fputs ("je", file
); return;
3079 case GE
: fputs ("jns", file
); return;
3080 case LT
: fputs ("js", file
); return;
3081 case GEU
: fputs ("jmp", file
); return;
3082 case GTU
: fputs ("jne", file
); return;
3083 case LEU
: fputs ("je", file
); return;
3084 case LTU
: fputs ("#branch never", file
); return;
3086 /* no matching branches for GT nor LE */
3091 if (GET_CODE (x
) == CONST_INT
|| ! SHIFT_DOUBLE_OMITS_COUNT
)
3093 PRINT_OPERAND (file
, x
, 0);
3094 fputs (AS2C (,) + 1, file
);
3098 /* This is used by the conditional move instructions. */
3100 put_condition_code (GET_CODE (x
), 0, MODE_INT
, file
);
3103 /* like above, but reverse condition */
3105 put_condition_code (GET_CODE (x
), 1, MODE_INT
, file
); return;
3108 put_condition_code (GET_CODE (x
), 0, MODE_FLOAT
, file
);
3111 /* like above, but reverse condition */
3113 put_condition_code (GET_CODE (x
), 1, MODE_FLOAT
, file
);
3120 sprintf (str
, "invalid operand code `%c'", code
);
3121 output_operand_lossage (str
);
3125 if (GET_CODE (x
) == REG
)
3127 PRINT_REG (x
, code
, file
);
3129 else if (GET_CODE (x
) == MEM
)
3131 PRINT_PTR (x
, file
);
3132 if (CONSTANT_ADDRESS_P (XEXP (x
, 0)))
3135 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3137 output_addr_const (file
, XEXP (x
, 0));
3140 output_address (XEXP (x
, 0));
3142 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
3144 REAL_VALUE_TYPE r
; long l
;
3145 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3146 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
3147 PRINT_IMMED_PREFIX (file
);
3148 fprintf (file
, "0x%x", l
);
3150 /* These float cases don't actually occur as immediate operands. */
3151 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
3153 REAL_VALUE_TYPE r
; char dstr
[30];
3154 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3155 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
3156 fprintf (file
, "%s", dstr
);
3158 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == XFmode
)
3160 REAL_VALUE_TYPE r
; char dstr
[30];
3161 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3162 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
3163 fprintf (file
, "%s", dstr
);
3169 if (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
)
3170 PRINT_IMMED_PREFIX (file
);
3171 else if (GET_CODE (x
) == CONST
|| GET_CODE (x
) == SYMBOL_REF
3172 || GET_CODE (x
) == LABEL_REF
)
3173 PRINT_OFFSET_PREFIX (file
);
3176 output_pic_addr_const (file
, x
, code
);
3178 output_addr_const (file
, x
);
3182 /* Print a memory operand whose address is ADDR. */
3185 print_operand_address (file
, addr
)
3189 register rtx reg1
, reg2
, breg
, ireg
;
3192 switch (GET_CODE (addr
))
3196 fprintf (file
, "%se", RP
);
3197 fputs (hi_reg_name
[REGNO (addr
)], file
);
3207 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0)))
3209 offset
= XEXP (addr
, 0);
3210 addr
= XEXP (addr
, 1);
3212 else if (CONSTANT_ADDRESS_P (XEXP (addr
, 1)))
3214 offset
= XEXP (addr
, 1);
3215 addr
= XEXP (addr
, 0);
3217 if (GET_CODE (addr
) != PLUS
) ;
3218 else if (GET_CODE (XEXP (addr
, 0)) == MULT
)
3220 reg1
= XEXP (addr
, 0);
3221 addr
= XEXP (addr
, 1);
3223 else if (GET_CODE (XEXP (addr
, 1)) == MULT
)
3225 reg1
= XEXP (addr
, 1);
3226 addr
= XEXP (addr
, 0);
3228 else if (GET_CODE (XEXP (addr
, 0)) == REG
)
3230 reg1
= XEXP (addr
, 0);
3231 addr
= XEXP (addr
, 1);
3233 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
3235 reg1
= XEXP (addr
, 1);
3236 addr
= XEXP (addr
, 0);
3238 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == MULT
)
3240 if (reg1
== 0) reg1
= addr
;
3246 if (addr
!= 0) abort ();
3249 if ((reg1
&& GET_CODE (reg1
) == MULT
)
3250 || (reg2
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2
))))
3255 else if (reg1
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1
)))
3261 if (ireg
!= 0 || breg
!= 0)
3268 output_pic_addr_const (file
, addr
, 0);
3270 else if (GET_CODE (addr
) == LABEL_REF
)
3271 output_asm_label (addr
);
3274 output_addr_const (file
, addr
);
3277 if (ireg
!= 0 && GET_CODE (ireg
) == MULT
)
3279 scale
= INTVAL (XEXP (ireg
, 1));
3280 ireg
= XEXP (ireg
, 0);
3283 /* The stack pointer can only appear as a base register,
3284 never an index register, so exchange the regs if it is wrong. */
3286 if (scale
== 1 && ireg
&& REGNO (ireg
) == STACK_POINTER_REGNUM
)
3295 /* output breg+ireg*scale */
3296 PRINT_B_I_S (breg
, ireg
, scale
, file
);
3303 if (GET_CODE (XEXP (addr
, 0)) == CONST_INT
)
3305 scale
= INTVAL (XEXP (addr
, 0));
3306 ireg
= XEXP (addr
, 1);
3310 scale
= INTVAL (XEXP (addr
, 1));
3311 ireg
= XEXP (addr
, 0);
3313 output_addr_const (file
, const0_rtx
);
3314 PRINT_B_I_S ((rtx
) 0, ireg
, scale
, file
);
3319 if (GET_CODE (addr
) == CONST_INT
3320 && INTVAL (addr
) < 0x8000
3321 && INTVAL (addr
) >= -0x8000)
3322 fprintf (file
, "%d", INTVAL (addr
));
3326 output_pic_addr_const (file
, addr
, 0);
3328 output_addr_const (file
, addr
);
3333 /* Set the cc_status for the results of an insn whose pattern is EXP.
3334 On the 80386, we assume that only test and compare insns, as well
3335 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3336 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3337 Also, we assume that jumps, moves and sCOND don't affect the condition
3338 codes. All else clobbers the condition codes, by assumption.
3340 We assume that ALL integer add, minus, etc. instructions effect the
3341 condition codes. This MUST be consistent with i386.md.
3343 We don't record any float test or compare - the redundant test &
3344 compare check in final.c does not handle stack-like regs correctly. */
3347 notice_update_cc (exp
)
3350 if (GET_CODE (exp
) == SET
)
3352 /* Jumps do not alter the cc's. */
3353 if (SET_DEST (exp
) == pc_rtx
)
3355 #ifdef IS_STACK_MODE
3356 /* Moving into a memory of stack_mode may have been moved
3357 in between the use and set of cc0 by loop_spl(). So
3358 old value of cc.status must be retained */
3359 if(GET_CODE(SET_DEST(exp
))==MEM
3360 && IS_STACK_MODE(GET_MODE(SET_DEST(exp
))))
3365 /* Moving register or memory into a register:
3366 it doesn't alter the cc's, but it might invalidate
3367 the RTX's which we remember the cc's came from.
3368 (Note that moving a constant 0 or 1 MAY set the cc's). */
3369 if (REG_P (SET_DEST (exp
))
3370 && (REG_P (SET_SRC (exp
)) || GET_CODE (SET_SRC (exp
)) == MEM
3371 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3373 if (cc_status
.value1
3374 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3375 cc_status
.value1
= 0;
3376 if (cc_status
.value2
3377 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3378 cc_status
.value2
= 0;
3381 /* Moving register into memory doesn't alter the cc's.
3382 It may invalidate the RTX's which we remember the cc's came from. */
3383 if (GET_CODE (SET_DEST (exp
)) == MEM
3384 && (REG_P (SET_SRC (exp
))
3385 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3387 if (cc_status
.value1
&& GET_CODE (cc_status
.value1
) == MEM
3388 || reg_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3389 cc_status
.value1
= 0;
3390 if (cc_status
.value2
&& GET_CODE (cc_status
.value2
) == MEM
3391 || reg_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3392 cc_status
.value2
= 0;
3395 /* Function calls clobber the cc's. */
3396 else if (GET_CODE (SET_SRC (exp
)) == CALL
)
3401 /* Tests and compares set the cc's in predictable ways. */
3402 else if (SET_DEST (exp
) == cc0_rtx
)
3405 cc_status
.value1
= SET_SRC (exp
);
3408 /* Certain instructions effect the condition codes. */
3409 else if (GET_MODE (SET_SRC (exp
)) == SImode
3410 || GET_MODE (SET_SRC (exp
)) == HImode
3411 || GET_MODE (SET_SRC (exp
)) == QImode
)
3412 switch (GET_CODE (SET_SRC (exp
)))
3414 case ASHIFTRT
: case LSHIFTRT
:
3416 /* Shifts on the 386 don't set the condition codes if the
3417 shift count is zero. */
3418 if (GET_CODE (XEXP (SET_SRC (exp
), 1)) != CONST_INT
)
3423 /* We assume that the CONST_INT is non-zero (this rtx would
3424 have been deleted if it were zero. */
3426 case PLUS
: case MINUS
: case NEG
:
3427 case AND
: case IOR
: case XOR
:
3428 cc_status
.flags
= CC_NO_OVERFLOW
;
3429 cc_status
.value1
= SET_SRC (exp
);
3430 cc_status
.value2
= SET_DEST (exp
);
3441 else if (GET_CODE (exp
) == PARALLEL
3442 && GET_CODE (XVECEXP (exp
, 0, 0)) == SET
)
3444 if (SET_DEST (XVECEXP (exp
, 0, 0)) == pc_rtx
)
3446 if (SET_DEST (XVECEXP (exp
, 0, 0)) == cc0_rtx
)
3449 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp
, 0, 0))))
3451 cc_status
.flags
|= CC_IN_80387
;
3452 if (TARGET_CMOVE
&& stack_regs_mentioned_p
3453 (XEXP (SET_SRC (XVECEXP (exp
, 0, 0)), 1)))
3454 cc_status
.flags
|= CC_FCOMI
;
3457 cc_status
.value1
= SET_SRC (XVECEXP (exp
, 0, 0));
3468 /* Split one or more DImode RTL references into pairs of SImode
3469 references. The RTL can be REG, offsettable MEM, integer constant, or
3470 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3471 split and "num" is its length. lo_half and hi_half are output arrays
3472 that parallel "operands". */
3475 split_di (operands
, num
, lo_half
, hi_half
)
3478 rtx lo_half
[], hi_half
[];
3482 if (GET_CODE (operands
[num
]) == REG
)
3484 lo_half
[num
] = gen_rtx (REG
, SImode
, REGNO (operands
[num
]));
3485 hi_half
[num
] = gen_rtx (REG
, SImode
, REGNO (operands
[num
]) + 1);
3487 else if (CONSTANT_P (operands
[num
]))
3489 split_double (operands
[num
], &lo_half
[num
], &hi_half
[num
]);
3491 else if (offsettable_memref_p (operands
[num
]))
3493 lo_half
[num
] = operands
[num
];
3494 hi_half
[num
] = adj_offsettable_operand (operands
[num
], 4);
3501 /* Return 1 if this is a valid binary operation on a 387.
3502 OP is the expression matched, and MODE is its mode. */
3505 binary_387_op (op
, mode
)
3507 enum machine_mode mode
;
3509 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3512 switch (GET_CODE (op
))
3518 return GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
;
3526 /* Return 1 if this is a valid shift or rotate operation on a 386.
3527 OP is the expression matched, and MODE is its mode. */
3532 enum machine_mode mode
;
3534 rtx operand
= XEXP (op
, 0);
3536 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3539 if (GET_MODE (operand
) != GET_MODE (op
)
3540 || GET_MODE_CLASS (GET_MODE (op
)) != MODE_INT
)
3543 return (GET_CODE (op
) == ASHIFT
3544 || GET_CODE (op
) == ASHIFTRT
3545 || GET_CODE (op
) == LSHIFTRT
3546 || GET_CODE (op
) == ROTATE
3547 || GET_CODE (op
) == ROTATERT
);
3550 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3551 MODE is not used. */
3554 VOIDmode_compare_op (op
, mode
)
3556 enum machine_mode mode
;
3558 return GET_CODE (op
) == COMPARE
&& GET_MODE (op
) == VOIDmode
;
3561 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3562 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3563 is the expression of the binary operation. The output may either be
3564 emitted here, or returned to the caller, like all output_* functions.
3566 There is no guarantee that the operands are the same mode, as they
3567 might be within FLOAT or FLOAT_EXTEND expressions. */
3570 output_387_binary_op (insn
, operands
)
3576 static char buf
[100];
3578 switch (GET_CODE (operands
[3]))
3581 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3582 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3589 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3590 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3597 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3598 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3605 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3606 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3616 strcpy (buf
, base_op
);
3618 switch (GET_CODE (operands
[3]))
3622 if (REG_P (operands
[2]) && REGNO (operands
[0]) == REGNO (operands
[2]))
3625 operands
[2] = operands
[1];
3629 if (GET_CODE (operands
[2]) == MEM
)
3630 return strcat (buf
, AS1 (%z2
,%2));
3632 if (NON_STACK_REG_P (operands
[1]))
3634 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
3637 else if (NON_STACK_REG_P (operands
[2]))
3639 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3643 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3644 return strcat (buf
, AS2 (p
,%2,%0));
3646 if (STACK_TOP_P (operands
[0]))
3647 return strcat (buf
, AS2C (%y2
,%0));
3649 return strcat (buf
, AS2C (%2,%0));
3653 if (GET_CODE (operands
[1]) == MEM
)
3654 return strcat (buf
, AS1 (r
%z1
,%1));
3656 if (GET_CODE (operands
[2]) == MEM
)
3657 return strcat (buf
, AS1 (%z2
,%2));
3659 if (NON_STACK_REG_P (operands
[1]))
3661 output_op_from_reg (operands
[1], strcat (buf
, AS1 (r
%z0
,%1)));
3664 else if (NON_STACK_REG_P (operands
[2]))
3666 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3670 if (! STACK_REG_P (operands
[1]) || ! STACK_REG_P (operands
[2]))
3673 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3674 return strcat (buf
, AS2 (rp
,%2,%0));
3676 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1])))
3677 return strcat (buf
, AS2 (p
,%1,%0));
3679 if (STACK_TOP_P (operands
[0]))
3681 if (STACK_TOP_P (operands
[1]))
3682 return strcat (buf
, AS2C (%y2
,%0));
3684 return strcat (buf
, AS2 (r
,%y1
,%0));
3686 else if (STACK_TOP_P (operands
[1]))
3687 return strcat (buf
, AS2C (%1,%0));
3689 return strcat (buf
, AS2 (r
,%2,%0));
3696 /* Output code for INSN to convert a float to a signed int. OPERANDS
3697 are the insn operands. The output may be SFmode or DFmode and the
3698 input operand may be SImode or DImode. As a special case, make sure
3699 that the 387 stack top dies if the output mode is DImode, because the
3700 hardware requires this. */
3703 output_fix_trunc (insn
, operands
)
3707 int stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
3710 if (! STACK_TOP_P (operands
[1]) ||
3711 (GET_MODE (operands
[0]) == DImode
&& ! stack_top_dies
))
3714 xops
[0] = GEN_INT (12);
3715 xops
[1] = operands
[4];
3717 output_asm_insn (AS1 (fnstc
%W2
,%2), operands
);
3718 output_asm_insn (AS2 (mov
%L2
,%2,%4), operands
);
3719 output_asm_insn (AS2 (mov
%B1
,%0,%h1
), xops
);
3720 output_asm_insn (AS2 (mov
%L4
,%4,%3), operands
);
3721 output_asm_insn (AS1 (fldc
%W3
,%3), operands
);
3723 if (NON_STACK_REG_P (operands
[0]))
3724 output_to_reg (operands
[0], stack_top_dies
, operands
[3]);
3725 else if (GET_CODE (operands
[0]) == MEM
)
3728 output_asm_insn (AS1 (fistp
%z0
,%0), operands
);
3730 output_asm_insn (AS1 (fist
%z0
,%0), operands
);
3735 return AS1 (fldc
%W2
,%2);
3738 /* Output code for INSN to compare OPERANDS. The two operands might
3739 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3740 expression. If the compare is in mode CCFPEQmode, use an opcode that
3741 will not fault if a qNaN is present. */
3744 output_float_compare (insn
, operands
)
3749 rtx body
= XVECEXP (PATTERN (insn
), 0, 0);
3750 int unordered_compare
= GET_MODE (SET_SRC (body
)) == CCFPEQmode
;
3753 if (TARGET_CMOVE
&& STACK_REG_P (operands
[1]))
3755 cc_status
.flags
|= CC_FCOMI
;
3756 cc_prev_status
.flags
&= ~CC_TEST_AX
;
3759 if (! STACK_TOP_P (operands
[0]))
3762 operands
[0] = operands
[1];
3764 cc_status
.flags
|= CC_REVERSED
;
3767 if (! STACK_TOP_P (operands
[0]))
3770 stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
3772 if (STACK_REG_P (operands
[1])
3774 && find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1]))
3775 && REGNO (operands
[1]) != FIRST_STACK_REG
)
3777 /* If both the top of the 387 stack dies, and the other operand
3778 is also a stack register that dies, then this must be a
3779 `fcompp' float compare */
3781 if (unordered_compare
)
3782 if (cc_status
.flags
& CC_FCOMI
)
3784 output_asm_insn (AS2 (fucomip
,%y1
,%0), operands
);
3785 output_asm_insn (AS1 (fstp
, %y0
), operands
);
3789 output_asm_insn ("fucompp", operands
);
3792 if (cc_status
.flags
& CC_FCOMI
)
3794 output_asm_insn (AS2 (fcomip
, %y1
,%0), operands
);
3795 output_asm_insn (AS1 (fstp
, %y0
), operands
);
3799 output_asm_insn ("fcompp", operands
);
3804 static char buf
[100];
3806 /* Decide if this is the integer or float compare opcode, or the
3807 unordered float compare. */
3809 if (unordered_compare
)
3810 strcpy (buf
, (cc_status
.flags
& CC_FCOMI
) ? "fucomi" : "fucom");
3811 else if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_FLOAT
)
3812 strcpy (buf
, (cc_status
.flags
& CC_FCOMI
) ? "fcomi" : "fcom");
3814 strcpy (buf
, "ficom");
3816 /* Modify the opcode if the 387 stack is to be popped. */
3821 if (NON_STACK_REG_P (operands
[1]))
3822 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
3823 else if (cc_status
.flags
& CC_FCOMI
)
3825 output_asm_insn (strcat (buf
, AS2 (%z1
,%y1
,%0)), operands
);
3829 output_asm_insn (strcat (buf
, AS1 (%z1
,%y1
)), operands
);
3832 /* Now retrieve the condition code. */
3834 return output_fp_cc0_set (insn
);
3837 /* Output opcodes to transfer the results of FP compare or test INSN
3838 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
3839 result of the compare or test is unordered, no comparison operator
3840 succeeds except NE. Return an output template, if any. */
3843 output_fp_cc0_set (insn
)
3847 rtx unordered_label
;
3851 xops
[0] = gen_rtx (REG
, HImode
, 0);
3852 output_asm_insn (AS1 (fnsts
%W0
,%0), xops
);
3854 if (! TARGET_IEEE_FP
)
3856 if (!(cc_status
.flags
& CC_REVERSED
))
3858 next
= next_cc0_user (insn
);
3860 if (GET_CODE (next
) == JUMP_INSN
3861 && GET_CODE (PATTERN (next
)) == SET
3862 && SET_DEST (PATTERN (next
)) == pc_rtx
3863 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
3865 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
3867 else if (GET_CODE (PATTERN (next
)) == SET
)
3869 code
= GET_CODE (SET_SRC (PATTERN (next
)));
3875 if (code
== GT
|| code
== LT
|| code
== EQ
|| code
== NE
3876 || code
== LE
|| code
== GE
)
3877 { /* We will test eax directly */
3878 cc_status
.flags
|= CC_TEST_AX
;
3885 next
= next_cc0_user (insn
);
3886 if (next
== NULL_RTX
)
3889 if (GET_CODE (next
) == JUMP_INSN
3890 && GET_CODE (PATTERN (next
)) == SET
3891 && SET_DEST (PATTERN (next
)) == pc_rtx
3892 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
3894 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
3896 else if (GET_CODE (PATTERN (next
)) == SET
)
3898 if (GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
3899 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
3900 else code
= GET_CODE (SET_SRC (PATTERN (next
)));
3902 else if (GET_CODE (PATTERN (next
)) == PARALLEL
3903 && GET_CODE (XVECEXP (PATTERN (next
), 0, 0)) == SET
)
3905 if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next
), 0, 0))) == IF_THEN_ELSE
)
3906 code
= GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next
), 0, 0)), 0));
3907 else code
= GET_CODE (SET_SRC (XVECEXP (PATTERN (next
), 0, 0)));
3912 xops
[0] = gen_rtx (REG
, QImode
, 0);
3917 xops
[1] = GEN_INT (0x45);
3918 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3923 xops
[1] = GEN_INT (0x45);
3924 xops
[2] = GEN_INT (0x01);
3925 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3926 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
3931 xops
[1] = GEN_INT (0x05);
3932 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3937 xops
[1] = GEN_INT (0x45);
3938 xops
[2] = GEN_INT (0x40);
3939 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3940 output_asm_insn (AS1 (dec
%B0
,%h0
), xops
);
3941 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
3946 xops
[1] = GEN_INT (0x45);
3947 xops
[2] = GEN_INT (0x40);
3948 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3949 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
3954 xops
[1] = GEN_INT (0x44);
3955 xops
[2] = GEN_INT (0x40);
3956 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
3957 output_asm_insn (AS2 (xor%B0
,%2,%h0
), xops
);
3971 #define MAX_386_STACK_LOCALS 2
3973 static rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
3975 /* Define the structure for the machine field in struct function. */
3976 struct machine_function
3978 rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
3981 /* Functions to save and restore i386_stack_locals.
3982 These will be called, via pointer variables,
3983 from push_function_context and pop_function_context. */
3986 save_386_machine_status (p
)
3989 p
->machine
= (struct machine_function
*) xmalloc (sizeof i386_stack_locals
);
3990 bcopy ((char *) i386_stack_locals
, (char *) p
->machine
->i386_stack_locals
,
3991 sizeof i386_stack_locals
);
3995 restore_386_machine_status (p
)
3998 bcopy ((char *) p
->machine
->i386_stack_locals
, (char *) i386_stack_locals
,
3999 sizeof i386_stack_locals
);
4003 /* Clear stack slot assignments remembered from previous functions.
4004 This is called from INIT_EXPANDERS once before RTL is emitted for each
4008 clear_386_stack_locals ()
4010 enum machine_mode mode
;
4013 for (mode
= VOIDmode
; (int) mode
< (int) MAX_MACHINE_MODE
;
4014 mode
= (enum machine_mode
) ((int) mode
+ 1))
4015 for (n
= 0; n
< MAX_386_STACK_LOCALS
; n
++)
4016 i386_stack_locals
[(int) mode
][n
] = NULL_RTX
;
4018 /* Arrange to save and restore i386_stack_locals around nested functions. */
4019 save_machine_status
= save_386_machine_status
;
4020 restore_machine_status
= restore_386_machine_status
;
4023 /* Return a MEM corresponding to a stack slot with mode MODE.
4024 Allocate a new slot if necessary.
4026 The RTL for a function can have several slots available: N is
4027 which slot to use. */
4030 assign_386_stack_local (mode
, n
)
4031 enum machine_mode mode
;
4034 if (n
< 0 || n
>= MAX_386_STACK_LOCALS
)
4037 if (i386_stack_locals
[(int) mode
][n
] == NULL_RTX
)
4038 i386_stack_locals
[(int) mode
][n
]
4039 = assign_stack_local (mode
, GET_MODE_SIZE (mode
), 0);
4041 return i386_stack_locals
[(int) mode
][n
];
4047 enum machine_mode mode
;
4049 return (GET_CODE (op
) == MULT
);
4054 enum machine_mode mode
;
4056 return (GET_CODE (op
) == DIV
);
4061 /* Create a new copy of an rtx.
4062 Recursively copies the operands of the rtx,
4063 except for those few rtx codes that are sharable.
4064 Doesn't share CONST */
4072 register RTX_CODE code
;
4073 register char *format_ptr
;
4075 code
= GET_CODE (orig
);
4088 /* SCRATCH must be shared because they represent distinct values. */
4093 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4094 a LABEL_REF, it isn't sharable. */
4095 if (GET_CODE (XEXP (orig
, 0)) == PLUS
4096 && GET_CODE (XEXP (XEXP (orig
, 0), 0)) == SYMBOL_REF
4097 && GET_CODE (XEXP (XEXP (orig
, 0), 1)) == CONST_INT
)
4101 /* A MEM with a constant address is not sharable. The problem is that
4102 the constant address may need to be reloaded. If the mem is shared,
4103 then reloading one copy of this mem will cause all copies to appear
4104 to have been reloaded. */
4107 copy
= rtx_alloc (code
);
4108 PUT_MODE (copy
, GET_MODE (orig
));
4109 copy
->in_struct
= orig
->in_struct
;
4110 copy
->volatil
= orig
->volatil
;
4111 copy
->unchanging
= orig
->unchanging
;
4112 copy
->integrated
= orig
->integrated
;
4114 copy
->is_spill_rtx
= orig
->is_spill_rtx
;
4116 format_ptr
= GET_RTX_FORMAT (GET_CODE (copy
));
4118 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (copy
)); i
++)
4120 switch (*format_ptr
++)
4123 XEXP (copy
, i
) = XEXP (orig
, i
);
4124 if (XEXP (orig
, i
) != NULL
)
4125 XEXP (copy
, i
) = copy_rtx (XEXP (orig
, i
));
4130 XEXP (copy
, i
) = XEXP (orig
, i
);
4135 XVEC (copy
, i
) = XVEC (orig
, i
);
4136 if (XVEC (orig
, i
) != NULL
)
4138 XVEC (copy
, i
) = rtvec_alloc (XVECLEN (orig
, i
));
4139 for (j
= 0; j
< XVECLEN (copy
, i
); j
++)
4140 XVECEXP (copy
, i
, j
) = copy_rtx (XVECEXP (orig
, i
, j
));
4145 XWINT (copy
, i
) = XWINT (orig
, i
);
4149 XINT (copy
, i
) = XINT (orig
, i
);
4154 XSTR (copy
, i
) = XSTR (orig
, i
);
4165 /* try to rewrite a memory address to make it valid */
4167 rewrite_address (mem_rtx
)
4170 rtx index_rtx
, base_rtx
, offset_rtx
, scale_rtx
, ret_rtx
;
4172 int offset_adjust
= 0;
4173 int was_only_offset
= 0;
4174 rtx mem_addr
= XEXP (mem_rtx
, 0);
4175 char *storage
= (char *) oballoc (0);
4177 int is_spill_rtx
= 0;
4179 in_struct
= MEM_IN_STRUCT_P (mem_rtx
);
4180 is_spill_rtx
= RTX_IS_SPILL_P (mem_rtx
);
4182 if (GET_CODE (mem_addr
) == PLUS
&&
4183 GET_CODE (XEXP (mem_addr
, 1)) == PLUS
&&
4184 GET_CODE (XEXP (XEXP (mem_addr
, 1), 0)) == REG
)
4185 { /* this part is utilized by the combiner */
4187 gen_rtx (PLUS
, GET_MODE (mem_addr
),
4188 gen_rtx (PLUS
, GET_MODE (XEXP (mem_addr
, 1)),
4190 XEXP (XEXP (mem_addr
, 1), 0)),
4191 XEXP (XEXP (mem_addr
, 1), 1));
4192 if (memory_address_p (GET_MODE (mem_rtx
), ret_rtx
))
4194 XEXP (mem_rtx
, 0) = ret_rtx
;
4195 RTX_IS_SPILL_P (ret_rtx
) = is_spill_rtx
;
4201 /* this part is utilized by loop.c */
4202 /* If the address contains PLUS (reg,const) and this pattern is invalid
4203 in this case - try to rewrite the address to make it valid intel1
4205 storage
= (char *) oballoc (0);
4206 index_rtx
= base_rtx
= offset_rtx
= NULL
;
4207 /* find the base index and offset elements of the memory address */
4208 if (GET_CODE (mem_addr
) == PLUS
)
4210 if (GET_CODE (XEXP (mem_addr
, 0)) == REG
)
4212 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4214 base_rtx
= XEXP (mem_addr
, 1);
4215 index_rtx
= XEXP (mem_addr
, 0);
4219 base_rtx
= XEXP (mem_addr
, 0);
4220 offset_rtx
= XEXP (mem_addr
, 1);
4223 else if (GET_CODE (XEXP (mem_addr
, 0)) == MULT
)
4225 index_rtx
= XEXP (mem_addr
, 0);
4226 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4228 base_rtx
= XEXP (mem_addr
, 1);
4232 offset_rtx
= XEXP (mem_addr
, 1);
4235 else if (GET_CODE (XEXP (mem_addr
, 0)) == PLUS
)
4238 if (GET_CODE (XEXP (XEXP (mem_addr
, 0), 0)) == PLUS
&&
4239 GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0)) == MULT
&&
4240 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 0)) == REG
&&
4241 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 1)) == CONST_INT
&&
4242 GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1)) == CONST_INT
&&
4243 GET_CODE (XEXP (XEXP (mem_addr
, 0), 1)) == REG
&&
4244 GET_CODE (XEXP (mem_addr
, 1)) == SYMBOL_REF
)
4246 index_rtx
= XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0);
4247 offset_rtx
= XEXP (mem_addr
, 1);
4248 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4249 offset_adjust
= INTVAL (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1));
4253 offset_rtx
= XEXP (mem_addr
, 1);
4254 index_rtx
= XEXP (XEXP (mem_addr
, 0), 0);
4255 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4258 else if (GET_CODE (XEXP (mem_addr
, 0)) == CONST_INT
)
4260 was_only_offset
= 1;
4263 offset_rtx
= XEXP (mem_addr
, 1);
4264 offset_adjust
= INTVAL (XEXP (mem_addr
, 0));
4265 if (offset_adjust
== 0)
4267 XEXP (mem_rtx
, 0) = offset_rtx
;
4268 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4278 else if (GET_CODE (mem_addr
) == MULT
)
4280 index_rtx
= mem_addr
;
4287 if (index_rtx
&& GET_CODE (index_rtx
) == MULT
)
4289 if (GET_CODE (XEXP (index_rtx
, 1)) != CONST_INT
)
4294 scale_rtx
= XEXP (index_rtx
, 1);
4295 scale
= INTVAL (scale_rtx
);
4296 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4298 /* now find which of the elements are invalid and try to fix them */
4299 if (index_rtx
&& GET_CODE (index_rtx
) == CONST_INT
&& base_rtx
== NULL
)
4301 offset_adjust
= INTVAL (index_rtx
) * scale
;
4302 if (offset_rtx
&& GET_CODE (offset_rtx
) == CONST
&&
4303 GET_CODE (XEXP (offset_rtx
, 0)) == PLUS
)
4305 if (GET_CODE (XEXP (XEXP (offset_rtx
, 0), 0)) == SYMBOL_REF
&&
4306 GET_CODE (XEXP (XEXP (offset_rtx
, 0), 1)) == CONST_INT
)
4308 offset_rtx
= copy_all_rtx (offset_rtx
);
4309 XEXP (XEXP (offset_rtx
, 0), 1) =
4310 gen_rtx (CONST_INT
, 0, INTVAL (XEXP (XEXP (offset_rtx
, 0), 1)) + offset_adjust
);
4311 if (!CONSTANT_P (offset_rtx
))
4318 else if (offset_rtx
&& GET_CODE (offset_rtx
) == SYMBOL_REF
)
4321 gen_rtx (CONST
, GET_MODE (offset_rtx
),
4322 gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4324 gen_rtx (CONST_INT
, 0, offset_adjust
)));
4325 if (!CONSTANT_P (offset_rtx
))
4331 else if (offset_rtx
&& GET_CODE (offset_rtx
) == CONST_INT
)
4333 offset_rtx
= gen_rtx (CONST_INT
, 0, INTVAL (offset_rtx
) + offset_adjust
);
4335 else if (!offset_rtx
)
4337 offset_rtx
= gen_rtx (CONST_INT
, 0, 0);
4339 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4340 XEXP (mem_rtx
, 0) = offset_rtx
;
4343 if (base_rtx
&& GET_CODE (base_rtx
) == PLUS
&&
4344 GET_CODE (XEXP (base_rtx
, 0)) == REG
&&
4345 GET_CODE (XEXP (base_rtx
, 1)) == CONST_INT
)
4347 offset_adjust
+= INTVAL (XEXP (base_rtx
, 1));
4348 base_rtx
= copy_all_rtx (XEXP (base_rtx
, 0));
4350 else if (base_rtx
&& GET_CODE (base_rtx
) == CONST_INT
)
4352 offset_adjust
+= INTVAL (base_rtx
);
4355 if (index_rtx
&& GET_CODE (index_rtx
) == PLUS
&&
4356 GET_CODE (XEXP (index_rtx
, 0)) == REG
&&
4357 GET_CODE (XEXP (index_rtx
, 1)) == CONST_INT
)
4359 offset_adjust
+= INTVAL (XEXP (index_rtx
, 1)) * scale
;
4360 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4364 if (!LEGITIMATE_INDEX_P (index_rtx
)
4365 && !(index_rtx
== stack_pointer_rtx
&& scale
== 1 && base_rtx
== NULL
))
4373 if (!LEGITIMATE_INDEX_P (base_rtx
) && GET_CODE (base_rtx
) != REG
)
4379 if (offset_adjust
!= 0)
4383 if (GET_CODE (offset_rtx
) == CONST
&&
4384 GET_CODE (XEXP (offset_rtx
, 0)) == PLUS
)
4386 if (GET_CODE (XEXP (XEXP (offset_rtx
, 0), 0)) == SYMBOL_REF
&&
4387 GET_CODE (XEXP (XEXP (offset_rtx
, 0), 1)) == CONST_INT
)
4389 offset_rtx
= copy_all_rtx (offset_rtx
);
4390 XEXP (XEXP (offset_rtx
, 0), 1) =
4391 gen_rtx (CONST_INT
, 0, INTVAL (XEXP (XEXP (offset_rtx
, 0), 1)) + offset_adjust
);
4392 if (!CONSTANT_P (offset_rtx
))
4399 else if (GET_CODE (offset_rtx
) == SYMBOL_REF
)
4402 gen_rtx (CONST
, GET_MODE (offset_rtx
),
4403 gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4405 gen_rtx (CONST_INT
, 0, offset_adjust
)));
4406 if (!CONSTANT_P (offset_rtx
))
4412 else if (GET_CODE (offset_rtx
) == CONST_INT
)
4414 offset_rtx
= gen_rtx (CONST_INT
, 0, INTVAL (offset_rtx
) + offset_adjust
);
4424 offset_rtx
= gen_rtx (CONST_INT
, 0, offset_adjust
);
4432 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4433 INTVAL (offset_rtx
) == 0)
4435 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
),
4436 gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
,
4442 ret_rtx
= gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4443 gen_rtx (PLUS
, GET_MODE (base_rtx
),
4444 gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
,
4452 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4453 INTVAL (offset_rtx
) == 0)
4455 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
), index_rtx
, base_rtx
);
4459 ret_rtx
= gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4460 gen_rtx (PLUS
, GET_MODE (index_rtx
), index_rtx
,
4470 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4471 INTVAL (offset_rtx
) == 0)
4473 ret_rtx
= gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
, scale_rtx
);
4478 gen_rtx (PLUS
, GET_MODE (offset_rtx
),
4479 gen_rtx (MULT
, GET_MODE (index_rtx
), index_rtx
,
4486 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4487 INTVAL (offset_rtx
) == 0)
4489 ret_rtx
= index_rtx
;
4493 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
), index_rtx
, offset_rtx
);
4502 if (GET_CODE (offset_rtx
) == CONST_INT
&&
4503 INTVAL (offset_rtx
) == 0)
4509 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
), base_rtx
, offset_rtx
);
4512 else if (was_only_offset
)
4514 ret_rtx
= offset_rtx
;
4522 XEXP (mem_rtx
, 0) = ret_rtx
;
4523 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4535 /* return 1 if the first insn to set cc before insn also sets the register
4536 reg_rtx - otherwise return 0 */
4538 last_to_set_cc (reg_rtx
, insn
)
4541 rtx prev_insn
= PREV_INSN (insn
);
4545 if (GET_CODE (prev_insn
) == NOTE
)
4548 else if (GET_CODE (prev_insn
) == INSN
)
4550 if (GET_CODE (PATTERN (prev_insn
)) != SET
)
4553 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn
)), reg_rtx
))
4555 if (sets_condition_code (SET_SRC (PATTERN (prev_insn
))))
4561 else if (!doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn
))))
4568 prev_insn
= PREV_INSN (prev_insn
);
4576 doesnt_set_condition_code (pat
)
4579 switch (GET_CODE (pat
))
4593 sets_condition_code (pat
)
4596 switch (GET_CODE (pat
))
4620 str_immediate_operand (op
, mode
)
4622 enum machine_mode mode
;
4624 if (GET_CODE (op
) == CONST_INT
&& INTVAL (op
) <= 32 && INTVAL (op
) >= 0)
4636 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4637 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4638 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4639 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
))
4648 Return 1 if the mode of the SET_DEST of insn is floating point
4649 and it is not an fld or a move from memory to memory.
4650 Otherwise return 0 */
4655 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4656 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4657 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4658 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4659 && GET_CODE (SET_DEST (PATTERN (insn
))) == REG
4660 && REGNO (SET_DEST (PATTERN (insn
))) >= FIRST_FLOAT_REG
4661 && GET_CODE (SET_SRC (insn
)) != MEM
)
4670 Return 1 if the mode of the SET_DEST floating point and is memory
4671 and the source is a register.
4677 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4678 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4679 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4680 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4681 && GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
4682 && GET_CODE (SET_SRC (PATTERN (insn
))) == REG
)
4692 Return 1 if dep_insn sets a register which insn uses as a base
4693 or index to reference memory.
4694 otherwise return 0 */
4697 agi_dependent (insn
, dep_insn
)
4700 if (GET_CODE (dep_insn
) == INSN
4701 && GET_CODE (PATTERN (dep_insn
)) == SET
4702 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == REG
)
4704 return (reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn
)), insn
));
4707 if (GET_CODE (dep_insn
) == INSN
&& GET_CODE (PATTERN (dep_insn
)) == SET
4708 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == MEM
4709 && push_operand (SET_DEST (PATTERN (dep_insn
)),
4710 GET_MODE (SET_DEST (PATTERN (dep_insn
)))))
4712 return (reg_mentioned_in_mem (stack_pointer_rtx
, insn
));
4720 Return 1 if reg is used in rtl as a base or index for a memory ref
4721 otherwise return 0. */
4724 reg_mentioned_in_mem (reg
, rtl
)
4729 register enum rtx_code code
;
4734 code
= GET_CODE (rtl
);
4752 if (code
== MEM
&& reg_mentioned_p (reg
, rtl
))
4755 fmt
= GET_RTX_FORMAT (code
);
4756 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4761 for (j
= XVECLEN (rtl
, i
) - 1; j
>= 0; j
--)
4763 if (reg_mentioned_in_mem (reg
, XVECEXP (rtl
, i
, j
)))
4768 else if (fmt
[i
] == 'e' && reg_mentioned_in_mem (reg
, XEXP (rtl
, i
)))
4775 /* Output the approprate insns for doing strlen if not just doing repnz; scasb
4777 operands[0] = result, initialized with the startaddress
4778 operands[1] = alignment of the address.
4779 operands[2] = scratch register, initialized with the startaddress when
4780 not aligned, otherwise undefined
4782 This is just the body. It needs the initialisations mentioned above and
4783 some address computing at the end. These things are done in i386.md. */
4786 output_strlen_unroll (operands
)
4791 xops
[0] = operands
[0]; /* Result */
4792 /* operands[1]; * Alignment */
4793 xops
[1] = operands
[2]; /* Scratch */
4794 xops
[2] = GEN_INT (0);
4795 xops
[3] = GEN_INT (2);
4796 xops
[4] = GEN_INT (3);
4797 xops
[5] = GEN_INT (4);
4798 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4799 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4800 xops
[8] = gen_label_rtx (); /* label of main loop */
4801 if(TARGET_USE_Q_REG
&& QI_REG_P (xops
[1]))
4802 xops
[9] = gen_label_rtx (); /* pentium optimisation */
4803 xops
[10] = gen_label_rtx (); /* end label 2 */
4804 xops
[11] = gen_label_rtx (); /* end label 1 */
4805 xops
[12] = gen_label_rtx (); /* end label */
4806 /* xops[13] * Temporary used */
4807 xops
[14] = GEN_INT (0xff);
4808 xops
[15] = GEN_INT (0xff00);
4809 xops
[16] = GEN_INT (0xff0000);
4810 xops
[17] = GEN_INT (0xff000000);
4812 /* Loop to check 1..3 bytes for null to get an aligned pointer */
4814 /* is there a known alignment and is it less then 4 */
4815 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) < 4)
4817 /* is there a known alignment and is it not 2 */
4818 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
4820 xops
[6] = gen_label_rtx (); /* label when aligned to 3-byte */
4821 xops
[7] = gen_label_rtx (); /* label when aligned to 2-byte */
4823 /* leave just the 3 lower bits */
4824 /* if this is a q-register, then the high part is used later */
4825 /* therefore user andl rather than andb */
4826 output_asm_insn (AS2 (and%L1
,%4,%1), xops
);
4827 /* is aligned to 4-byte adress when zero */
4828 output_asm_insn (AS1 (je
,%l8
), xops
);
4829 /* side-effect even Parity when %eax == 3 */
4830 output_asm_insn (AS1 (jp
,%6), xops
);
4832 /* is it aligned to 2 bytes ? */
4833 if (QI_REG_P (xops
[1]))
4834 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
4836 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
4837 output_asm_insn (AS1 (je
,%7), xops
);
4841 /* since the alignment is 2, we have to check 2 or 0 bytes */
4843 /* check if is aligned to 4 - byte */
4844 output_asm_insn (AS2 (and%L1
,%3,%1), xops
);
4845 /* is aligned to 4-byte adress when zero */
4846 output_asm_insn (AS1 (je
,%l8
), xops
);
4849 xops
[13] = gen_rtx (MEM
, QImode
, xops
[0]);
4850 /* now, compare the bytes */
4851 /* compare with the high part of a q-reg gives shorter code */
4852 if (QI_REG_P (xops
[1]))
4854 /* compare the first n unaligned byte on a byte per byte basis */
4855 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4856 /* when zero we reached the end */
4857 output_asm_insn (AS1 (je
,%l12
), xops
);
4858 /* increment the address */
4859 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4861 /* not needed with an alignment of 2 */
4862 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
4864 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[7]));
4865 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4866 output_asm_insn (AS1 (je
,%l12
), xops
);
4867 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4869 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[6]));
4871 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4875 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4876 output_asm_insn (AS1 (je
,%l12
), xops
);
4877 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4879 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[7]));
4880 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4881 output_asm_insn (AS1 (je
,%l12
), xops
);
4882 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4884 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[6]));
4885 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4887 output_asm_insn (AS1 (je
,%l12
), xops
);
4888 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4891 /* Generate loop to check 4 bytes at a time */
4892 /* IMHO it is not a good idea to align this loop. It gives only */
4893 /* huge programs, but does not help to speed up */
4894 /* ASM_OUTPUT_LOOP_ALIGN (asm_out_file); */
4895 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[8]));
4897 xops
[13] = gen_rtx (MEM
, SImode
, xops
[0]);
4898 output_asm_insn (AS2 (mov
%L1
,%13,%1), xops
);
4900 if (QI_REG_P (xops
[1]))
4902 /* On i586 it is faster to combine the hi- and lo- part as
4903 a kind of lookahead. If anding both yields zero, then one
4904 of both *could* be zero, otherwise none of both is zero;
4905 this saves one instruction, on i486 this is slower
4906 tested with P-90, i486DX2-66, AMD486DX2-66 */
4909 output_asm_insn (AS2 (test
%B1
,%h1
,%b1
), xops
);
4910 output_asm_insn (AS1 (jne
,%l9
), xops
);
4913 /* check first byte */
4914 output_asm_insn (AS2 (test
%B1
,%b1
,%b1
), xops
);
4915 output_asm_insn (AS1 (je
,%l12
), xops
);
4917 /* check second byte */
4918 output_asm_insn (AS2 (test
%B1
,%h1
,%h1
), xops
);
4919 output_asm_insn (AS1 (je
,%l11
), xops
);
4922 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[9]));
4926 /* check first byte */
4927 output_asm_insn (AS2 (test
%L1
,%14,%1), xops
);
4928 output_asm_insn (AS1 (je
,%l12
), xops
);
4930 /* check second byte */
4931 output_asm_insn (AS2 (test
%L1
,%15,%1), xops
);
4932 output_asm_insn (AS1 (je
,%l11
), xops
);
4935 /* check third byte */
4936 output_asm_insn (AS2 (test
%L1
,%16,%1), xops
);
4937 output_asm_insn (AS1 (je
,%l10
), xops
);
4939 /* check fourth byte and increment address */
4940 output_asm_insn (AS2 (add
%L0
,%5,%0), xops
);
4941 output_asm_insn (AS2 (test
%L1
,%17,%1), xops
);
4942 output_asm_insn (AS1 (jne
,%l8
), xops
);
4944 /* now generate fixups when the compare stops within a 4-byte word */
4945 output_asm_insn (AS2 (sub
%L0
,%4,%0), xops
);
4947 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[10]));
4948 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4950 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[11]));
4951 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4953 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[12]));