/* Subroutines for insn-output.c for Intel X86.
   Copyright (C) 1988, 1992, 1994, 1995, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include <stdio.h>
#include <setjmp.h>
#include <ctype.h>
#include "config.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "tree.h"
#include "flags.h"
#include "function.h"

#ifdef EXTRA_CONSTRAINT
/* If EXTRA_CONSTRAINT is defined, then the 'S'
   constraint in REG_CLASS_FROM_LETTER will no longer work, and various
   asm statements that need 'S' for class SIREG will break.  */
 error EXTRA_CONSTRAINT conflicts with S constraint letter
/* The previous line used to be #error, but some compilers barf
   even if the conditional was untrue.  */
#endif

enum reg_mem   /* Type of an operand for ix86_{binary,unary}_operator_ok */
{
  reg_p,
  mem_p,
  imm_p
};

/* Processor costs (relative to an add) */
struct processor_costs i386_cost = {   /* 386 specific costs */
  1,    /* cost of an add instruction (2 cycles) */
  1,    /* cost of a lea instruction */
  3,    /* variable shift costs */
  2,    /* constant shift costs */
  6,    /* cost of starting a multiply */
  1,    /* cost of multiply per each bit set */
  23    /* cost of a divide/mod */
};

struct processor_costs i486_cost = {   /* 486 specific costs */
  1,    /* cost of an add instruction */
  1,    /* cost of a lea instruction */
  3,    /* variable shift costs */
  2,    /* constant shift costs */
  12,   /* cost of starting a multiply */
  1,    /* cost of multiply per each bit set */
  40    /* cost of a divide/mod */
};

struct processor_costs pentium_cost = {
  1,    /* cost of an add instruction */
  1,    /* cost of a lea instruction */
  3,    /* variable shift costs */
  1,    /* constant shift costs */
  12,   /* cost of starting a multiply */
  1,    /* cost of multiply per each bit set */
  25    /* cost of a divide/mod */
};

struct processor_costs *ix86_cost = &pentium_cost;

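/* Reading these tables (an illustrative note, not used by the compiler
   itself): the entries are weights relative to an add, so with i386_cost
   a divide/mod counts as 23 adds while a constant shift counts as 2,
   which is why strength-reducing a division by a power of two into a
   shift is such a large win on every processor listed here.  */
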
#define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))

extern FILE *asm_out_file;
extern char *strcat ();

char *singlemove_string ();
char *output_move_const_single ();
char *output_fp_cc0_set ();

char *hi_reg_name[] = HI_REGISTER_NAMES;
char *qi_reg_name[] = QI_REGISTER_NAMES;
char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;

/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in i386.h.  */

enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{
  /* ax, dx, cx, bx */
  AREG, DREG, CREG, BREG,
  /* si, di, bp, sp */
  SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
  /* FP registers */
  FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
  FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
  /* arg pointer */
  INDEX_REGS
};

/* Test and compare insns in i386.md store the information needed to
   generate branch and scc insns here.  */

struct rtx_def *i386_compare_op0 = NULL_RTX;
struct rtx_def *i386_compare_op1 = NULL_RTX;
struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();

/* which cpu are we scheduling for */
enum processor_type ix86_cpu;

/* which instruction set architecture to use.  */
int ix86_isa;

/* Strings to hold which cpu and instruction set architecture to use.  */
char *ix86_cpu_string;   /* for -mcpu=<xxx> */
char *ix86_isa_string;   /* for -misa=<xxx> */

/* Register allocation order */
char *i386_reg_alloc_order;
static char regs_allocated[FIRST_PSEUDO_REGISTER];

/* # of registers to use to pass arguments.  */
char *i386_regparm_string;   /* # registers to use to pass args */
int i386_regparm;            /* i386_regparm_string as a number */

/* Alignment to use for loops and jumps */
char *i386_align_loops_string;   /* power of two alignment for loops */
char *i386_align_jumps_string;   /* power of two alignment for non-loop jumps */
char *i386_align_funcs_string;   /* power of two alignment for functions */

int i386_align_loops;   /* power of two alignment for loops */
int i386_align_jumps;   /* power of two alignment for non-loop jumps */
int i386_align_funcs;   /* power of two alignment for functions */

/* Sometimes certain combinations of command options do not make
   sense on a particular target machine.  You can define a macro
   `OVERRIDE_OPTIONS' to take account of this.  This macro, if
   defined, is executed once just after all the command options have
   been parsed.

   Don't use this macro to turn on various extra optimizations for
   `-O'.  That is what `OPTIMIZATION_OPTIONS' is for.  */

void
override_options ()
{
  int ch, i, j, regno;
  char *p;
  int def_align;

  static struct ptt
    {
      char *name;                        /* Canonical processor name.  */
      enum processor_type processor;     /* Processor type enum value.  */
      struct processor_costs *cost;      /* Processor costs */
      int target_enable;                 /* Target flags to enable.  */
      int target_disable;                /* Target flags to disable.  */
    } processor_target_table[]
      = {{PROCESSOR_COMMON_STRING, PROCESSOR_COMMON, &i486_cost, 0, 0},
         {PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
         {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
         {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
         {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
         {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0},
         {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0}};

  int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Validate registers in register allocation order */
  if (i386_reg_alloc_order)
    {
      for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
        {
          switch (ch)
            {
            case 'a': regno = 0; break;
            case 'd': regno = 1; break;
            case 'c': regno = 2; break;
            case 'b': regno = 3; break;
            case 'S': regno = 4; break;
            case 'D': regno = 5; break;
            case 'B': regno = 6; break;

            default:  fatal ("Register '%c' is unknown", ch);
            }

          if (regs_allocated[regno])
            fatal ("Register '%c' was already specified in the allocation order", ch);

          regs_allocated[regno] = 1;
        }
    }

  /* Get the architectural level.  */
  if (ix86_isa_string == (char *)0)
    ix86_isa_string = PROCESSOR_DEFAULT_STRING;

  for (i = 0; i < ptt_size; i++)
    if (! strcmp (ix86_isa_string, processor_target_table[i].name))
      {
        ix86_isa = processor_target_table[i].processor;
        if (ix86_cpu_string == (char *)0)
          ix86_cpu_string = processor_target_table[i].name;
        break;
      }

  if (i == ptt_size)
    {
      error ("bad value (%s) for -misa= switch", ix86_isa_string);
      ix86_isa_string = PROCESSOR_DEFAULT_STRING;
      ix86_isa = PROCESSOR_DEFAULT;
    }

  for (j = 0; j < ptt_size; j++)
    if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
      {
        ix86_cpu = processor_target_table[j].processor;
        if (i > j && (int)ix86_isa >= (int)PROCESSOR_PENTIUMPRO)
          error ("-mcpu=%s does not support -march=%s", ix86_cpu_string, ix86_isa_string);

        target_flags |= processor_target_table[j].target_enable;
        target_flags &= ~processor_target_table[j].target_disable;
        break;
      }

  if (j == ptt_size)
    {
      error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
      ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
      ix86_cpu = PROCESSOR_DEFAULT;
    }

  /* Validate -mregparm= value */
  if (i386_regparm_string)
    {
      i386_regparm = atoi (i386_regparm_string);
      if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
        fatal ("-mregparm=%d is not between 0 and %d", i386_regparm, REGPARM_MAX);
    }

  def_align = (TARGET_386) ? 2 : 4;

  /* Validate -malign-loops= value, or provide default */
  if (i386_align_loops_string)
    {
      i386_align_loops = atoi (i386_align_loops_string);
      if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
        fatal ("-malign-loops=%d is not between 0 and %d",
               i386_align_loops, MAX_CODE_ALIGN);
    }
  else
    i386_align_loops = 2;

  /* Validate -malign-jumps= value, or provide default */
  if (i386_align_jumps_string)
    {
      i386_align_jumps = atoi (i386_align_jumps_string);
      if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
        fatal ("-malign-jumps=%d is not between 0 and %d",
               i386_align_jumps, MAX_CODE_ALIGN);
    }
  else
    i386_align_jumps = def_align;

  /* Validate -malign-functions= value, or provide default */
  if (i386_align_funcs_string)
    {
      i386_align_funcs = atoi (i386_align_funcs_string);
      if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
        fatal ("-malign-functions=%d is not between 0 and %d",
               i386_align_funcs, MAX_CODE_ALIGN);
    }
  else
    i386_align_funcs = def_align;

  if (TARGET_OMIT_LEAF_FRAME_POINTER)   /* keep nonleaf frame pointers */
    flag_omit_frame_pointer = 1;

  /* pic references don't explicitly mention pic_offset_table_rtx */
  if (flag_pic)
    target_flags &= ~MASK_SCHEDULE_PROLOGUE;
}
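
/* Illustrative example of the switches validated above (the exact
   PROCESSOR_*_STRING spellings live in i386.h, so treat the names as
   assumptions):

       gcc -misa=i486 -mcpu=pentium -mregparm=3 -malign-loops=2 ...

   -misa= picks the architectural level, -mcpu= the processor to schedule
   for (defaulting to the -misa= choice), -mregparm= must lie between 0
   and REGPARM_MAX, and the -malign-* values are log2 alignments, so
   -malign-loops=2 requests 4-byte loop alignment.  */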
\f
/* A C statement (sans semicolon) to choose the order in which to
   allocate hard registers for pseudo-registers local to a basic
   block.

   Store the desired register order in the array `reg_alloc_order'.
   Element 0 should be the register to allocate first; element 1, the
   next register; and so on.

   The macro body should not assume anything about the contents of
   `reg_alloc_order' before execution of the macro.

   On most machines, it is not necessary to define this macro.  */

void
order_regs_for_local_alloc ()
{
  int i, ch, order, regno;

  /* User specified the register allocation order */
  if (i386_reg_alloc_order)
    {
      for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
        {
          switch (ch)
            {
            case 'a': regno = 0; break;
            case 'd': regno = 1; break;
            case 'c': regno = 2; break;
            case 'b': regno = 3; break;
            case 'S': regno = 4; break;
            case 'D': regno = 5; break;
            case 'B': regno = 6; break;
            }

          reg_alloc_order[order++] = regno;
        }

      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        {
          if (!regs_allocated[i])
            reg_alloc_order[order++] = i;
        }
    }

  /* If users did not specify a register allocation order, use natural order */
  else
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        reg_alloc_order[i] = i;
    }
}
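
/* Illustrative only: the letters accepted above stand for %eax, %edx,
   %ecx, %ebx, %esi (S), %edi (D) and %ebp (B), so an allocation-order
   string such as "dacb" asks local-alloc to prefer %edx, then %eax,
   %ecx and %ebx, with all remaining registers following in their
   natural order.  The command-line option that feeds
   i386_reg_alloc_order is declared in i386.h.  */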

\f
void
optimization_options (level)
     int level;
{
  /* For -O2, and beyond, turn off -fschedule-insns by default.  It tends to
     make the problem with not enough registers even worse */
#ifdef INSN_SCHEDULING
  if (level > 1)
    flag_schedule_insns = 0;
#endif
}
\f
/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
   attribute for DECL.  The attributes in ATTRIBUTES have previously been
   assigned to DECL.  */

int
i386_valid_decl_attribute_p (decl, attributes, identifier, args)
     tree decl;
     tree attributes;
     tree identifier;
     tree args;
{
  return 0;
}

/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
   attribute for TYPE.  The attributes in ATTRIBUTES have previously been
   assigned to TYPE.  */

int
i386_valid_type_attribute_p (type, attributes, identifier, args)
     tree type;
     tree attributes;
     tree identifier;
     tree args;
{
  if (TREE_CODE (type) != FUNCTION_TYPE
      && TREE_CODE (type) != FIELD_DECL
      && TREE_CODE (type) != TYPE_DECL)
    return 0;

  /* Stdcall attribute says callee is responsible for popping arguments
     if they are not variable.  */
  if (is_attribute_p ("stdcall", identifier))
    return (args == NULL_TREE);

  /* Cdecl attribute says the callee is a normal C declaration */
  if (is_attribute_p ("cdecl", identifier))
    return (args == NULL_TREE);

  /* Regparm attribute specifies how many integer arguments are to be
     passed in registers */
  if (is_attribute_p ("regparm", identifier))
    {
      tree cst;

      if (!args || TREE_CODE (args) != TREE_LIST
          || TREE_CHAIN (args) != NULL_TREE
          || TREE_VALUE (args) == NULL_TREE)
        return 0;

      cst = TREE_VALUE (args);
      if (TREE_CODE (cst) != INTEGER_CST)
        return 0;

      if (TREE_INT_CST_HIGH (cst) != 0
          || TREE_INT_CST_LOW (cst) < 0
          || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
        return 0;

      return 1;
    }

  return 0;
}
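
/* Illustrative only (hypothetical declarations, not part of the
   compiler): the attributes accepted above are spelled in user code as

       int f (int, int, int) __attribute__ ((regparm (3)));
       int g (int, int) __attribute__ ((stdcall));
       int h (int, int) __attribute__ ((cdecl));

   stdcall and cdecl take no arguments, while regparm takes a single
   integer constant between 0 and REGPARM_MAX.  */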

/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */

int
i386_comp_type_attributes (type1, type2)
     tree type1;
     tree type2;
{
  return 1;
}

\f
/* Value is the number of bytes of arguments automatically
   popped when returning from a subroutine call.
   FUNDECL is the declaration node of the function (as a tree),
   FUNTYPE is the data type of the function (as a tree),
   or for a library call it is an identifier node for the subroutine name.
   SIZE is the number of bytes of arguments passed on the stack.

   On the 80386, the RTD insn may be used to pop them if the number
   of args is fixed, but if the number is variable then the caller
   must pop them all.  RTD can't be used for library calls now
   because the library is compiled with the Unix compiler.
   Use of RTD is a selectable option, since it is incompatible with
   standard Unix calling sequences.  If the option is not selected,
   the caller must always pop the args.

   The attribute stdcall is equivalent to RTD on a per module basis.  */

int
i386_return_pops_args (fundecl, funtype, size)
     tree fundecl;
     tree funtype;
     int size;
{
  int rtd = TARGET_RTD;

  if (TREE_CODE (funtype) == IDENTIFIER_NODE)
    return 0;

  /* Cdecl functions override -mrtd, and never pop the stack */
  if (!lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype)))
    {
      /* Stdcall functions will pop the stack if not variable args */
      if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
        rtd = 1;

      if (rtd
          && (TYPE_ARG_TYPES (funtype) == NULL_TREE
              || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype))) == void_type_node)))
        return size;
    }

  /* Lose any fake structure return argument */
  if (aggregate_value_p (TREE_TYPE (funtype)))
    return GET_MODE_SIZE (Pmode);

  return 0;
}
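
/* A worked example of the rule above (illustrative only): for a fixed
   argument list such as  int g (int, int) __attribute__ ((stdcall)),
   this function returns 8, so the callee finishes with "ret $8" and
   the caller leaves %esp alone; a cdecl or varargs function returns 0
   here and the caller pops its own arguments.  */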

\f
/* Argument support functions.  */

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.  */

void
init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;   /* argument info to initialize */
     tree fntype;            /* tree ptr for function decl */
     rtx libname;            /* SYMBOL_REF of library name or 0 */
{
  static CUMULATIVE_ARGS zero_cum;
  tree param, next_param;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\ninit_cumulative_args (");
      if (fntype)
        {
          tree ret_type = TREE_TYPE (fntype);
          fprintf (stderr, "fntype code = %s, ret code = %s",
                   tree_code_name[ (int)TREE_CODE (fntype) ],
                   tree_code_name[ (int)TREE_CODE (ret_type) ]);
        }
      else
        fprintf (stderr, "no fntype");

      if (libname)
        fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  *cum = zero_cum;

  /* Set up the number of registers to use for passing arguments.  */
  cum->nregs = i386_regparm;
  if (fntype)
    {
      tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));
      if (attr)
        cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
    }

  /* Determine if this function has variable arguments.  This is
     indicated by the last argument being 'void_type_node' if there
     are no variable arguments.  If there are variable arguments, then
     we won't pass anything in registers */

  if (cum->nregs)
    {
      for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
           param != (tree)0;
           param = next_param)
        {
          next_param = TREE_CHAIN (param);
          if (next_param == (tree)0 && TREE_VALUE (param) != void_type_node)
            cum->nregs = 0;
        }
    }

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, ", nregs=%d )\n", cum->nregs);

  return;
}

/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;     /* current arg information */
     enum machine_mode mode;   /* current arg mode */
     tree type;                /* type of the argument or 0 if lib support */
     int named;                /* whether or not the argument was named */
{
  int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (TARGET_DEBUG_ARG)
    fprintf (stderr,
             "function_adv( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d )\n\n",
             words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);

  cum->words += words;
  cum->nregs -= words;
  cum->regno += words;

  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = 0;
    }

  return;
}

/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;     /* current arg information */
     enum machine_mode mode;   /* current arg mode */
     tree type;                /* type of the argument or 0 if lib support */
     int named;                /* != 0 for normal args, == 0 for ... args */
{
  rtx ret = NULL_RTX;
  int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  switch (mode)
    {
    default:   /* for now, pass fp/complex values on the stack */
      break;

    case BLKmode:
    case DImode:
    case SImode:
    case HImode:
    case QImode:
      if (words <= cum->nregs)
        ret = gen_rtx (REG, mode, cum->regno);
      break;
    }

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr,
               "function_arg( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d",
               words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);

      if (ret)
        fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
      else
        fprintf (stderr, ", stack");

      fprintf (stderr, " )\n");
    }

  return ret;
}

/* For an arg passed partly in registers and partly in memory,
   this is the number of registers used.
   For args passed entirely in registers or entirely in memory, zero.  */

int
function_arg_partial_nregs (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;     /* current arg information */
     enum machine_mode mode;   /* current arg mode */
     tree type;                /* type of the argument or 0 if lib support */
     int named;                /* != 0 for normal args, == 0 for ... args */
{
  return 0;
}
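
/* Putting the routines above together (illustrative only): for
   int f (int a, int b, int c) __attribute__ ((regparm (3))),
   cum->nregs starts at 3 and function_arg hands out hard registers
   0, 1 and 2 in turn -- %eax, %edx and %ecx -- while any further
   argument, or any argument once the registers are exhausted, is
   passed on the stack.  A varargs prototype disables register passing
   entirely, as init_cumulative_args notes.  */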

\f
/* Output an insn whose source is a 386 integer register.  SRC is the
   rtx for the register, and TEMPLATE is the op-code template.  SRC may
   be either SImode or DImode.

   The template will be output with operands[0] as SRC, and operands[1]
   as a pointer to the top of the 386 stack.  So a call from floatsidf2
   would look like this:

      output_op_from_reg (operands[1], AS1 (fild%z0,%1));

   where %z0 corresponds to the caller's operands[1], and is used to
   emit the proper size suffix.

   ??? Extend this to handle HImode - a 387 can load and store HImode
   values directly.  */

void
output_op_from_reg (src, template)
     rtx src;
     char *template;
{
  rtx xops[4];
  int size = GET_MODE_SIZE (GET_MODE (src));

  xops[0] = src;
  xops[1] = AT_SP (Pmode);
  xops[2] = GEN_INT (size);
  xops[3] = stack_pointer_rtx;

  if (size > UNITS_PER_WORD)
    {
      rtx high;
      if (size > 2 * UNITS_PER_WORD)
        {
          high = gen_rtx (REG, SImode, REGNO (src) + 2);
          output_asm_insn (AS1 (push%L0,%0), &high);
        }
      high = gen_rtx (REG, SImode, REGNO (src) + 1);
      output_asm_insn (AS1 (push%L0,%0), &high);
    }
  output_asm_insn (AS1 (push%L0,%0), &src);

  output_asm_insn (template, xops);

  output_asm_insn (AS2 (add%L3,%2,%3), xops);
}
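
/* For the floatsidf2 example in the comment above, with %eax as SRC the
   emitted sequence is roughly (illustrative only, AT&T syntax):

       pushl %eax
       fildl (%esp)
       addl $4,%esp

   i.e. the register is spilled to the stack, the template loads it into
   the 387 from (%esp), and the stack pointer is restored.  */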
\f
/* Output an insn to pop a value from the 387 top-of-stack to 386
   register DEST.  The 387 register stack is popped if DIES is true.  If
   the mode of DEST is an integer mode, a `fist' integer store is done,
   otherwise a `fst' float store is done.  */

void
output_to_reg (dest, dies)
     rtx dest;
     int dies;
{
  rtx xops[4];
  int size = GET_MODE_SIZE (GET_MODE (dest));

  xops[0] = AT_SP (Pmode);
  xops[1] = stack_pointer_rtx;
  xops[2] = GEN_INT (size);
  xops[3] = dest;

  output_asm_insn (AS2 (sub%L1,%2,%1), xops);

  if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
    {
      if (dies)
        output_asm_insn (AS1 (fistp%z3,%y0), xops);
      else
        output_asm_insn (AS1 (fist%z3,%y0), xops);
    }
  else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
    {
      if (dies)
        output_asm_insn (AS1 (fstp%z3,%y0), xops);
      else
        {
          if (GET_MODE (dest) == XFmode)
            {
              output_asm_insn (AS1 (fstp%z3,%y0), xops);
              output_asm_insn (AS1 (fld%z3,%y0), xops);
            }
          else
            output_asm_insn (AS1 (fst%z3,%y0), xops);
        }
    }
  else
    abort ();

  output_asm_insn (AS1 (pop%L0,%0), &dest);

  if (size > UNITS_PER_WORD)
    {
      dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
      output_asm_insn (AS1 (pop%L0,%0), &dest);
      if (size > 2 * UNITS_PER_WORD)
        {
          dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
          output_asm_insn (AS1 (pop%L0,%0), &dest);
        }
    }
}
\f
char *
singlemove_string (operands)
     rtx *operands;
{
  rtx x;
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
    {
      if (XEXP (x, 0) != stack_pointer_rtx)
        abort ();
      return "push%L1 %1";
    }
  else if (GET_CODE (operands[1]) == CONST_DOUBLE)
    {
      return output_move_const_single (operands);
    }
  else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
    return AS2 (mov%L0,%1,%0);
  else if (CONSTANT_P (operands[1]))
    return AS2 (mov%L0,%1,%0);
  else
    {
      output_asm_insn ("push%L1 %1", operands);
      return "pop%L0 %0";
    }
}
\f
/* Return a REG that occurs in ADDR with coefficient 1.
   ADDR can be effectively incremented by incrementing REG.  */

static rtx
find_addr_reg (addr)
     rtx addr;
{
  while (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == REG)
        addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 1)) == REG)
        addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 0)))
        addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 1)))
        addr = XEXP (addr, 0);
      else
        abort ();
    }
  if (GET_CODE (addr) == REG)
    return addr;
  abort ();
}

\f
/* Output an insn to add the constant N to the register X.  */

static void
asm_add (n, x)
     int n;
     rtx x;
{
  rtx xops[2];
  xops[0] = x;

  if (n == -1)
    output_asm_insn (AS1 (dec%L0,%0), xops);
  else if (n == 1)
    output_asm_insn (AS1 (inc%L0,%0), xops);
  else if (n < 0)
    {
      xops[1] = GEN_INT (-n);
      output_asm_insn (AS2 (sub%L0,%1,%0), xops);
    }
  else if (n > 0)
    {
      xops[1] = GEN_INT (n);
      output_asm_insn (AS2 (add%L0,%1,%0), xops);
    }
}
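
/* asm_add picks the cheapest form for the constant (illustrative only):
   asm_add (1, x) emits "incl", asm_add (-1, x) emits "decl", and
   asm_add (-8, stack_pointer_rtx) emits "subl $8,%esp".  */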

\f
/* Output assembler code to perform a doubleword move insn
   with operands OPERANDS.  */

char *
output_move_double (operands)
     rtx *operands;
{
  enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
  rtx latehalf[2];
  rtx middlehalf[2];
  rtx xops[2];
  rtx addreg0 = 0, addreg1 = 0;
  int dest_overlapped_low = 0;
  int size = GET_MODE_SIZE (GET_MODE (operands[0]));

  middlehalf[0] = 0;
  middlehalf[1] = 0;

  /* First classify both operands.  */

  if (REG_P (operands[0]))
    optype0 = REGOP;
  else if (offsettable_memref_p (operands[0]))
    optype0 = OFFSOP;
  else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
    optype0 = POPOP;
  else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
    optype0 = PUSHOP;
  else if (GET_CODE (operands[0]) == MEM)
    optype0 = MEMOP;
  else
    optype0 = RNDOP;

  if (REG_P (operands[1]))
    optype1 = REGOP;
  else if (CONSTANT_P (operands[1]))
    optype1 = CNSTOP;
  else if (offsettable_memref_p (operands[1]))
    optype1 = OFFSOP;
  else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
    optype1 = POPOP;
  else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
    optype1 = PUSHOP;
  else if (GET_CODE (operands[1]) == MEM)
    optype1 = MEMOP;
  else
    optype1 = RNDOP;

  /* Check for the cases that the operand constraints are not
     supposed to allow to happen.  Abort if we get one,
     because generating code for these cases is painful.  */

  if (optype0 == RNDOP || optype1 == RNDOP)
    abort ();

  /* If one operand is decrementing and one is incrementing
     decrement the former register explicitly
     and change that operand into ordinary indexing.  */

  if (optype0 == PUSHOP && optype1 == POPOP)
    {
      /* ??? Can this ever happen on i386?  */
      operands[0] = XEXP (XEXP (operands[0], 0), 0);
      asm_add (-size, operands[0]);
      if (GET_MODE (operands[1]) == XFmode)
        operands[0] = gen_rtx (MEM, XFmode, operands[0]);
      else if (GET_MODE (operands[0]) == DFmode)
        operands[0] = gen_rtx (MEM, DFmode, operands[0]);
      else
        operands[0] = gen_rtx (MEM, DImode, operands[0]);
      optype0 = OFFSOP;
    }

  if (optype0 == POPOP && optype1 == PUSHOP)
    {
      /* ??? Can this ever happen on i386?  */
      operands[1] = XEXP (XEXP (operands[1], 0), 0);
      asm_add (-size, operands[1]);
      if (GET_MODE (operands[1]) == XFmode)
        operands[1] = gen_rtx (MEM, XFmode, operands[1]);
      else if (GET_MODE (operands[1]) == DFmode)
        operands[1] = gen_rtx (MEM, DFmode, operands[1]);
      else
        operands[1] = gen_rtx (MEM, DImode, operands[1]);
      optype1 = OFFSOP;
    }

  /* If an operand is an unoffsettable memory ref, find a register
     we can increment temporarily to make it refer to the second word.  */

  if (optype0 == MEMOP)
    addreg0 = find_addr_reg (XEXP (operands[0], 0));

  if (optype1 == MEMOP)
    addreg1 = find_addr_reg (XEXP (operands[1], 0));

  /* Ok, we can do one word at a time.
     Normally we do the low-numbered word first,
     but if either operand is autodecrementing then we
     do the high-numbered word first.

     In either case, set up in LATEHALF the operands to use
     for the high-numbered word and in some cases alter the
     operands in OPERANDS to be suitable for the low-numbered word.  */

  if (size == 12)
    {
      if (optype0 == REGOP)
        {
          middlehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
          latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 2);
        }
      else if (optype0 == OFFSOP)
        {
          middlehalf[0] = adj_offsettable_operand (operands[0], 4);
          latehalf[0] = adj_offsettable_operand (operands[0], 8);
        }
      else
        {
          middlehalf[0] = operands[0];
          latehalf[0] = operands[0];
        }

      if (optype1 == REGOP)
        {
          middlehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
          latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 2);
        }
      else if (optype1 == OFFSOP)
        {
          middlehalf[1] = adj_offsettable_operand (operands[1], 4);
          latehalf[1] = adj_offsettable_operand (operands[1], 8);
        }
      else if (optype1 == CNSTOP)
        {
          if (GET_CODE (operands[1]) == CONST_DOUBLE)
            {
              REAL_VALUE_TYPE r; long l[3];

              REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
              REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
              operands[1] = GEN_INT (l[0]);
              middlehalf[1] = GEN_INT (l[1]);
              latehalf[1] = GEN_INT (l[2]);
            }
          else if (CONSTANT_P (operands[1]))
            /* No non-CONST_DOUBLE constant should ever appear here.  */
            abort ();
        }
      else
        {
          middlehalf[1] = operands[1];
          latehalf[1] = operands[1];
        }
    }
  else /* size is not 12: */
    {
      if (optype0 == REGOP)
        latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
      else if (optype0 == OFFSOP)
        latehalf[0] = adj_offsettable_operand (operands[0], 4);
      else
        latehalf[0] = operands[0];

      if (optype1 == REGOP)
        latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
      else if (optype1 == OFFSOP)
        latehalf[1] = adj_offsettable_operand (operands[1], 4);
      else if (optype1 == CNSTOP)
        split_double (operands[1], &operands[1], &latehalf[1]);
      else
        latehalf[1] = operands[1];
    }

  /* If insn is effectively movd N (sp),-(sp) then we will do the
     high word first.  We should use the adjusted operand 1
     (which is N+4 (sp) or N+8 (sp))
     for the low word and middle word as well,
     to compensate for the first decrement of sp.  */
  if (optype0 == PUSHOP
      && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
      && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
    middlehalf[1] = operands[1] = latehalf[1];

  /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
     if the upper part of reg N does not appear in the MEM, arrange to
     emit the move late-half first.  Otherwise, compute the MEM address
     into the upper part of N and use that as a pointer to the memory
     operand.  */
  if (optype0 == REGOP
      && (optype1 == OFFSOP || optype1 == MEMOP))
    {
      if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
          && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
        {
          /* If both halves of dest are used in the src memory address,
             compute the address into latehalf of dest.  */
        compadr:
          xops[0] = latehalf[0];
          xops[1] = XEXP (operands[1], 0);
          output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
          if (GET_MODE (operands[1]) == XFmode)
            {
              /* abort (); */
              operands[1] = gen_rtx (MEM, XFmode, latehalf[0]);
              middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
              latehalf[1] = adj_offsettable_operand (operands[1], size-4);
            }
          else
            {
              operands[1] = gen_rtx (MEM, DImode, latehalf[0]);
              latehalf[1] = adj_offsettable_operand (operands[1], size-4);
            }
        }
      else if (size == 12
               && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
        {
          /* Check for two regs used by both source and dest.  */
          if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
              || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
            goto compadr;

          /* JRV says this can't happen: */
          if (addreg0 || addreg1)
            abort ();

          /* Only the middle reg conflicts; simply put it last.  */
          output_asm_insn (singlemove_string (operands), operands);
          output_asm_insn (singlemove_string (latehalf), latehalf);
          output_asm_insn (singlemove_string (middlehalf), middlehalf);
          return "";
        }
      else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
        /* If the low half of dest is mentioned in the source memory
           address, then arrange to emit the move late half first.  */
        dest_overlapped_low = 1;
    }

  /* If one or both operands autodecrementing,
     do the two words, high-numbered first.  */

  /* Likewise, the first move would clobber the source of the second one,
     do them in the other order.  This happens only for registers;
     such overlap can't happen in memory unless the user explicitly
     sets it up, and that is an undefined circumstance.  */

/*
  if (optype0 == PUSHOP || optype1 == PUSHOP
      || (optype0 == REGOP && optype1 == REGOP
          && REGNO (operands[0]) == REGNO (latehalf[1]))
      || dest_overlapped_low)
*/
  if (optype0 == PUSHOP || optype1 == PUSHOP
      || (optype0 == REGOP && optype1 == REGOP
          && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
              || REGNO (operands[0]) == REGNO (latehalf[1])))
      || dest_overlapped_low)
    {
      /* Make any unoffsettable addresses point at high-numbered word.  */
      if (addreg0)
        asm_add (size-4, addreg0);
      if (addreg1)
        asm_add (size-4, addreg1);

      /* Do that word.  */
      output_asm_insn (singlemove_string (latehalf), latehalf);

      /* Undo the adds we just did.  */
      if (addreg0)
        asm_add (-4, addreg0);
      if (addreg1)
        asm_add (-4, addreg1);

      if (size == 12)
        {
          output_asm_insn (singlemove_string (middlehalf), middlehalf);
          if (addreg0)
            asm_add (-4, addreg0);
          if (addreg1)
            asm_add (-4, addreg1);
        }

      /* Do low-numbered word.  */
      return singlemove_string (operands);
    }

  /* Normal case: do the two words, low-numbered first.  */

  output_asm_insn (singlemove_string (operands), operands);

  /* Do the middle one of the three words for long double */
  if (size == 12)
    {
      if (addreg0)
        asm_add (4, addreg0);
      if (addreg1)
        asm_add (4, addreg1);

      output_asm_insn (singlemove_string (middlehalf), middlehalf);
    }

  /* Make any unoffsettable addresses point at high-numbered word.  */
  if (addreg0)
    asm_add (4, addreg0);
  if (addreg1)
    asm_add (4, addreg1);

  /* Do that word.  */
  output_asm_insn (singlemove_string (latehalf), latehalf);

  /* Undo the adds we just did.  */
  if (addreg0)
    asm_add (4-size, addreg0);
  if (addreg1)
    asm_add (4-size, addreg1);

  return "";
}
1167}
b840bfb0
MM
1168
1169\f
1170#define MAX_TMPS 2 /* max temporary registers used */
1171
1172/* Output the appropriate code to move push memory on the stack */
1173
1174char *
1175output_move_pushmem (operands, insn, length, tmp_start, n_operands)
1176 rtx operands[];
1177 rtx insn;
1178 int length;
1179 int tmp_start;
1180 int n_operands;
1181{
1182
1183 struct {
1184 char *load;
1185 char *push;
1186 rtx xops[2];
1187 } tmp_info[MAX_TMPS];
1188
1189 rtx src = operands[1];
1190 int max_tmps = 0;
1191 int offset = 0;
1192 int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
1193 int stack_offset = 0;
1194 int i, num_tmps;
1195 rtx xops[1];
1196
1197 if (!offsettable_memref_p (src))
1198 fatal_insn ("Source is not offsettable", insn);
1199
1200 if ((length & 3) != 0)
1201 fatal_insn ("Pushing non-word aligned size", insn);
1202
1203 /* Figure out which temporary registers we have available */
1204 for (i = tmp_start; i < n_operands; i++)
1205 {
1206 if (GET_CODE (operands[i]) == REG)
1207 {
1208 if (reg_overlap_mentioned_p (operands[i], src))
1209 continue;
1210
1211 tmp_info[ max_tmps++ ].xops[1] = operands[i];
1212 if (max_tmps == MAX_TMPS)
1213 break;
1214 }
1215 }
1216
1217 if (max_tmps == 0)
1218 for (offset = length - 4; offset >= 0; offset -= 4)
1219 {
1220 xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1221 output_asm_insn (AS1(push%L0,%0), xops);
1222 if (stack_p)
1223 stack_offset += 4;
1224 }
1225
1226 else
1227 for (offset = length - 4; offset >= 0; )
1228 {
1229 for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
1230 {
1231 tmp_info[num_tmps].load = AS2(mov%L0,%0,%1);
1232 tmp_info[num_tmps].push = AS1(push%L0,%1);
1233 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1234 offset -= 4;
1235 }
1236
1237 for (i = 0; i < num_tmps; i++)
1238 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1239
1240 for (i = 0; i < num_tmps; i++)
1241 output_asm_insn (tmp_info[i].push, tmp_info[i].xops);
1242
1243 if (stack_p)
1244 stack_offset += 4*num_tmps;
1245 }
1246
1247 return "";
1248}
1249
1250\f
1251
1252/* Output the appropriate code to move data between two memory locations */
1253
1254char *
1255output_move_memory (operands, insn, length, tmp_start, n_operands)
1256 rtx operands[];
1257 rtx insn;
1258 int length;
1259 int tmp_start;
1260 int n_operands;
1261{
1262 struct {
1263 char *load;
1264 char *store;
1265 rtx xops[3];
1266 } tmp_info[MAX_TMPS];
1267
1268 rtx dest = operands[0];
1269 rtx src = operands[1];
1270 rtx qi_tmp = NULL_RTX;
1271 int max_tmps = 0;
1272 int offset = 0;
1273 int i, num_tmps;
1274 rtx xops[3];
1275
1276 if (GET_CODE (dest) == MEM
1277 && GET_CODE (XEXP (dest, 0)) == PRE_INC
1278 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
1279 return output_move_pushmem (operands, insn, length, tmp_start, n_operands);
1280
1281 if (!offsettable_memref_p (src))
1282 fatal_insn ("Source is not offsettable", insn);
1283
1284 if (!offsettable_memref_p (dest))
1285 fatal_insn ("Destination is not offsettable", insn);
1286
1287 /* Figure out which temporary registers we have available */
1288 for (i = tmp_start; i < n_operands; i++)
1289 {
1290 if (GET_CODE (operands[i]) == REG)
1291 {
1292 if ((length & 1) != 0 && !qi_tmp && QI_REG_P (operands[i]))
1293 qi_tmp = operands[i];
1294
1295 if (reg_overlap_mentioned_p (operands[i], dest))
1296 fatal_insn ("Temporary register overlaps the destination", insn);
1297
1298 if (reg_overlap_mentioned_p (operands[i], src))
1299 fatal_insn ("Temporary register overlaps the source", insn);
1300
1301 tmp_info[ max_tmps++ ].xops[2] = operands[i];
1302 if (max_tmps == MAX_TMPS)
1303 break;
1304 }
1305 }
1306
1307 if (max_tmps == 0)
1308 fatal_insn ("No scratch registers were found to do memory->memory moves", insn);
1309
1310 if ((length & 1) != 0)
1311 {
1312 if (!qi_tmp)
1313 fatal_insn ("No byte register found when moving odd # of bytes.", insn);
1314 }
1315
1316 while (length > 1)
1317 {
1318 for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
1319 {
1320 if (length >= 4)
1321 {
1322 tmp_info[num_tmps].load = AS2(mov%L0,%1,%2);
1323 tmp_info[num_tmps].store = AS2(mov%L0,%2,%0);
1324 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1325 tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
1326 offset += 4;
1327 length -= 4;
1328 }
1329 else if (length >= 2)
1330 {
1331 tmp_info[num_tmps].load = AS2(mov%W0,%1,%2);
1332 tmp_info[num_tmps].store = AS2(mov%W0,%2,%0);
1333 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1334 tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
1335 offset += 2;
1336 length -= 2;
1337 }
1338 else
1339 break;
1340 }
1341
1342 for (i = 0; i < num_tmps; i++)
1343 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1344
1345 for (i = 0; i < num_tmps; i++)
1346 output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
1347 }
1348
1349 if (length == 1)
1350 {
1351 xops[0] = adj_offsettable_operand (dest, offset);
1352 xops[1] = adj_offsettable_operand (src, offset);
1353 xops[2] = qi_tmp;
1354 output_asm_insn (AS2(mov%B0,%1,%2), xops);
1355 output_asm_insn (AS2(mov%B0,%2,%0), xops);
1356 }
1357
1358 return "";
1359}
1360
2a2ab3f9
JVA
1361\f
1362int
1363standard_80387_constant_p (x)
1364 rtx x;
1365{
0b6b2900
RK
1366#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1367 REAL_VALUE_TYPE d;
1368 jmp_buf handler;
1369 int is0, is1;
2a2ab3f9 1370
0b6b2900
RK
1371 if (setjmp (handler))
1372 return 0;
2a2ab3f9 1373
0b6b2900
RK
1374 set_float_handler (handler);
1375 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
8ab92e4e 1376 is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
0b6b2900
RK
1377 is1 = REAL_VALUES_EQUAL (d, dconst1);
1378 set_float_handler (NULL_PTR);
1379
1380 if (is0)
2a2ab3f9
JVA
1381 return 1;
1382
0b6b2900 1383 if (is1)
2a2ab3f9
JVA
1384 return 2;
1385
1386 /* Note that on the 80387, other constants, such as pi,
1387 are much slower to load as standard constants
1388 than to load from doubles in memory! */
0b6b2900 1389#endif
2a2ab3f9
JVA
1390
1391 return 0;
1392}
1393
1394char *
1395output_move_const_single (operands)
1396 rtx *operands;
1397{
1398 if (FP_REG_P (operands[0]))
1399 {
1400 int conval = standard_80387_constant_p (operands[1]);
1401
1402 if (conval == 1)
1403 return "fldz";
1404
1405 if (conval == 2)
1406 return "fld1";
1407 }
1408 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1409 {
5f1ec3e6
JVA
1410 REAL_VALUE_TYPE r; long l;
1411
1412 if (GET_MODE (operands[1]) == XFmode)
1413 abort ();
1414
1415 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1416 REAL_VALUE_TO_TARGET_SINGLE (r, l);
1417 operands[1] = GEN_INT (l);
2a2ab3f9
JVA
1418 }
1419 return singlemove_string (operands);
1420}
1421\f
1422/* Returns 1 if OP is either a symbol reference or a sum of a symbol
1423 reference and a constant. */
1424
1425int
1426symbolic_operand (op, mode)
1427 register rtx op;
1428 enum machine_mode mode;
1429{
1430 switch (GET_CODE (op))
1431 {
1432 case SYMBOL_REF:
1433 case LABEL_REF:
1434 return 1;
1435 case CONST:
1436 op = XEXP (op, 0);
1437 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1438 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1439 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1440 default:
1441 return 0;
1442 }
1443}
fee2770d
RS
1444
1445/* Test for a valid operand for a call instruction.
1446 Don't allow the arg pointer register or virtual regs
1447 since they may change into reg + const, which the patterns
1448 can't handle yet. */
1449
1450int
1451call_insn_operand (op, mode)
1452 rtx op;
1453 enum machine_mode mode;
4f2c8ebb
RS
1454{
1455 if (GET_CODE (op) == MEM
1456 && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1457 /* This makes a difference for PIC. */
1458 && general_operand (XEXP (op, 0), Pmode))
1459 || (GET_CODE (XEXP (op, 0)) == REG
1460 && XEXP (op, 0) != arg_pointer_rtx
1461 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1462 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1463 return 1;
1464 return 0;
1465}
1466
1467/* Like call_insn_operand but allow (mem (symbol_ref ...))
1468 even if pic. */
1469
1470int
1471expander_call_insn_operand (op, mode)
1472 rtx op;
1473 enum machine_mode mode;
fee2770d
RS
1474{
1475 if (GET_CODE (op) == MEM
1476 && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1477 || (GET_CODE (XEXP (op, 0)) == REG
1478 && XEXP (op, 0) != arg_pointer_rtx
1479 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1480 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1481 return 1;
1482 return 0;
1483}
d784886d
RK
1484
1485/* Return 1 if OP is a comparison operator that can use the condition code
1486 generated by an arithmetic operation. */
1487
1488int
1489arithmetic_comparison_operator (op, mode)
1490 register rtx op;
1491 enum machine_mode mode;
1492{
1493 enum rtx_code code;
1494
1495 if (mode != VOIDmode && mode != GET_MODE (op))
1496 return 0;
1497 code = GET_CODE (op);
1498 if (GET_RTX_CLASS (code) != '<')
1499 return 0;
1500
1501 return (code != GT && code != LE);
1502}
2a2ab3f9
JVA
1503\f
1504/* Returns 1 if OP contains a symbol reference */
1505
1506int
1507symbolic_reference_mentioned_p (op)
1508 rtx op;
1509{
1510 register char *fmt;
1511 register int i;
1512
1513 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1514 return 1;
1515
1516 fmt = GET_RTX_FORMAT (GET_CODE (op));
1517 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1518 {
1519 if (fmt[i] == 'E')
1520 {
1521 register int j;
1522
1523 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1524 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1525 return 1;
1526 }
1527 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1528 return 1;
1529 }
1530
1531 return 0;
1532}
32b5b1aa
SC
1533\f
1534/* Attempt to expand a binary operator. Make the expansion closer to the
1535 actual machine, then just general_operand, which will allow 3 separate
1536 memory references (one output, two input) in a single insn. Return
1537 whether the insn fails, or succeeds. */
1538
1539int
1540ix86_expand_binary_operator (code, mode, operands)
1541 enum rtx_code code;
1542 enum machine_mode mode;
1543 rtx operands[];
1544{
1545 rtx insn;
1546 int i;
1547 int modified;
1548
1549 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1550 if (GET_RTX_CLASS (code) == 'c'
1551 && (rtx_equal_p (operands[0], operands[2])
1552 || immediate_operand (operands[1], mode)))
1553 {
1554 rtx temp = operands[1];
1555 operands[1] = operands[2];
1556 operands[2] = temp;
1557 }
1558
1559 /* If optimizing, copy to regs to improve CSE */
1560 if (TARGET_PSEUDO && optimize && ((reload_in_progress | reload_completed) == 0))
1561 {
1562 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1563 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1564
1565 if (GET_CODE (operands[2]) == MEM)
1566 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
0afeb08a
SC
1567
1568 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1569 {
1570 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1571 emit_move_insn (temp, operands[1]);
1572 operands[1] = temp;
1573 return TRUE;
1574 }
32b5b1aa
SC
1575 }
1576
1577 if (!ix86_binary_operator_ok (code, mode, operands))
1578 {
1579 /* If not optimizing, try to make a valid insn (optimize code previously did
1580 this above to improve chances of CSE) */
1581
1582 if ((!TARGET_PSEUDO || !optimize)
1583 && ((reload_in_progress | reload_completed) == 0)
1584 && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
1585 {
1586 modified = FALSE;
1587 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1588 {
1589 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1590 modified = TRUE;
1591 }
1592
1593 if (GET_CODE (operands[2]) == MEM)
1594 {
1595 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1596 modified = TRUE;
1597 }
1598
0afeb08a
SC
1599 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1600 {
1601 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1602 emit_move_insn (temp, operands[1]);
1603 operands[1] = temp;
1604 return TRUE;
1605 }
1606
32b5b1aa
SC
1607 if (modified && !ix86_binary_operator_ok (code, mode, operands))
1608 return FALSE;
1609 }
1610 else
1611 return FALSE;
1612 }
1613
1614 return TRUE;
1615}
1616\f
1617/* Return TRUE or FALSE depending on whether the binary operator meets the
1618 appropriate constraints. */
1619
1620int
1621ix86_binary_operator_ok (code, mode, operands)
1622 enum rtx_code code;
1623 enum machine_mode mode;
1624 rtx operands[3];
1625{
29e8f73f
SC
1626 return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1627 && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
32b5b1aa
SC
1628}
1629\f
1630/* Attempt to expand a unary operator. Make the expansion closer to the
1631 actual machine, then just general_operand, which will allow 2 separate
1632 memory references (one output, one input) in a single insn. Return
1633 whether the insn fails, or succeeds. */
1634
1635int
1636ix86_expand_unary_operator (code, mode, operands)
1637 enum rtx_code code;
1638 enum machine_mode mode;
1639 rtx operands[];
1640{
1641 rtx insn;
1642
1643 /* If optimizing, copy to regs to improve CSE */
1644 if (TARGET_PSEUDO
1645 && optimize
1646 && ((reload_in_progress | reload_completed) == 0)
1647 && GET_CODE (operands[1]) == MEM)
1648 {
1649 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1650 }
1651
1652 if (!ix86_unary_operator_ok (code, mode, operands))
1653 {
1654 if ((!TARGET_PSEUDO || !optimize)
1655 && ((reload_in_progress | reload_completed) == 0)
1656 && GET_CODE (operands[1]) == MEM)
1657 {
1658 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1659 if (!ix86_unary_operator_ok (code, mode, operands))
1660 return FALSE;
1661 }
1662 else
1663 return FALSE;
1664 }
1665
1666 return TRUE;
1667}
1668\f
1669/* Return TRUE or FALSE depending on whether the unary operator meets the
1670 appropriate constraints. */
1671
1672int
1673ix86_unary_operator_ok (code, mode, operands)
1674 enum rtx_code code;
1675 enum machine_mode mode;
1676 rtx operands[2];
1677{
1678 return TRUE;
1679}
1680
2a2ab3f9 1681\f
e5cb57e8
SC
1682
1683static rtx pic_label_rtx;
1684
1685/* This function generates code for -fpic that loads %ebx with
1686 with the return address of the caller and then returns. */
1687void
1688asm_output_function_prefix (file, name)
1689 FILE * file;
1690 char * name;
1691{
1692 rtx xops[2];
1693 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1694 || current_function_uses_const_pool);
1695 xops[0] = pic_offset_table_rtx;
1696 xops[1] = stack_pointer_rtx;
1697
77a989d1 1698 /* deep branch prediction favors having a return for every call */
e5cb57e8
SC
1699 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1700 {
77a989d1 1701 if (pic_label_rtx == 0)
e5cb57e8
SC
1702 pic_label_rtx = (rtx) gen_label_rtx ();
1703 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (pic_label_rtx));
1704 output_asm_insn ("movl (%1),%0", xops);
1705 output_asm_insn ("ret", xops);
1706 }
1707}
1708
77a989d1 1709/* Set up the stack and frame (if desired) for the function. */
2a2ab3f9
JVA
1710
1711void
1712function_prologue (file, size)
1713 FILE *file;
1714 int size;
77a989d1 1715{
983f1685
SC
1716 register int regno;
1717 int limit;
1718 rtx xops[4];
1719 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1720 || current_function_uses_const_pool);
1721 long tsize = get_frame_size ();
1722
1723 /* pic references don't explicitly mention pic_offset_table_rtx */
1724 if (TARGET_SCHEDULE_PROLOGUE)
1725 return;
1726
1727 xops[0] = stack_pointer_rtx;
1728 xops[1] = frame_pointer_rtx;
1729 xops[2] = GEN_INT (tsize);
1730 if (frame_pointer_needed)
1731 {
1732 output_asm_insn ("push%L1 %1", xops);
1733 output_asm_insn (AS2 (mov%L0,%0,%1), xops);
1734 }
1735
1736 if (tsize)
1737 output_asm_insn (AS2 (sub%L0,%2,%0), xops);
1738
1739 /* Note If use enter it is NOT reversed args.
1740 This one is not reversed from intel!!
1741 I think enter is slower. Also sdb doesn't like it.
1742 But if you want it the code is:
1743 {
1744 xops[3] = const0_rtx;
1745 output_asm_insn ("enter %2,%3", xops);
1746 }
1747 */
1748 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1749 for (regno = limit - 1; regno >= 0; regno--)
1750 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1751 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1752 {
1753 xops[0] = gen_rtx (REG, SImode, regno);
1754 output_asm_insn ("push%L0 %0", xops);
1755 }
1756
1757 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1758 {
1759 xops[0] = pic_offset_table_rtx;
1760 if (pic_label_rtx == 0)
1761 pic_label_rtx = (rtx) gen_label_rtx ();
1762 xops[1] = pic_label_rtx;
1763
1764 output_asm_insn (AS1 (call,%P1), xops);
1765 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
1766 }
1767 else if (pic_reg_used)
1768 {
1769 xops[0] = pic_offset_table_rtx;
1770 xops[1] = (rtx) gen_label_rtx ();
1771
1772 output_asm_insn (AS1 (call,%P1), xops);
1773 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (xops[1]));
1774 output_asm_insn (AS1 (pop%L0,%0), xops);
1775 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
1776 }
77a989d1
SC
1777}
1778
1779/* This function generates the assembly code for function entry.
1780 FILE is an stdio stream to output the code to.
1781 SIZE is an int: how many units of temporary storage to allocate. */
1782
1783void
1784ix86_expand_prologue ()
2a2ab3f9
JVA
1785{
1786 register int regno;
1787 int limit;
1788 rtx xops[4];
aae75261
JVA
1789 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1790 || current_function_uses_const_pool);
77a989d1 1791 long tsize = get_frame_size ();
2a2ab3f9 1792
983f1685
SC
1793 if (!TARGET_SCHEDULE_PROLOGUE)
1794 return;
1795
2a2ab3f9
JVA
1796 xops[0] = stack_pointer_rtx;
1797 xops[1] = frame_pointer_rtx;
77a989d1 1798 xops[2] = GEN_INT (tsize);
2a2ab3f9
JVA
1799 if (frame_pointer_needed)
1800 {
77a989d1
SC
1801 emit_insn (gen_rtx (SET, 0,
1802 gen_rtx (MEM, SImode,
1803 gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
1804 frame_pointer_rtx));
1805 emit_move_insn (xops[1], xops[0]);
2a2ab3f9
JVA
1806 }
1807
77a989d1
SC
1808 if (tsize)
1809 emit_insn (gen_rtx (SET, SImode,
1810 xops[0],
1811 gen_rtx (MINUS, SImode,
1812 xops[0],
1813 xops[2])));
1814
2a2ab3f9
JVA
1815 /* Note If use enter it is NOT reversed args.
1816 This one is not reversed from intel!!
1817 I think enter is slower. Also sdb doesn't like it.
1818 But if you want it the code is:
1819 {
1820 xops[3] = const0_rtx;
1821 output_asm_insn ("enter %2,%3", xops);
1822 }
1823 */
1824 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1825 for (regno = limit - 1; regno >= 0; regno--)
1826 if ((regs_ever_live[regno] && ! call_used_regs[regno])
aae75261 1827 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2a2ab3f9
JVA
1828 {
1829 xops[0] = gen_rtx (REG, SImode, regno);
77a989d1
SC
1830 emit_insn (gen_rtx (SET, 0,
1831 gen_rtx (MEM, SImode,
1832 gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
1833 xops[0]));
2a2ab3f9
JVA
1834 }
1835
77a989d1 1836 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
2a2ab3f9
JVA
1837 {
1838 xops[0] = pic_offset_table_rtx;
77a989d1
SC
1839 if (pic_label_rtx == 0)
1840 pic_label_rtx = (rtx) gen_label_rtx ();
e5cb57e8 1841 xops[1] = pic_label_rtx;
2a2ab3f9 1842
77a989d1 1843 emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
77a989d1
SC
1844 emit_insn (gen_prologue_set_got (xops[0],
1845 gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
1846 gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
2a2ab3f9 1847 }
77a989d1
SC
1848 else if (pic_reg_used)
1849 {
e5cb57e8
SC
1850 xops[0] = pic_offset_table_rtx;
1851 xops[1] = (rtx) gen_label_rtx ();
1852
77a989d1 1853 emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
77a989d1 1854 emit_insn (gen_pop (xops[0]));
77a989d1
SC
1855 emit_insn (gen_prologue_set_got (xops[0],
1856 gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
1857 gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER (xops[1]))));
e5cb57e8 1858 }
2a2ab3f9
JVA
1859}
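/* Illustrative sketch, not part of the original source: for the same
   frame as above, ix86_expand_prologue emits RTL along the lines of

	(set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI bp))
	(set (reg:SI bp) (reg:SI sp))
	(set (reg:SI sp) (minus:SI (reg:SI sp) (const_int 16)))
	(set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI si))

   which the patterns in i386.md later render as the usual push/mov/sub
   prologue sequence.  */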
1860
77a989d1
SC
1861/* Restore function stack, frame, and registers.  This is a stub: the work is now done as RTL by ix86_expand_epilogue, so nothing is output here. */
1862
1863void
1864function_epilogue (file, size)
1865 FILE *file;
1866 int size;
1867{
1868}
1869
2a2ab3f9
JVA
1870/* Return 1 if it is appropriate to emit `ret' instructions in the
1871 body of a function. Do this only if the epilogue is simple, needing a
1872 couple of insns. Prior to reloading, we can't tell how many registers
77a989d1
SC
1873 must be saved, so return 0 then. Return 0 if there is no frame
1874 marker to de-allocate.
2a2ab3f9
JVA
1875
1876 If NON_SAVING_SETJMP is defined and true, then it is not possible
1877 for the epilogue to be simple, so return 0. This is a special case
77a989d1
SC
1878 since NON_SAVING_SETJMP will not cause regs_ever_live to change
1879 until final, but jump_optimize may need to know sooner if a
1880 `return' is OK. */
2a2ab3f9
JVA
1881
1882int
77a989d1 1883ix86_can_use_return_insn_p ()
2a2ab3f9
JVA
1884{
1885 int regno;
1886 int nregs = 0;
1887 int reglimit = (frame_pointer_needed
1888 ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
aae75261
JVA
1889 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1890 || current_function_uses_const_pool);
2a2ab3f9
JVA
1891
1892#ifdef NON_SAVING_SETJMP
1893 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
1894 return 0;
1895#endif
1896
1897 if (! reload_completed)
1898 return 0;
1899
1900 for (regno = reglimit - 1; regno >= 0; regno--)
1901 if ((regs_ever_live[regno] && ! call_used_regs[regno])
aae75261 1902 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2a2ab3f9
JVA
1903 nregs++;
1904
1905 return nregs == 0 || ! frame_pointer_needed;
1906}
1907
3b3c6a3f 1908\f
2a2ab3f9
JVA
1909/* Generate the RTL for the epilogue of the current function.  The
1910 function takes no arguments; the frame size is obtained from
1911 get_frame_size (). */
1912
1913void
77a989d1 1914ix86_expand_epilogue ()
2a2ab3f9
JVA
1915{
1916 register int regno;
1917 register int nregs, limit;
1918 int offset;
1919 rtx xops[3];
aae75261
JVA
1920 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1921 || current_function_uses_const_pool);
77a989d1 1922 long tsize = get_frame_size ();
2a2ab3f9
JVA
1923
1924 /* Compute the number of registers to pop */
1925
1926 limit = (frame_pointer_needed
1927 ? FRAME_POINTER_REGNUM
1928 : STACK_POINTER_REGNUM);
1929
1930 nregs = 0;
1931
1932 for (regno = limit - 1; regno >= 0; regno--)
1933 if ((regs_ever_live[regno] && ! call_used_regs[regno])
aae75261 1934 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2a2ab3f9
JVA
1935 nregs++;
1936
1937 /* The stack pointer is often unreliable at this point, so the
1938 restores may have to be addressed off the frame pointer. */
1939
1940 /* In reality, we may not care whether sp is unreliable, because we
1941 can restore the registers relative to the frame pointer.  In theory
1942 that is faster, since each move is the same speed as a pop and the
1943 leal is not needed.  For now, restore multiple registers the old
1944 way. */
1945
77a989d1 1946 offset = -tsize - (nregs * UNITS_PER_WORD);
2a2ab3f9
JVA
1947
1948 xops[2] = stack_pointer_rtx;
1949
1950 if (nregs > 1 || ! frame_pointer_needed)
1951 {
1952 if (frame_pointer_needed)
1953 {
77a989d1
SC
1954 xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
1955 emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
1956/* output_asm_insn (AS2 (lea%L2,%0,%2), xops);*/
2a2ab3f9
JVA
1957 }
1958
1959 for (regno = 0; regno < limit; regno++)
1960 if ((regs_ever_live[regno] && ! call_used_regs[regno])
aae75261 1961 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2a2ab3f9
JVA
1962 {
1963 xops[0] = gen_rtx (REG, SImode, regno);
77a989d1
SC
1964 emit_insn (gen_pop (xops[0]));
1965/* output_asm_insn ("pop%L0 %0", xops);*/
2a2ab3f9
JVA
1966 }
1967 }
1968 else
1969 for (regno = 0; regno < limit; regno++)
1970 if ((regs_ever_live[regno] && ! call_used_regs[regno])
aae75261 1971 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2a2ab3f9
JVA
1972 {
1973 xops[0] = gen_rtx (REG, SImode, regno);
1974 xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);
77a989d1
SC
1975 emit_move_insn (xops[0], xops[1]);
1976/* output_asm_insn (AS2 (mov%L0,%1,%0), xops);*/
2a2ab3f9
JVA
1977 offset += 4;
1978 }
1979
1980 if (frame_pointer_needed)
1981 {
c8c5cb99 1982 /* If not an i386, mov & pop is faster than "leave". */
2a2ab3f9 1983
3f803cd9 1984 if (TARGET_USE_LEAVE)
77a989d1
SC
1985 emit_insn (gen_leave());
1986/* output_asm_insn ("leave", xops);*/
c8c5cb99 1987 else
2a2ab3f9
JVA
1988 {
1989 xops[0] = frame_pointer_rtx;
77a989d1 1990 xops[1] = stack_pointer_rtx;
b1060ee3 1991 emit_insn (gen_epilogue_set_stack_ptr());
77a989d1
SC
1992/* output_asm_insn (AS2 (mov%L2,%0,%2), xops);*/
1993 emit_insn (gen_pop (xops[0]));
1994/* output_asm_insn ("pop%L0 %0", xops);*/
2a2ab3f9 1995 }
2a2ab3f9 1996 }
77a989d1 1997 else if (tsize)
2a2ab3f9
JVA
1998 {
1999 /* If there is no frame pointer, we must still release the frame. */
2000
77a989d1
SC
2001 xops[0] = GEN_INT (tsize);
2002 emit_insn (gen_rtx (SET, SImode,
2003 xops[2],
2004 gen_rtx (PLUS, SImode,
2005 xops[2],
2006 xops[0])));
2007/* output_asm_insn (AS2 (add%L2,%0,%2), xops);*/
2a2ab3f9
JVA
2008 }
2009
68f654ec
RK
2010#ifdef FUNCTION_BLOCK_PROFILER_EXIT
2011 if (profile_block_flag == 2)
2012 {
2013 FUNCTION_BLOCK_PROFILER_EXIT (file); /* FIXME: there is no `file' stream here now that the epilogue is emitted as RTL. */
2014 }
2015#endif
2016
2a2ab3f9
JVA
2017 if (current_function_pops_args && current_function_args_size)
2018 {
435defd1 2019 xops[1] = GEN_INT (current_function_pops_args);
2a2ab3f9
JVA
2020
2021 /* The i386 `ret' instruction takes an unsigned 16-bit pop count; we
2022 conservatively use at most 32K of it here.  If asked to pop more, pop
2023 the return address, do an explicit add, and jump indirectly to the caller. */
2024
2025 if (current_function_pops_args >= 32768)
2026 {
2027 /* ??? Which register to use here? */
2028 xops[0] = gen_rtx (REG, SImode, 2);
77a989d1
SC
2029 emit_insn (gen_pop (xops[0]));
2030/* output_asm_insn ("pop%L0 %0", xops);*/
2031 emit_insn (gen_rtx (SET, SImode,
2032 xops[2],
2033 gen_rtx (PLUS, SImode,
2034 xops[1],
2035 xops[2])));
2036/* output_asm_insn (AS2 (add%L2,%1,%2), xops);*/
2037 emit_jump_insn (gen_indirect_jump (xops[0]));
2038/* output_asm_insn ("jmp %*%0", xops);*/
2a2ab3f9
JVA
2039 }
2040 else
7fca80d0 2041 emit_jump_insn (gen_return_pop_internal (xops[1]));
77a989d1 2042/* output_asm_insn ("ret %1", xops);*/
2a2ab3f9 2043 }
2a2ab3f9 2044 else
7fca80d0 2045/* output_asm_insn ("ret", xops);*/
77a989d1 2046 emit_jump_insn (gen_return_internal ());
2a2ab3f9 2047}
3b3c6a3f
MM
2048
2049\f
2050/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2051 that is a valid memory address for an instruction.
2052 The MODE argument is the machine mode for the MEM expression
2053 that wants to use this address.
2054
2055 On x86, legitimate addresses are:
2056 base movl (base),reg
2057 displacement movl disp,reg
2058 base + displacement movl disp(base),reg
2059 index + base movl (base,index),reg
2060 (index + base) + displacement movl disp(base,index),reg
2061 index*scale movl (,index,scale),reg
2062 index*scale + disp movl disp(,index,scale),reg
2063 index*scale + base movl (base,index,scale),reg
2064 (index*scale + base) + disp movl disp(base,index,scale),reg
2065
2066 In each case, scale can be 1, 2, 4, 8. */
2067
2068/* This is exactly the same as print_operand_address, except that
2069 it recognizes addresses instead of printing them.
2070
2071 It only recognizes addresses in canonical form.  LEGITIMIZE_ADDRESS
2072 should convert common non-canonical forms to canonical form so that
2073 they will be recognized. */
2074
2075#define ADDR_INVALID(msg,insn) \
2076do { \
2077 if (TARGET_DEBUG_ADDR) \
2078 { \
2079 fprintf (stderr, msg); \
2080 debug_rtx (insn); \
2081 } \
2082} while (0)
2083
2084int
2085legitimate_address_p (mode, addr, strict)
2086 enum machine_mode mode;
2087 register rtx addr;
2088 int strict;
2089{
2090 rtx base = NULL_RTX;
2091 rtx indx = NULL_RTX;
2092 rtx scale = NULL_RTX;
2093 rtx disp = NULL_RTX;
2094
2095 if (TARGET_DEBUG_ADDR)
2096 {
2097 fprintf (stderr,
2098 "\n==========\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2099 GET_MODE_NAME (mode), strict);
2100
2101 debug_rtx (addr);
2102 }
2103
2104 if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
2105 base = addr; /* base reg */
2106
2107 else if (GET_CODE (addr) == PLUS)
2108 {
2109 rtx op0 = XEXP (addr, 0);
2110 rtx op1 = XEXP (addr, 1);
2111 enum rtx_code code0 = GET_CODE (op0);
2112 enum rtx_code code1 = GET_CODE (op1);
2113
2114 if (code0 == REG || code0 == SUBREG)
2115 {
2116 if (code1 == REG || code1 == SUBREG)
2117 {
2118 indx = op0; /* index + base */
2119 base = op1;
2120 }
2121
2122 else
2123 {
2124 base = op0; /* base + displacement */
2125 disp = op1;
2126 }
2127 }
2128
2129 else if (code0 == MULT)
2130 {
2131 indx = XEXP (op0, 0);
2132 scale = XEXP (op0, 1);
2133
2134 if (code1 == REG || code1 == SUBREG)
2135 base = op1; /* index*scale + base */
2136
2137 else
2138 disp = op1; /* index*scale + disp */
2139 }
2140
2141 else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
2142 {
2143 indx = XEXP (XEXP (op0, 0), 0); /* index*scale + base + disp */
2144 scale = XEXP (XEXP (op0, 0), 1);
2145 base = XEXP (op0, 1);
2146 disp = op1;
2147 }
2148
2149 else if (code0 == PLUS)
2150 {
2151 indx = XEXP (op0, 0); /* index + base + disp */
2152 base = XEXP (op0, 1);
2153 disp = op1;
2154 }
2155
2156 else
2157 {
2158 ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
2159 return FALSE;
2160 }
2161 }
2162
2163 else if (GET_CODE (addr) == MULT)
2164 {
2165 indx = XEXP (addr, 0); /* index*scale */
2166 scale = XEXP (addr, 1);
2167 }
2168
2169 else
2170 disp = addr; /* displacement */
2171
91f0226f
MM
2172 /* Allow the arg pointer and stack pointer as the index if there is no scaling. */
2173 if (base && indx && !scale
2174 && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
2175 {
2176 rtx tmp = base;
2177 base = indx;
2178 indx = tmp;
2179 }
2180
3b3c6a3f 2181 /* Validate base register */
3d771dfd
MM
2182 /* Don't allow SUBREG's here, it can lead to spill failures when the base
2183 is one word out of a two word structure, which is represented internally
2184 as a DImode int. */
3b3c6a3f
MM
2185 if (base)
2186 {
3d771dfd 2187 if (GET_CODE (base) != REG)
3b3c6a3f 2188 {
3d771dfd
MM
2189 ADDR_INVALID ("Base is not a register.\n", base);
2190 return FALSE;
3b3c6a3f
MM
2191 }
2192
3d771dfd 2193 if ((strict && !REG_OK_FOR_BASE_STRICT_P (base))
3b3c6a3f
MM
2194 || (!strict && !REG_OK_FOR_BASE_NONSTRICT_P (base)))
2195 {
2196 ADDR_INVALID ("Base is not valid.\n", base);
2197 return FALSE;
2198 }
2199 }
2200
2201 /* Validate index register */
3d771dfd
MM
2202 /* Don't allow SUBREG's here, it can lead to spill failures when the index
2203 is one word out of a two word structure, which is represented internally
2204 as a DImode int. */
3b3c6a3f
MM
2205 if (indx)
2206 {
3d771dfd 2207 if (GET_CODE (indx) != REG)
3b3c6a3f 2208 {
3d771dfd
MM
2209 ADDR_INVALID ("Index is not a register.\n", indx);
2210 return FALSE;
3b3c6a3f
MM
2211 }
2212
3d771dfd 2213 if ((strict && !REG_OK_FOR_INDEX_STRICT_P (indx))
3b3c6a3f
MM
2214 || (!strict && !REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
2215 {
2216 ADDR_INVALID ("Index is not valid.\n", indx);
2217 return FALSE;
2218 }
2219 }
2220 else if (scale)
01554f00 2221 abort (); /* scale w/o index invalid */
3b3c6a3f
MM
2222
2223 /* Validate scale factor */
2224 if (scale)
2225 {
2226 HOST_WIDE_INT value;
2227
2228 if (GET_CODE (scale) != CONST_INT)
2229 {
2230 ADDR_INVALID ("Scale is not valid.\n", scale);
2231 return FALSE;
2232 }
2233
2234 value = INTVAL (scale);
2235 if (value != 1 && value != 2 && value != 4 && value != 8)
2236 {
2237 ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
2238 return FALSE;
2239 }
2240 }
2241
32b5b1aa
SC
2242 /* Validate the displacement.
2243 Constant pool addresses must be handled specially.  They are
2244 considered legitimate addresses, but only if not used with regs.
2245 When printed, the output routines know to print the reference with the
2246 PIC reg, even though the PIC reg doesn't appear in the RTL. */
3b3c6a3f
MM
2247 if (disp)
2248 {
32b5b1aa
SC
2249 if (GET_CODE (disp) == SYMBOL_REF
2250 && CONSTANT_POOL_ADDRESS_P (disp)
2251 && !base
2252 && !indx)
2253 ;
2254
2255 else if (!CONSTANT_ADDRESS_P (disp))
3b3c6a3f
MM
2256 {
2257 ADDR_INVALID ("Displacement is not valid.\n", disp);
2258 return FALSE;
2259 }
2260
32b5b1aa 2261 else if (GET_CODE (disp) == CONST_DOUBLE)
3b3c6a3f
MM
2262 {
2263 ADDR_INVALID ("Displacement is a const_double.\n", disp);
2264 return FALSE;
2265 }
2266
32b5b1aa
SC
2267 else if (flag_pic && SYMBOLIC_CONST (disp)
2268 && base != pic_offset_table_rtx
2269 && (indx != pic_offset_table_rtx || scale != NULL_RTX))
3b3c6a3f
MM
2270 {
2271 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp);
2272 return FALSE;
2273 }
2274
32b5b1aa
SC
2275 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp)
2276 && (base != NULL_RTX || indx != NULL_RTX))
3b3c6a3f
MM
2277 {
2278 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n", disp);
2279 return FALSE;
2280 }
2281 }
2282
2283 if (TARGET_DEBUG_ADDR)
2284 fprintf (stderr, "Address is valid.\n");
2285
2286 /* Everything looks valid, return true */
2287 return TRUE;
2288}
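/* Minimal usage sketch, not part of the original source: build the RTL
   for an `8(base,index,4)' style address in the canonical form described
   above and ask legitimate_address_p about it.  The registers are fresh
   pseudos, so only the non-strict check is meaningful.  */
#if 0
static int
example_scaled_address_ok ()
{
  rtx base = gen_reg_rtx (SImode);
  rtx indx = gen_reg_rtx (SImode);
  rtx addr = gen_rtx (PLUS, SImode,
		      gen_rtx (PLUS, SImode,
			       gen_rtx (MULT, SImode, indx, GEN_INT (4)),
			       base),
		      GEN_INT (8));

  /* Matches the (index*scale + base) + disp case; scale 4 is valid.  */
  return legitimate_address_p (SImode, addr, 0);
}
#endif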
2289
2290\f
2291/* Return a legitimate reference for ORIG (an address) using the
2292 register REG. If REG is 0, a new pseudo is generated.
2293
2294 There are three types of references that must be handled:
2295
2296 1. Global data references must load the address from the GOT, via
2297 the PIC reg. An insn is emitted to do this load, and the reg is
2298 returned.
2299
2300 2. Static data references must compute the address as an offset
2301 from the GOT, whose base is in the PIC reg. An insn is emitted to
2302 compute the address into a reg, and the reg is returned. Static
2303 data objects have SYMBOL_REF_FLAG set to differentiate them from
2304 global data objects.
2305
2306 3. Constant pool addresses must be handled specially.  They are
2307 considered legitimate addresses, but only if not used with regs.
2308 When printed, the output routines know to print the reference with the
2309 PIC reg, even though the PIC reg doesn't appear in the RTL.
2310
2311 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2312 reg also appears in the address (except for constant pool references,
2313 noted above).
2314
2315 "switch" statements also require special handling when generating
2316 PIC code. See comments by the `casesi' insn in i386.md for details. */
2317
2318rtx
2319legitimize_pic_address (orig, reg)
2320 rtx orig;
2321 rtx reg;
2322{
2323 rtx addr = orig;
2324 rtx new = orig;
2325
2326 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
2327 {
2328 if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
2329 reg = new = orig;
2330 else
2331 {
2332 if (reg == 0)
2333 reg = gen_reg_rtx (Pmode);
2334
c399861d
MM
2335 if ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FLAG (addr))
2336 || GET_CODE (addr) == LABEL_REF)
3b3c6a3f
MM
2337 new = gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig);
2338 else
2339 new = gen_rtx (MEM, Pmode,
2340 gen_rtx (PLUS, Pmode,
2341 pic_offset_table_rtx, orig));
2342
2343 emit_move_insn (reg, new);
2344 }
2345 current_function_uses_pic_offset_table = 1;
2346 return reg;
2347 }
2348 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
2349 {
2350 rtx base;
2351
2352 if (GET_CODE (addr) == CONST)
2353 {
2354 addr = XEXP (addr, 0);
2355 if (GET_CODE (addr) != PLUS)
2356 abort ();
2357 }
2358
2359 if (XEXP (addr, 0) == pic_offset_table_rtx)
2360 return orig;
2361
2362 if (reg == 0)
2363 reg = gen_reg_rtx (Pmode);
2364
2365 base = legitimize_pic_address (XEXP (addr, 0), reg);
2366 addr = legitimize_pic_address (XEXP (addr, 1),
2367 base == reg ? NULL_RTX : reg);
2368
2369 if (GET_CODE (addr) == CONST_INT)
2370 return plus_constant (base, INTVAL (addr));
2371
2372 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
2373 {
2374 base = gen_rtx (PLUS, Pmode, base, XEXP (addr, 0));
2375 addr = XEXP (addr, 1);
2376 }
2377 return gen_rtx (PLUS, Pmode, base, addr);
2378 }
2379 return new;
2380}
2381\f
2382
2383/* Emit insns to move operands[1] into operands[0]. */
2384
2385void
2386emit_pic_move (operands, mode)
2387 rtx *operands;
2388 enum machine_mode mode;
2389{
2390 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
2391
2392 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2393 operands[1] = (rtx) force_reg (SImode, operands[1]);
2394 else
2395 operands[1] = legitimize_pic_address (operands[1], temp);
2396}
2397
2398\f
2399/* Try machine-dependent ways of modifying an illegitimate address
2400 to be legitimate. If we find one, return the new, valid address.
2401 This macro is used in only one place: `memory_address' in explow.c.
2402
2403 OLDX is the address as it was before break_out_memory_refs was called.
2404 In some cases it is useful to look at this to decide what needs to be done.
2405
2406 MODE and WIN are passed so that this macro can use
2407 GO_IF_LEGITIMATE_ADDRESS.
2408
2409 It is always safe for this macro to do nothing. It exists to recognize
2410 opportunities to optimize the output.
2411
2412 For the 80386, we handle X+REG by loading X into a register R and
2413 using R+REG. R will go in a general reg and indexing will be used.
2414 However, if REG is a broken-out memory address or multiplication,
2415 nothing needs to be done because REG can certainly go in a general reg.
2416
2417 When -fpic is used, special handling is needed for symbolic references.
2418 See comments by legitimize_pic_address in i386.c for details. */
2419
2420rtx
2421legitimize_address (x, oldx, mode)
2422 register rtx x;
2423 register rtx oldx;
2424 enum machine_mode mode;
2425{
2426 int changed = 0;
2427 unsigned log;
2428
2429 if (TARGET_DEBUG_ADDR)
2430 {
2431 fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n", GET_MODE_NAME (mode));
2432 debug_rtx (x);
2433 }
2434
2435 if (flag_pic && SYMBOLIC_CONST (x))
2436 return legitimize_pic_address (x, 0);
2437
2438 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2439 if (GET_CODE (x) == ASHIFT
2440 && GET_CODE (XEXP (x, 1)) == CONST_INT
2441 && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
2442 {
2443 changed = 1;
2444 x = gen_rtx (MULT, Pmode,
2445 force_reg (Pmode, XEXP (x, 0)),
2446 GEN_INT (1 << log));
2447 }
2448
2449 if (GET_CODE (x) == PLUS)
2450 {
2451 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2452 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2453 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2454 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
2455 {
2456 changed = 1;
2457 XEXP (x, 0) = gen_rtx (MULT, Pmode,
2458 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
2459 GEN_INT (1 << log));
2460 }
2461
2462 if (GET_CODE (XEXP (x, 1)) == ASHIFT
2463 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2464 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
2465 {
2466 changed = 1;
2467 XEXP (x, 1) = gen_rtx (MULT, Pmode,
2468 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
2469 GEN_INT (1 << log));
2470 }
2471
2472 /* Put multiply first if it isn't already */
2473 if (GET_CODE (XEXP (x, 1)) == MULT)
2474 {
2475 rtx tmp = XEXP (x, 0);
2476 XEXP (x, 0) = XEXP (x, 1);
2477 XEXP (x, 1) = tmp;
2478 changed = 1;
2479 }
2480
2481 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2482 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2483 created by virtual register instantiation, register elimination, and
2484 similar optimizations. */
2485 if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
2486 {
2487 changed = 1;
2488 x = gen_rtx (PLUS, Pmode,
2489 gen_rtx (PLUS, Pmode, XEXP (x, 0), XEXP (XEXP (x, 1), 0)),
2490 XEXP (XEXP (x, 1), 1));
2491 }
2492
2493 /* Canonicalize (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2494 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2495 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
2496 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2497 && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
2498 && CONSTANT_P (XEXP (x, 1)))
2499 {
2500 rtx constant, other;
2501
2502 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2503 {
2504 constant = XEXP (x, 1);
2505 other = XEXP (XEXP (XEXP (x, 0), 1), 1);
2506 }
2507 else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
2508 {
2509 constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
2510 other = XEXP (x, 1);
2511 }
2512 else
2513 constant = 0;
2514
2515 if (constant)
2516 {
2517 changed = 1;
2518 x = gen_rtx (PLUS, Pmode,
2519 gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
2520 XEXP (XEXP (XEXP (x, 0), 1), 0)),
2521 plus_constant (other, INTVAL (constant)));
2522 }
2523 }
2524
2525 if (changed && legitimate_address_p (mode, x, FALSE))
2526 return x;
2527
2528 if (GET_CODE (XEXP (x, 0)) == MULT)
2529 {
2530 changed = 1;
2531 XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
2532 }
2533
2534 if (GET_CODE (XEXP (x, 1)) == MULT)
2535 {
2536 changed = 1;
2537 XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
2538 }
2539
2540 if (changed
2541 && GET_CODE (XEXP (x, 1)) == REG
2542 && GET_CODE (XEXP (x, 0)) == REG)
2543 return x;
2544
2545 if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
2546 {
2547 changed = 1;
2548 x = legitimize_pic_address (x, 0);
2549 }
2550
2551 if (changed && legitimate_address_p (mode, x, FALSE))
2552 return x;
2553
2554 if (GET_CODE (XEXP (x, 0)) == REG)
2555 {
2556 register rtx temp = gen_reg_rtx (Pmode);
2557 register rtx val = force_operand (XEXP (x, 1), temp);
2558 if (val != temp)
2559 emit_move_insn (temp, val);
2560
2561 XEXP (x, 1) = temp;
2562 return x;
2563 }
2564
2565 else if (GET_CODE (XEXP (x, 1)) == REG)
2566 {
2567 register rtx temp = gen_reg_rtx (Pmode);
2568 register rtx val = force_operand (XEXP (x, 0), temp);
2569 if (val != temp)
2570 emit_move_insn (temp, val);
2571
2572 XEXP (x, 0) = temp;
2573 return x;
2574 }
2575 }
2576
2577 return x;
2578}
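/* Minimal usage sketch, not part of the original source: an address of
   the form reg1 + (reg2 << 2) is not canonical; legitimize_address above
   rewrites the shift as a multiply and reorders the PLUS so the result
   can be recognized as reg1 + reg2*4.  */
#if 0
static rtx
example_canonicalize_shift_address ()
{
  rtx reg1 = gen_reg_rtx (SImode);
  rtx reg2 = gen_reg_rtx (SImode);
  rtx addr = gen_rtx (PLUS, SImode,
		      reg1,
		      gen_rtx (ASHIFT, SImode, reg2, GEN_INT (2)));

  /* Expected to return (plus (mult reg2 4) reg1) or an equivalent
     legitimate form.  */
  return legitimize_address (addr, addr, SImode);
}
#endif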
2579
2a2ab3f9
JVA
2580\f
2581/* Print an integer constant expression in assembler syntax. Addition
2582 and subtraction are the only arithmetic that may appear in these
2583 expressions. FILE is the stdio stream to write to, X is the rtx, and
2584 CODE is the operand print code from the output string. */
2585
2586static void
2587output_pic_addr_const (file, x, code)
2588 FILE *file;
2589 rtx x;
2590 int code;
2591{
2592 char buf[256];
2593
2594 switch (GET_CODE (x))
2595 {
2596 case PC:
2597 if (flag_pic)
2598 putc ('.', file);
2599 else
2600 abort ();
2601 break;
2602
2603 case SYMBOL_REF:
2604 case LABEL_REF:
2605 if (GET_CODE (x) == SYMBOL_REF)
2606 assemble_name (file, XSTR (x, 0));
2607 else
2608 {
2609 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
2610 CODE_LABEL_NUMBER (XEXP (x, 0)));
2611 assemble_name (asm_out_file, buf);
2612 }
2613
2614 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
2615 fprintf (file, "@GOTOFF(%%ebx)");
2616 else if (code == 'P')
2617 fprintf (file, "@PLT");
c399861d
MM
2618 else if (GET_CODE (x) == LABEL_REF)
2619 fprintf (file, "@GOTOFF");
2620 else if (! SYMBOL_REF_FLAG (x))
2a2ab3f9
JVA
2621 fprintf (file, "@GOT");
2622 else
2623 fprintf (file, "@GOTOFF");
2624
2625 break;
2626
2627 case CODE_LABEL:
2628 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
2629 assemble_name (asm_out_file, buf);
2630 break;
2631
2632 case CONST_INT:
2633 fprintf (file, "%d", INTVAL (x));
2634 break;
2635
2636 case CONST:
2637 /* This used to output parentheses around the expression,
2638 but that does not work on the 386 (either ATT or BSD assembler). */
2639 output_pic_addr_const (file, XEXP (x, 0), code);
2640 break;
2641
2642 case CONST_DOUBLE:
2643 if (GET_MODE (x) == VOIDmode)
2644 {
2645 /* We can use %d if the number is <32 bits and positive. */
2646 if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
2647 fprintf (file, "0x%x%08x",
2648 CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
2649 else
2650 fprintf (file, "%d", CONST_DOUBLE_LOW (x));
2651 }
2652 else
2653 /* We can't handle floating point constants;
2654 PRINT_OPERAND must handle them. */
2655 output_operand_lossage ("floating constant misused");
2656 break;
2657
2658 case PLUS:
2659 /* Some assemblers need integer constants to appear last (e.g., MASM). */
2660 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
2661 {
2662 output_pic_addr_const (file, XEXP (x, 1), code);
2663 if (INTVAL (XEXP (x, 0)) >= 0)
2664 fprintf (file, "+");
2665 output_pic_addr_const (file, XEXP (x, 0), code);
2666 }
2667 else
2668 {
2669 output_pic_addr_const (file, XEXP (x, 0), code);
2670 if (INTVAL (XEXP (x, 1)) >= 0)
2671 fprintf (file, "+");
2672 output_pic_addr_const (file, XEXP (x, 1), code);
2673 }
2674 break;
2675
2676 case MINUS:
2677 output_pic_addr_const (file, XEXP (x, 0), code);
2678 fprintf (file, "-");
2679 output_pic_addr_const (file, XEXP (x, 1), code);
2680 break;
2681
2682 default:
2683 output_operand_lossage ("invalid expression as operand");
2684 }
2685}
2686\f
e5cb57e8
SC
2687
2688/* Print the condition-code suffix corresponding to CODE (used for set and conditional-move instructions). */
2689
2690static void
2691put_condition_code (code, file)
2692 enum rtx_code code;
2693 FILE * file;
2694{
2695 switch (code)
2696 {
2697 case NE:
2698 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2699 fputs ("b", file);
2700 else
2701 fputs ("ne", file);
2702 return;
2703 case EQ:
2704 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2705 fputs ("ae", file);
2706 else
2707 fputs ("e", file);
2708 return;
2709 case GE:
2710 fputs ("ge", file); return;
2711 case GT:
2712 fputs ("g", file); return;
2713 case LE:
2714 fputs ("le", file); return;
2715 case LT:
2716 fputs ("l", file); return;
2717 case GEU:
2718 fputs ("ae", file); return;
2719 case GTU:
2720 fputs ("a", file); return;
2721 case LEU:
2722 fputs ("be", file); return;
2723 case LTU:
2724 fputs ("b", file); return;
2725 default: output_operand_lossage ("Invalid %%C operand");
2726 }
2727}
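/* For illustration only: a template such as "cmov%C1 %2,%0" with an EQ
   comparison in operands[1] comes out as "cmove %2,%0", while "%N1" on
   the same operand gives the reversed "cmovne".  */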
2728
2a2ab3f9
JVA
2729/* Meaning of CODE:
2730 f -- float insn (print a CONST_DOUBLE as a float rather than in hex).
2731 D,L,W,B,Q,S -- print the opcode suffix for specified size of operand.
e5cb57e8
SC
2732 C -- print opcode suffix for set/cmov insn.
2733 N -- like C, but print reversed condition
2a2ab3f9
JVA
2734 R -- print the prefix for register names.
2735 z -- print the opcode suffix for the size of the current operand.
2736 * -- print a star (in certain assembler syntax)
2737 w -- print the operand as if it's a "word" (HImode) even if it isn't.
2738 c -- don't print special prefixes before constant operands.
b08de47e 2739 J -- print the appropriate jump operand.
2d49677f
SC
2740 s -- print a shift double count, followed by the assembler's argument
2741 delimiter.
2a2ab3f9
JVA
2742*/
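/* Illustrative examples, not part of the original source, assuming an
   SImode register in operands[0] and an SFmode MEM in operands[1]:

	AS2 (mov%L0,%1,%0)	->	movl <mem>,%<reg>
	AS1 (push%L0,%0)	->	pushl %<reg>
	"%z1" on the SFmode operand adds the `s' suffix, as in "flds".

   (AT&T syntax shown; AS1/AS2 and the size codes exist precisely to hide
   the differences between the supported assemblers.)  */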
2743
2744void
2745print_operand (file, x, code)
2746 FILE *file;
2747 rtx x;
2748 int code;
2749{
2750 if (code)
2751 {
2752 switch (code)
2753 {
2754 case '*':
2755 if (USE_STAR)
2756 putc ('*', file);
2757 return;
2758
2a2ab3f9
JVA
2759 case 'L':
2760 PUT_OP_SIZE (code, 'l', file);
2761 return;
2762
2763 case 'W':
2764 PUT_OP_SIZE (code, 'w', file);
2765 return;
2766
2767 case 'B':
2768 PUT_OP_SIZE (code, 'b', file);
2769 return;
2770
2771 case 'Q':
2772 PUT_OP_SIZE (code, 'l', file);
2773 return;
2774
2775 case 'S':
2776 PUT_OP_SIZE (code, 's', file);
2777 return;
2778
5f1ec3e6
JVA
2779 case 'T':
2780 PUT_OP_SIZE (code, 't', file);
2781 return;
2782
2a2ab3f9
JVA
2783 case 'z':
2784 /* 387 opcodes don't get size suffixes if the operands are
2785 registers. */
2786
2787 if (STACK_REG_P (x))
2788 return;
2789
2790 /* this is the size of op from size of operand */
2791 switch (GET_MODE_SIZE (GET_MODE (x)))
2792 {
2793 case 1:
2794 PUT_OP_SIZE ('B', 'b', file);
2795 return;
2796
2797 case 2:
2798 PUT_OP_SIZE ('W', 'w', file);
2799 return;
2800
2801 case 4:
2802 if (GET_MODE (x) == SFmode)
2803 {
2804 PUT_OP_SIZE ('S', 's', file);
2805 return;
2806 }
2807 else
2808 PUT_OP_SIZE ('L', 'l', file);
2809 return;
2810
5f1ec3e6
JVA
2811 case 12:
2812 PUT_OP_SIZE ('T', 't', file);
2813 return;
2814
2a2ab3f9
JVA
2815 case 8:
2816 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
56c0e8fa
JVA
2817 {
2818#ifdef GAS_MNEMONICS
2819 PUT_OP_SIZE ('Q', 'q', file);
2820 return;
2821#else
2822 PUT_OP_SIZE ('Q', 'l', file); /* Fall through */
2823#endif
2824 }
2a2ab3f9
JVA
2825
2826 PUT_OP_SIZE ('Q', 'l', file);
2827 return;
2828 }
4af3895e
JVA
2829
2830 case 'b':
2831 case 'w':
2832 case 'k':
2833 case 'h':
2834 case 'y':
2835 case 'P':
2836 break;
2837
b08de47e
MM
2838 case 'J':
2839 switch (GET_CODE (x))
2840 {
c645b1c9
MM
2841 /* These conditions are appropriate for testing the result
2842 of an arithmetic operation, not for a compare operation.
2843 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
2844 CC_Z_IN_NOT_C false and not floating point. */
b08de47e
MM
2845 case NE: fputs ("jne", file); return;
2846 case EQ: fputs ("je", file); return;
c645b1c9 2847 case GE: fputs ("jns", file); return;
c645b1c9 2848 case LT: fputs ("js", file); return;
d784886d
RK
2849 case GEU: fputs ("jmp", file); return;
2850 case GTU: fputs ("jne", file); return;
2851 case LEU: fputs ("je", file); return;
2852 case LTU: fputs ("#branch never", file); return;
2853
2854 /* no matching branches for GT nor LE */
b08de47e
MM
2855 }
2856 abort ();
2857
2d49677f
SC
2858 case 's':
2859 if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
2860 {
2861 PRINT_OPERAND (file, x, 0);
2862 fputs (AS2C (,) + 1, file);
2863 }
2864 return;
2865
1853aadd
RK
2866 /* This is used by the conditional move instructions. */
2867 case 'C':
2868 put_condition_code (GET_CODE (x), file);
2869 return;
2870 /* like above, but reverse condition */
2871 case 'N':
2872 put_condition_code (reverse_condition (GET_CODE (x)), file);
2873 return;
e5cb57e8 2874
4af3895e 2875 default:
68daafd4
JVA
2876 {
2877 char str[50];
2878
2879 sprintf (str, "invalid operand code `%c'", code);
2880 output_operand_lossage (str);
2881 }
2a2ab3f9
JVA
2882 }
2883 }
2884 if (GET_CODE (x) == REG)
2885 {
2886 PRINT_REG (x, code, file);
2887 }
2888 else if (GET_CODE (x) == MEM)
2889 {
2890 PRINT_PTR (x, file);
2891 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2892 {
2893 if (flag_pic)
2894 output_pic_addr_const (file, XEXP (x, 0), code);
2895 else
2896 output_addr_const (file, XEXP (x, 0));
2897 }
2898 else
2899 output_address (XEXP (x, 0));
2900 }
2901 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
2902 {
5f1ec3e6
JVA
2903 REAL_VALUE_TYPE r; long l;
2904 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2905 REAL_VALUE_TO_TARGET_SINGLE (r, l);
4af3895e 2906 PRINT_IMMED_PREFIX (file);
5f1ec3e6
JVA
2907 fprintf (file, "0x%x", l);
2908 }
2909 /* These float cases don't actually occur as immediate operands. */
2910 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
2911 {
2912 REAL_VALUE_TYPE r; char dstr[30];
2913 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2914 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
2915 fprintf (file, "%s", dstr);
2a2ab3f9 2916 }
5f1ec3e6 2917 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
2a2ab3f9 2918 {
5f1ec3e6
JVA
2919 REAL_VALUE_TYPE r; char dstr[30];
2920 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2921 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
2922 fprintf (file, "%s", dstr);
2a2ab3f9
JVA
2923 }
2924 else
2925 {
4af3895e 2926 if (code != 'P')
2a2ab3f9 2927 {
695dac07 2928 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
2a2ab3f9
JVA
2929 PRINT_IMMED_PREFIX (file);
2930 else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
2931 || GET_CODE (x) == LABEL_REF)
2932 PRINT_OFFSET_PREFIX (file);
2933 }
2934 if (flag_pic)
2935 output_pic_addr_const (file, x, code);
2936 else
2937 output_addr_const (file, x);
2938 }
2939}
2940\f
2941/* Print a memory operand whose address is ADDR. */
2942
2943void
2944print_operand_address (file, addr)
2945 FILE *file;
2946 register rtx addr;
2947{
2948 register rtx reg1, reg2, breg, ireg;
2949 rtx offset;
2950
2951 switch (GET_CODE (addr))
2952 {
2953 case REG:
2954 ADDR_BEG (file);
2955 fprintf (file, "%se", RP);
2956 fputs (hi_reg_name[REGNO (addr)], file);
2957 ADDR_END (file);
2958 break;
2959
2960 case PLUS:
2961 reg1 = 0;
2962 reg2 = 0;
2963 ireg = 0;
2964 breg = 0;
2965 offset = 0;
2966 if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
2967 {
2968 offset = XEXP (addr, 0);
2969 addr = XEXP (addr, 1);
2970 }
2971 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
2972 {
2973 offset = XEXP (addr, 1);
2974 addr = XEXP (addr, 0);
2975 }
2976 if (GET_CODE (addr) != PLUS) ;
2977 else if (GET_CODE (XEXP (addr, 0)) == MULT)
2978 {
2979 reg1 = XEXP (addr, 0);
2980 addr = XEXP (addr, 1);
2981 }
2982 else if (GET_CODE (XEXP (addr, 1)) == MULT)
2983 {
2984 reg1 = XEXP (addr, 1);
2985 addr = XEXP (addr, 0);
2986 }
2987 else if (GET_CODE (XEXP (addr, 0)) == REG)
2988 {
2989 reg1 = XEXP (addr, 0);
2990 addr = XEXP (addr, 1);
2991 }
2992 else if (GET_CODE (XEXP (addr, 1)) == REG)
2993 {
2994 reg1 = XEXP (addr, 1);
2995 addr = XEXP (addr, 0);
2996 }
2997 if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
2998 {
2999 if (reg1 == 0) reg1 = addr;
3000 else reg2 = addr;
3001 addr = 0;
3002 }
3003 if (offset != 0)
3004 {
3005 if (addr != 0) abort ();
3006 addr = offset;
3007 }
3008 if ((reg1 && GET_CODE (reg1) == MULT)
3009 || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
3010 {
3011 breg = reg2;
3012 ireg = reg1;
3013 }
3014 else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
3015 {
3016 breg = reg1;
3017 ireg = reg2;
3018 }
3019
3020 if (ireg != 0 || breg != 0)
3021 {
3022 int scale = 1;
3023
3024 if (addr != 0)
3025 {
c399861d
MM
3026 if (flag_pic)
3027 output_pic_addr_const (file, addr, 0);
3028
3029 else if (GET_CODE (addr) == LABEL_REF)
2a2ab3f9 3030 output_asm_label (addr);
c399861d 3031
2a2ab3f9 3032 else
c399861d 3033 output_addr_const (file, addr);
2a2ab3f9
JVA
3034 }
3035
3036 if (ireg != 0 && GET_CODE (ireg) == MULT)
3037 {
3038 scale = INTVAL (XEXP (ireg, 1));
3039 ireg = XEXP (ireg, 0);
3040 }
3041
3042 /* The stack pointer can only appear as a base register,
3043 never an index register, so exchange the regs if it is wrong. */
3044
3045 if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
3046 {
3047 rtx tmp;
3048
3049 tmp = breg;
3050 breg = ireg;
3051 ireg = tmp;
3052 }
3053
3054 /* output breg+ireg*scale */
3055 PRINT_B_I_S (breg, ireg, scale, file);
3056 break;
3057 }
3058
3059 case MULT:
3060 {
3061 int scale;
3062 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
3063 {
3064 scale = INTVAL (XEXP (addr, 0));
3065 ireg = XEXP (addr, 1);
3066 }
3067 else
3068 {
3069 scale = INTVAL (XEXP (addr, 1));
3070 ireg = XEXP (addr, 0);
3071 }
3072 output_addr_const (file, const0_rtx);
3073 PRINT_B_I_S ((rtx) 0, ireg, scale, file);
3074 }
3075 break;
3076
3077 default:
3078 if (GET_CODE (addr) == CONST_INT
3079 && INTVAL (addr) < 0x8000
3080 && INTVAL (addr) >= -0x8000)
3081 fprintf (file, "%d", INTVAL (addr));
3082 else
3083 {
3084 if (flag_pic)
3085 output_pic_addr_const (file, addr, 0);
3086 else
3087 output_addr_const (file, addr);
3088 }
3089 }
3090}
3091\f
3092/* Set the cc_status for the results of an insn whose pattern is EXP.
3093 On the 80386, we assume that only test and compare insns, as well
291b0f34 3094 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
2a2ab3f9 3095 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
4c0d89b5
RS
3096 Also, we assume that jumps, moves and sCOND don't affect the condition
3097 codes. All else clobbers the condition codes, by assumption.
3098
3099 We assume that ALL integer add, minus, etc. instructions affect the
3100 condition codes. This MUST be consistent with i386.md.
2a2ab3f9 3101
4c0d89b5
RS
3102 We don't record any float test or compare - the redundant test &
3103 compare check in final.c does not handle stack-like regs correctly. */
2a2ab3f9
JVA
3104
3105void
3106notice_update_cc (exp)
3107 rtx exp;
3108{
3109 if (GET_CODE (exp) == SET)
3110 {
3111 /* Jumps do not alter the cc's. */
3112 if (SET_DEST (exp) == pc_rtx)
3113 return;
32b5b1aa
SC
3114#ifdef IS_STACK_MODE
3115 /* A move into a stack-mode memory location may have been moved in
3116 between the set and use of cc0 by loop_spl (), so the old value of
3117 cc_status must be retained. */
3118 if (GET_CODE (SET_DEST (exp)) == MEM
3119 && IS_STACK_MODE (GET_MODE (SET_DEST (exp))))
3120 {
3121 return;
3122 }
3123#endif
2a2ab3f9
JVA
3124 /* Moving register or memory into a register:
3125 it doesn't alter the cc's, but it might invalidate
3126 the RTX's which we remember the cc's came from.
3127 (Note that moving a constant 0 or 1 MAY set the cc's). */
3128 if (REG_P (SET_DEST (exp))
4c0d89b5
RS
3129 && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
3130 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
2a2ab3f9
JVA
3131 {
3132 if (cc_status.value1
3133 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
3134 cc_status.value1 = 0;
3135 if (cc_status.value2
3136 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
3137 cc_status.value2 = 0;
3138 return;
3139 }
3140 /* Moving register into memory doesn't alter the cc's.
3141 It may invalidate the RTX's which we remember the cc's came from. */
4c0d89b5
RS
3142 if (GET_CODE (SET_DEST (exp)) == MEM
3143 && (REG_P (SET_SRC (exp))
3144 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
2a2ab3f9 3145 {
0a78e862
SC
3146 if (cc_status.value1 && (GET_CODE (cc_status.value1) == MEM
3147 || reg_mentioned_p (SET_DEST (exp), cc_status.value1)))
2a2ab3f9 3148 cc_status.value1 = 0;
0a78e862
SC
3149 if (cc_status.value2 && (GET_CODE (cc_status.value2) == MEM
3150 || reg_mentioned_p (SET_DEST (exp), cc_status.value2)))
2a2ab3f9
JVA
3151 cc_status.value2 = 0;
3152 return;
3153 }
3154 /* Function calls clobber the cc's. */
3155 else if (GET_CODE (SET_SRC (exp)) == CALL)
3156 {
3157 CC_STATUS_INIT;
3158 return;
3159 }
3160 /* Tests and compares set the cc's in predictable ways. */
3161 else if (SET_DEST (exp) == cc0_rtx)
3162 {
3163 CC_STATUS_INIT;
3164 cc_status.value1 = SET_SRC (exp);
3165 return;
3166 }
3167 /* Certain instructions affect the condition codes. */
3168 else if (GET_MODE (SET_SRC (exp)) == SImode
3169 || GET_MODE (SET_SRC (exp)) == HImode
3170 || GET_MODE (SET_SRC (exp)) == QImode)
3171 switch (GET_CODE (SET_SRC (exp)))
3172 {
3173 case ASHIFTRT: case LSHIFTRT:
291b0f34 3174 case ASHIFT:
2a2ab3f9
JVA
3175 /* Shifts on the 386 don't set the condition codes if the
3176 shift count is zero. */
3177 if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
3178 {
3179 CC_STATUS_INIT;
3180 break;
3181 }
3182 /* We assume that the CONST_INT is non-zero (this rtx would
3183 have been deleted if it were zero).  Fall through. */
3184
3185 case PLUS: case MINUS: case NEG:
3186 case AND: case IOR: case XOR:
3187 cc_status.flags = CC_NO_OVERFLOW;
3188 cc_status.value1 = SET_SRC (exp);
3189 cc_status.value2 = SET_DEST (exp);
3190 break;
3191
3192 default:
3193 CC_STATUS_INIT;
3194 }
3195 else
3196 {
3197 CC_STATUS_INIT;
3198 }
3199 }
3200 else if (GET_CODE (exp) == PARALLEL
3201 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
3202 {
3203 if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
3204 return;
3205 if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)
3206 {
3207 CC_STATUS_INIT;
2247a58c
JVA
3208 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
3209 cc_status.flags |= CC_IN_80387;
3210 else
4c0d89b5 3211 cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
2a2ab3f9
JVA
3212 return;
3213 }
3214 CC_STATUS_INIT;
3215 }
3216 else
3217 {
3218 CC_STATUS_INIT;
3219 }
3220}
3221\f
3222/* Split one or more DImode RTL references into pairs of SImode
3223 references. The RTL can be REG, offsettable MEM, integer constant, or
3224 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3225 split and "num" is its length. lo_half and hi_half are output arrays
3226 that parallel "operands". */
3227
3228void
3229split_di (operands, num, lo_half, hi_half)
3230 rtx operands[];
3231 int num;
3232 rtx lo_half[], hi_half[];
3233{
3234 while (num--)
3235 {
3236 if (GET_CODE (operands[num]) == REG)
3237 {
3238 lo_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]));
3239 hi_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]) + 1);
3240 }
3241 else if (CONSTANT_P (operands[num]))
3242 {
3243 split_double (operands[num], &lo_half[num], &hi_half[num]);
3244 }
3245 else if (offsettable_memref_p (operands[num]))
3246 {
3247 lo_half[num] = operands[num];
3248 hi_half[num] = adj_offsettable_operand (operands[num], 4);
3249 }
3250 else
3251 abort();
3252 }
3253}
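/* Minimal usage sketch, not part of the original source: split a DImode
   hard register into its SImode halves.  With the usual register
   numbering (regno 0 = %eax, regno 1 = %edx) the low word comes back in
   %eax and the high word in %edx.  */
#if 0
static void
example_split_di ()
{
  rtx op[1], lo[1], hi[1];

  op[0] = gen_rtx (REG, DImode, 0);
  split_di (op, 1, lo, hi);
  /* lo[0] is (reg:SI 0) and hi[0] is (reg:SI 1).  */
}
#endif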
3254\f
3255/* Return 1 if this is a valid binary operation on a 387.
3256 OP is the expression matched, and MODE is its mode. */
3257
3258int
3259binary_387_op (op, mode)
3260 register rtx op;
3261 enum machine_mode mode;
3262{
3263 if (mode != VOIDmode && mode != GET_MODE (op))
3264 return 0;
3265
3266 switch (GET_CODE (op))
3267 {
3268 case PLUS:
3269 case MINUS:
3270 case MULT:
3271 case DIV:
3272 return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
3273
3274 default:
3275 return 0;
3276 }
3277}
3278
3b3c6a3f 3279\f
2a2ab3f9
JVA
3280/* Return 1 if this is a valid shift or rotate operation on a 386.
3281 OP is the expression matched, and MODE is its mode. */
3282
3283int
3284shift_op (op, mode)
3285 register rtx op;
3286 enum machine_mode mode;
3287{
3288 rtx operand = XEXP (op, 0);
3289
3290 if (mode != VOIDmode && mode != GET_MODE (op))
3291 return 0;
3292
3293 if (GET_MODE (operand) != GET_MODE (op)
3294 || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
3295 return 0;
3296
3297 return (GET_CODE (op) == ASHIFT
3298 || GET_CODE (op) == ASHIFTRT
3299 || GET_CODE (op) == LSHIFTRT
3300 || GET_CODE (op) == ROTATE
3301 || GET_CODE (op) == ROTATERT);
3302}
ac2afb64
JVA
3303
3304/* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3305 MODE is not used. */
3306
3307int
3308VOIDmode_compare_op (op, mode)
3309 register rtx op;
3310 enum machine_mode mode;
3311{
3312 return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
3313}
2a2ab3f9
JVA
3314\f
3315/* Output code to perform a 387 binary operation in INSN, one of PLUS,
3316 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3317 is the expression of the binary operation. The output may either be
3318 emitted here, or returned to the caller, like all output_* functions.
3319
3320 There is no guarantee that the operands are the same mode, as they
3321 might be within FLOAT or FLOAT_EXTEND expressions. */
3322
3323char *
3324output_387_binary_op (insn, operands)
3325 rtx insn;
3326 rtx *operands;
3327{
3328 rtx temp;
3329 char *base_op;
3330 static char buf[100];
3331
3332 switch (GET_CODE (operands[3]))
3333 {
3334 case PLUS:
3335 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3336 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3337 base_op = "fiadd";
3338 else
3339 base_op = "fadd";
3340 break;
3341
3342 case MINUS:
3343 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3344 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3345 base_op = "fisub";
3346 else
3347 base_op = "fsub";
3348 break;
3349
3350 case MULT:
3351 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3352 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3353 base_op = "fimul";
3354 else
3355 base_op = "fmul";
3356 break;
3357
3358 case DIV:
3359 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3360 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3361 base_op = "fidiv";
3362 else
3363 base_op = "fdiv";
3364 break;
3365
3366 default:
3367 abort ();
3368 }
3369
3370 strcpy (buf, base_op);
3371
3372 switch (GET_CODE (operands[3]))
3373 {
3374 case MULT:
3375 case PLUS:
3376 if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
3377 {
3378 temp = operands[2];
3379 operands[2] = operands[1];
3380 operands[1] = temp;
3381 }
3382
3383 if (GET_CODE (operands[2]) == MEM)
3384 return strcat (buf, AS1 (%z2,%2));
3385
3386 if (NON_STACK_REG_P (operands[1]))
3387 {
3388 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3389 RET;
3390 }
3391 else if (NON_STACK_REG_P (operands[2]))
3392 {
3393 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
3394 RET;
3395 }
3396
3397 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3398 return strcat (buf, AS2 (p,%2,%0));
3399
3400 if (STACK_TOP_P (operands[0]))
3f6d0a8c 3401 return strcat (buf, AS2C (%y2,%0));
2a2ab3f9 3402 else
3f6d0a8c 3403 return strcat (buf, AS2C (%2,%0));
2a2ab3f9
JVA
3404
3405 case MINUS:
3406 case DIV:
3407 if (GET_CODE (operands[1]) == MEM)
3408 return strcat (buf, AS1 (r%z1,%1));
3409
3410 if (GET_CODE (operands[2]) == MEM)
3411 return strcat (buf, AS1 (%z2,%2));
3412
3413 if (NON_STACK_REG_P (operands[1]))
3414 {
3415 output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
3416 RET;
3417 }
3418 else if (NON_STACK_REG_P (operands[2]))
3419 {
3420 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
3421 RET;
3422 }
3423
3424 if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
3425 abort ();
3426
3427 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3428 return strcat (buf, AS2 (rp,%2,%0));
3429
3430 if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
3431 return strcat (buf, AS2 (p,%1,%0));
3432
3433 if (STACK_TOP_P (operands[0]))
3434 {
3435 if (STACK_TOP_P (operands[1]))
3f6d0a8c 3436 return strcat (buf, AS2C (%y2,%0));
2a2ab3f9
JVA
3437 else
3438 return strcat (buf, AS2 (r,%y1,%0));
3439 }
3440 else if (STACK_TOP_P (operands[1]))
3f6d0a8c 3441 return strcat (buf, AS2C (%1,%0));
2a2ab3f9
JVA
3442 else
3443 return strcat (buf, AS2 (r,%2,%0));
3444
3445 default:
3446 abort ();
3447 }
3448}
3449\f
3450/* Output code for INSN to convert a float to a signed int.  OPERANDS
3451 are the insn operands.  The input may be SFmode or DFmode and the
3452 output operand may be SImode or DImode.  As a special case, make sure
3453 that the 387 stack top dies if the output mode is DImode, because the
3454 hardware requires this. */
3455
3456char *
3457output_fix_trunc (insn, operands)
3458 rtx insn;
3459 rtx *operands;
3460{
3461 int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
305f097e 3462 rtx xops[2];
2a2ab3f9
JVA
3463
3464 if (! STACK_TOP_P (operands[1]) ||
3465 (GET_MODE (operands[0]) == DImode && ! stack_top_dies))
3466 abort ();
3467
305f097e
JVA
3468 xops[0] = GEN_INT (12);
3469 xops[1] = operands[4];
3470
3471 output_asm_insn (AS1 (fnstc%W2,%2), operands);
3472 output_asm_insn (AS2 (mov%L2,%2,%4), operands);
3473 output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
3474 output_asm_insn (AS2 (mov%L4,%4,%3), operands);
3475 output_asm_insn (AS1 (fldc%W3,%3), operands);
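  /* Explanatory note, not in the original: the sequence above stores the
     387 control word, copies it into a register, writes 0x0c into that
     copy's high byte to select round-toward-zero, and reloads the
     modified word, so the fist/fistp below truncates as C requires.  The
     original control word is restored by the fldcw template returned at
     the end of this function.  */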
2a2ab3f9
JVA
3476
3477 if (NON_STACK_REG_P (operands[0]))
3478 output_to_reg (operands[0], stack_top_dies);
3479 else if (GET_CODE (operands[0]) == MEM)
3480 {
2a2ab3f9
JVA
3481 if (stack_top_dies)
3482 output_asm_insn (AS1 (fistp%z0,%0), operands);
3483 else
3484 output_asm_insn (AS1 (fist%z0,%0), operands);
3485 }
3486 else
3487 abort ();
3488
305f097e 3489 return AS1 (fldc%W2,%2);
2a2ab3f9
JVA
3490}
3491\f
3492/* Output code for INSN to compare OPERANDS. The two operands might
3493 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
c572e5ba
JVA
3494 expression. If the compare is in mode CCFPEQmode, use an opcode that
3495 will not fault if a qNaN is present. */
2a2ab3f9
JVA
3496
3497char *
3498output_float_compare (insn, operands)
3499 rtx insn;
3500 rtx *operands;
3501{
3502 int stack_top_dies;
c572e5ba
JVA
3503 rtx body = XVECEXP (PATTERN (insn), 0, 0);
3504 int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
2a2ab3f9 3505
32b5b1aa
SC
3506 rtx tmp;
3507 if (! STACK_TOP_P (operands[0]))
3508 {
3509 tmp = operands[0];
3510 operands[0] = operands[1];
3511 operands[1] = tmp;
3512 cc_status.flags |= CC_REVERSED;
3513 }
3514
2a2ab3f9
JVA
3515 if (! STACK_TOP_P (operands[0]))
3516 abort ();
3517
3518 stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3519
3520 if (STACK_REG_P (operands[1])
3521 && stack_top_dies
3522 && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
3523 && REGNO (operands[1]) != FIRST_STACK_REG)
3524 {
3525 /* If both the top of the 387 stack and the other operand (also a
3526 stack register) die, then this must be a `fcompp' float
3527 compare. */
3528
c572e5ba
JVA
3529 if (unordered_compare)
3530 output_asm_insn ("fucompp", operands);
3531 else
3532 output_asm_insn ("fcompp", operands);
2a2ab3f9
JVA
3533 }
3534 else
3535 {
3536 static char buf[100];
3537
c572e5ba
JVA
3538 /* Decide if this is the integer or float compare opcode, or the
3539 unordered float compare. */
2a2ab3f9 3540
c572e5ba
JVA
3541 if (unordered_compare)
3542 strcpy (buf, "fucom");
3543 else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
2a2ab3f9
JVA
3544 strcpy (buf, "fcom");
3545 else
3546 strcpy (buf, "ficom");
3547
3548 /* Modify the opcode if the 387 stack is to be popped. */
3549
3550 if (stack_top_dies)
3551 strcat (buf, "p");
3552
3553 if (NON_STACK_REG_P (operands[1]))
3554 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3555 else
3556 output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
3557 }
3558
3559 /* Now retrieve the condition code. */
3560
c572e5ba
JVA
3561 return output_fp_cc0_set (insn);
3562}
3563\f
3564/* Output opcodes to transfer the results of FP compare or test INSN
3565 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
3566 result of the compare or test is unordered, no comparison operator
3567 succeeds except NE. Return an output template, if any. */
3568
3569char *
3570output_fp_cc0_set (insn)
3571 rtx insn;
3572{
3573 rtx xops[3];
3574 rtx unordered_label;
3575 rtx next;
3576 enum rtx_code code;
3577
3578 xops[0] = gen_rtx (REG, HImode, 0);
3579 output_asm_insn (AS1 (fnsts%W0,%0), xops);
3580
3581 if (! TARGET_IEEE_FP)
32b5b1aa
SC
3582 {
3583 if (!(cc_status.flags & CC_REVERSED))
3584 {
3585 next = next_cc0_user (insn);
3586
3587 if (GET_CODE (next) == JUMP_INSN
3588 && GET_CODE (PATTERN (next)) == SET
3589 && SET_DEST (PATTERN (next)) == pc_rtx
3590 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3591 {
3592 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3593 }
3594 else if (GET_CODE (PATTERN (next)) == SET)
3595 {
3596 code = GET_CODE (SET_SRC (PATTERN (next)));
3597 }
3598 else
3599 {
3600 return "sahf";
3601 }
3602 if (code == GT || code == LT || code == EQ || code == NE
3603 || code == LE || code == GE)
3604 { /* We will test eax directly */
3605 cc_status.flags |= CC_TEST_AX;
3606 RET;
3607 }
3608 }
3609 return "sahf";
3610 }
2a2ab3f9 3611
c572e5ba 3612 next = next_cc0_user (insn);
dd9611dc
JVA
3613 if (next == NULL_RTX)
3614 abort ();
c572e5ba
JVA
3615
3616 if (GET_CODE (next) == JUMP_INSN
3617 && GET_CODE (PATTERN (next)) == SET
3618 && SET_DEST (PATTERN (next)) == pc_rtx
3619 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3620 {
3621 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3622 }
3623 else if (GET_CODE (PATTERN (next)) == SET)
3624 {
3625 code = GET_CODE (SET_SRC (PATTERN (next)));
3626 }
3627 else
3628 abort ();
3629
3630 xops[0] = gen_rtx (REG, QImode, 0);
3631
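  /* Explanatory note, not in the original: after the fnstsw above, the 387
     condition bits C0, C2 and C3 sit in bits 0, 2 and 6 of %ah, so the
     masks below are C3|C2|C0 = 0x45, C2|C0 = 0x05, C3|C2 = 0x44 and
     C3 = 0x40.  An unordered result sets C0, C2 and C3 all to 1, which is
     why only NE can succeed on a NaN when TARGET_IEEE_FP.  */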
3632 switch (code)
3633 {
3634 case GT:
435defd1 3635 xops[1] = GEN_INT (0x45);
c572e5ba
JVA
3636 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3637 /* je label */
3638 break;
3639
3640 case LT:
435defd1
JVA
3641 xops[1] = GEN_INT (0x45);
3642 xops[2] = GEN_INT (0x01);
c572e5ba
JVA
3643 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3644 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3645 /* je label */
3646 break;
3647
3648 case GE:
435defd1 3649 xops[1] = GEN_INT (0x05);
c572e5ba
JVA
3650 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3651 /* je label */
3652 break;
3653
3654 case LE:
435defd1
JVA
3655 xops[1] = GEN_INT (0x45);
3656 xops[2] = GEN_INT (0x40);
c572e5ba
JVA
3657 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3658 output_asm_insn (AS1 (dec%B0,%h0), xops);
3659 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3660 /* jb label */
3661 break;
3662
3663 case EQ:
435defd1
JVA
3664 xops[1] = GEN_INT (0x45);
3665 xops[2] = GEN_INT (0x40);
c572e5ba
JVA
3666 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3667 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3668 /* je label */
3669 break;
3670
3671 case NE:
435defd1
JVA
3672 xops[1] = GEN_INT (0x44);
3673 xops[2] = GEN_INT (0x40);
c572e5ba
JVA
3674 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3675 output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
3676 /* jne label */
3677 break;
3678
3679 case GTU:
3680 case LTU:
3681 case GEU:
3682 case LEU:
3683 default:
3684 abort ();
3685 }
3686 RET;
2a2ab3f9 3687}
305f097e
JVA
3688\f
3689#define MAX_386_STACK_LOCALS 2
3690
3691static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3692
ecbc4695
RS
3693/* Define the structure for the machine field in struct function. */
3694struct machine_function
3695{
3696 rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3697};
3698
3699/* Functions to save and restore i386_stack_locals.
3700 These will be called, via pointer variables,
3701 from push_function_context and pop_function_context. */
3702
3703void
3704save_386_machine_status (p)
3705 struct function *p;
3706{
3707 p->machine = (struct machine_function *) xmalloc (sizeof i386_stack_locals);
dde866c6 3708 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
ecbc4695
RS
3709 sizeof i386_stack_locals);
3710}
3711
3712void
3713restore_386_machine_status (p)
3714 struct function *p;
3715{
dde866c6 3716 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
ecbc4695
RS
3717 sizeof i386_stack_locals);
3718 free (p->machine);
3719}
3720
305f097e
JVA
3721/* Clear stack slot assignments remembered from previous functions.
3722 This is called from INIT_EXPANDERS once before RTL is emitted for each
ecbc4695 3723 function. */
305f097e
JVA
3724
3725void
3726clear_386_stack_locals ()
3727{
3728 enum machine_mode mode;
3729 int n;
3730
3731 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
3732 mode = (enum machine_mode) ((int) mode + 1))
3733 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
3734 i386_stack_locals[(int) mode][n] = NULL_RTX;
ecbc4695
RS
3735
3736 /* Arrange to save and restore i386_stack_locals around nested functions. */
3737 save_machine_status = save_386_machine_status;
3738 restore_machine_status = restore_386_machine_status;
305f097e
JVA
3739}
3740
3741/* Return a MEM corresponding to a stack slot with mode MODE.
3742 Allocate a new slot if necessary.
3743
3744 The RTL for a function can have several slots available: N is
3745 which slot to use. */
3746
3747rtx
3748assign_386_stack_local (mode, n)
3749 enum machine_mode mode;
3750 int n;
3751{
3752 if (n < 0 || n >= MAX_386_STACK_LOCALS)
3753 abort ();
3754
3755 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
3756 i386_stack_locals[(int) mode][n]
3757 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
3758
3759 return i386_stack_locals[(int) mode][n];
3760}
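/* Minimal usage sketch, not part of the original source: obtain two
   scratch stack slots of the kind the md patterns request, e.g. for
   saving and modifying the FPU control word.  */
#if 0
static void
example_stack_locals ()
{
  rtx cw_old = assign_386_stack_local (HImode, 0);
  rtx cw_new = assign_386_stack_local (HImode, 1);

  /* Distinct indices give distinct slots; repeating a call with the same
     mode and index returns the same cached slot.  */
  if (cw_old == cw_new || cw_old != assign_386_stack_local (HImode, 0))
    abort ();
}
#endif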
32b5b1aa
SC
3761
3762\f
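/* Return 1 if OP is a MULT rtx.  MODE is ignored; it is present only to
   fit the standard predicate calling convention.  */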
3763int is_mul (op, mode)
3764 register rtx op;
3765 enum machine_mode mode;
3766{
3767 return (GET_CODE (op) == MULT);
3768}
3769
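/* Return 1 if OP is a DIV rtx.  MODE is ignored; it is present only to
   fit the standard predicate calling convention.  */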
3770int is_div (op, mode)
3771 register rtx op;
3772 enum machine_mode mode;
3773{
3774 return (GET_CODE (op) == DIV);
3775}
3776
3777\f
3778#ifdef NOTYET
3779/* Create a new copy of an rtx.
3780 Recursively copies the operands of the rtx,
3781 except for those few rtx codes that are sharable.
3782 Doesn't share CONST */
3783
3784rtx
3785copy_all_rtx (orig)
3786 register rtx orig;
3787{
3788 register rtx copy;
3789 register int i, j;
3790 register RTX_CODE code;
3791 register char *format_ptr;
3792
3793 code = GET_CODE (orig);
3794
3795 switch (code)
3796 {
3797 case REG:
3798 case QUEUED:
3799 case CONST_INT:
3800 case CONST_DOUBLE:
3801 case SYMBOL_REF:
3802 case CODE_LABEL:
3803 case PC:
3804 case CC0:
3805 case SCRATCH:
3806 /* SCRATCH must be shared because they represent distinct values. */
3807 return orig;
3808
3809#if 0
3810 case CONST:
3811 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
3812 a LABEL_REF, it isn't sharable. */
3813 if (GET_CODE (XEXP (orig, 0)) == PLUS
3814 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
3815 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
3816 return orig;
3817 break;
3818#endif
3819 /* A MEM with a constant address is not sharable. The problem is that
3820 the constant address may need to be reloaded. If the mem is shared,
3821 then reloading one copy of this mem will cause all copies to appear
3822 to have been reloaded. */
3823 }
3824
3825 copy = rtx_alloc (code);
3826 PUT_MODE (copy, GET_MODE (orig));
3827 copy->in_struct = orig->in_struct;
3828 copy->volatil = orig->volatil;
3829 copy->unchanging = orig->unchanging;
3830 copy->integrated = orig->integrated;
3831 /* intel1 */
3832 copy->is_spill_rtx = orig->is_spill_rtx;
3833
3834 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
3835
3836 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
3837 {
3838 switch (*format_ptr++)
3839 {
3840 case 'e':
3841 XEXP (copy, i) = XEXP (orig, i);
3842 if (XEXP (orig, i) != NULL)
3843 XEXP (copy, i) = copy_rtx (XEXP (orig, i));
3844 break;
3845
3846 case '0':
3847 case 'u':
3848 XEXP (copy, i) = XEXP (orig, i);
3849 break;
3850
3851 case 'E':
3852 case 'V':
3853 XVEC (copy, i) = XVEC (orig, i);
3854 if (XVEC (orig, i) != NULL)
3855 {
3856 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
3857 for (j = 0; j < XVECLEN (copy, i); j++)
3858 XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
3859 }
3860 break;
3861
3862 case 'w':
3863 XWINT (copy, i) = XWINT (orig, i);
3864 break;
3865
3866 case 'i':
3867 XINT (copy, i) = XINT (orig, i);
3868 break;
3869
3870 case 's':
3871 case 'S':
3872 XSTR (copy, i) = XSTR (orig, i);
3873 break;
3874
3875 default:
3876 abort ();
3877 }
3878 }
3879 return copy;
3880}
3881
3882\f
3883/* try to rewrite a memory address to make it valid */
3884void
3885rewrite_address (mem_rtx)
3886 rtx mem_rtx;
3887{
3888 rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
3889 int scale = 1;
3890 int offset_adjust = 0;
3891 int was_only_offset = 0;
3892 rtx mem_addr = XEXP (mem_rtx, 0);
3893 char *storage = (char *) oballoc (0);
3894 int in_struct = 0;
3895 int is_spill_rtx = 0;
3896
3897 in_struct = MEM_IN_STRUCT_P (mem_rtx);
3898 is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);
3899
3900 if (GET_CODE (mem_addr) == PLUS &&
3901 GET_CODE (XEXP (mem_addr, 1)) == PLUS &&
3902 GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
3903 { /* this part is utilized by the combiner */
3904 ret_rtx =
3905 gen_rtx (PLUS, GET_MODE (mem_addr),
3906 gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
3907 XEXP (mem_addr, 0),
3908 XEXP (XEXP (mem_addr, 1), 0)),
3909 XEXP (XEXP (mem_addr, 1), 1));
3910 if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
3911 {
3912 XEXP (mem_rtx, 0) = ret_rtx;
3913 RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
3914 return;
3915 }
3916 obfree (storage);
3917 }
3918
3919  /* This part is utilized by loop.c.  */
3920  /* If the address contains PLUS (reg, const) and this pattern is
3921     invalid in this case, try to rewrite the address to make it
3922     valid.  (intel1)  */
3923 storage = (char *) oballoc (0);
3924 index_rtx = base_rtx = offset_rtx = NULL;
3925 /* find the base index and offset elements of the memory address */
3926 if (GET_CODE (mem_addr) == PLUS)
3927 {
3928 if (GET_CODE (XEXP (mem_addr, 0)) == REG)
3929 {
3930 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
3931 {
3932 base_rtx = XEXP (mem_addr, 1);
3933 index_rtx = XEXP (mem_addr, 0);
3934 }
3935 else
3936 {
3937 base_rtx = XEXP (mem_addr, 0);
3938 offset_rtx = XEXP (mem_addr, 1);
3939 }
3940 }
3941 else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
3942 {
3943 index_rtx = XEXP (mem_addr, 0);
3944 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
3945 {
3946 base_rtx = XEXP (mem_addr, 1);
3947 }
3948 else
3949 {
3950 offset_rtx = XEXP (mem_addr, 1);
3951 }
3952 }
3953 else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
3954 {
3955 /* intel1 */
3956 if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS &&
3957 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT &&
3958 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0)) == REG &&
3959 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1)) == CONST_INT &&
3960 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1)) == CONST_INT &&
3961 GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG &&
3962 GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF)
3963 {
3964 index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
3965 offset_rtx = XEXP (mem_addr, 1);
3966 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
3967 offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
3968 }
3969 else
3970 {
3971 offset_rtx = XEXP (mem_addr, 1);
3972 index_rtx = XEXP (XEXP (mem_addr, 0), 0);
3973 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
3974 }
3975 }
3976 else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT)
3977 {
3978 was_only_offset = 1;
3979 index_rtx = NULL;
3980 base_rtx = NULL;
3981 offset_rtx = XEXP (mem_addr, 1);
3982 offset_adjust = INTVAL (XEXP (mem_addr, 0));
3983 if (offset_adjust == 0)
3984 {
3985 XEXP (mem_rtx, 0) = offset_rtx;
3986 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
3987 return;
3988 }
3989 }
3990 else
3991 {
3992 obfree (storage);
3993 return;
3994 }
3995 }
3996 else if (GET_CODE (mem_addr) == MULT)
3997 {
3998 index_rtx = mem_addr;
3999 }
4000 else
4001 {
4002 obfree (storage);
4003 return;
4004 }
4005 if (index_rtx && GET_CODE (index_rtx) == MULT)
4006 {
4007 if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
4008 {
4009 obfree (storage);
4010 return;
4011 }
4012 scale_rtx = XEXP (index_rtx, 1);
4013 scale = INTVAL (scale_rtx);
4014 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4015 }
4016 /* now find which of the elements are invalid and try to fix them */
4017 if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
4018 {
4019 offset_adjust = INTVAL (index_rtx) * scale;
4020 if (offset_rtx && GET_CODE (offset_rtx) == CONST &&
4021 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4022 {
4023 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4024 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4025 {
4026 offset_rtx = copy_all_rtx (offset_rtx);
4027 XEXP (XEXP (offset_rtx, 0), 1) =
4028 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4029 if (!CONSTANT_P (offset_rtx))
4030 {
4031 obfree (storage);
4032 return;
4033 }
4034 }
4035 }
4036 else if (offset_rtx && GET_CODE (offset_rtx) == SYMBOL_REF)
4037 {
4038 offset_rtx =
4039 gen_rtx (CONST, GET_MODE (offset_rtx),
4040 gen_rtx (PLUS, GET_MODE (offset_rtx),
4041 offset_rtx,
4042 gen_rtx (CONST_INT, 0, offset_adjust)));
4043 if (!CONSTANT_P (offset_rtx))
4044 {
4045 obfree (storage);
4046 return;
4047 }
4048 }
4049 else if (offset_rtx && GET_CODE (offset_rtx) == CONST_INT)
4050 {
4051 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4052 }
4053 else if (!offset_rtx)
4054 {
4055 offset_rtx = gen_rtx (CONST_INT, 0, 0);
4056 }
4057 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4058 XEXP (mem_rtx, 0) = offset_rtx;
4059 return;
4060 }
4061 if (base_rtx && GET_CODE (base_rtx) == PLUS &&
4062 GET_CODE (XEXP (base_rtx, 0)) == REG &&
4063 GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
4064 {
4065 offset_adjust += INTVAL (XEXP (base_rtx, 1));
4066 base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
4067 }
4068 else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
4069 {
4070 offset_adjust += INTVAL (base_rtx);
4071 base_rtx = NULL;
4072 }
4073 if (index_rtx && GET_CODE (index_rtx) == PLUS &&
4074 GET_CODE (XEXP (index_rtx, 0)) == REG &&
4075 GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
4076 {
4077 offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
4078 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4079 }
4080 if (index_rtx)
4081 {
4082 if (!LEGITIMATE_INDEX_P (index_rtx)
4083 && !(index_rtx == stack_pointer_rtx && scale == 1 && base_rtx == NULL))
4084 {
4085 obfree (storage);
4086 return;
4087 }
4088 }
4089 if (base_rtx)
4090 {
4091 if (!LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
4092 {
4093 obfree (storage);
4094 return;
4095 }
4096 }
4097 if (offset_adjust != 0)
4098 {
4099 if (offset_rtx)
4100 {
4101 if (GET_CODE (offset_rtx) == CONST &&
4102 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4103 {
4104 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4105 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4106 {
4107 offset_rtx = copy_all_rtx (offset_rtx);
4108 XEXP (XEXP (offset_rtx, 0), 1) =
4109 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4110 if (!CONSTANT_P (offset_rtx))
4111 {
4112 obfree (storage);
4113 return;
4114 }
4115 }
4116 }
4117 else if (GET_CODE (offset_rtx) == SYMBOL_REF)
4118 {
4119 offset_rtx =
4120 gen_rtx (CONST, GET_MODE (offset_rtx),
4121 gen_rtx (PLUS, GET_MODE (offset_rtx),
4122 offset_rtx,
4123 gen_rtx (CONST_INT, 0, offset_adjust)));
4124 if (!CONSTANT_P (offset_rtx))
4125 {
4126 obfree (storage);
4127 return;
4128 }
4129 }
4130 else if (GET_CODE (offset_rtx) == CONST_INT)
4131 {
4132 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4133 }
4134 else
4135 {
4136 obfree (storage);
4137 return;
4138 }
4139 }
4140 else
4141 {
4142 offset_rtx = gen_rtx (CONST_INT, 0, offset_adjust);
4143 }
4144 if (index_rtx)
4145 {
4146 if (base_rtx)
4147 {
4148 if (scale != 1)
4149 {
4150 if (GET_CODE (offset_rtx) == CONST_INT &&
4151 INTVAL (offset_rtx) == 0)
4152 {
4153 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
4154 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4155 scale_rtx),
4156 base_rtx);
4157 }
4158 else
4159 {
4160 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4161 gen_rtx (PLUS, GET_MODE (base_rtx),
4162 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4163 scale_rtx),
4164 base_rtx),
4165 offset_rtx);
4166 }
4167 }
4168 else
4169 {
4170 if (GET_CODE (offset_rtx) == CONST_INT &&
4171 INTVAL (offset_rtx) == 0)
4172 {
4173 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, base_rtx);
4174 }
4175 else
4176 {
4177 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4178 gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx,
4179 base_rtx),
4180 offset_rtx);
4181 }
4182 }
4183 }
4184 else
4185 {
4186 if (scale != 1)
4187 {
4188 if (GET_CODE (offset_rtx) == CONST_INT &&
4189 INTVAL (offset_rtx) == 0)
4190 {
4191 ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx), index_rtx, scale_rtx);
4192 }
4193 else
4194 {
4195 ret_rtx =
4196 gen_rtx (PLUS, GET_MODE (offset_rtx),
4197 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4198 scale_rtx),
4199 offset_rtx);
4200 }
4201 }
4202 else
4203 {
4204 if (GET_CODE (offset_rtx) == CONST_INT &&
4205 INTVAL (offset_rtx) == 0)
4206 {
4207 ret_rtx = index_rtx;
4208 }
4209 else
4210 {
4211 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, offset_rtx);
4212 }
4213 }
4214 }
4215 }
4216 else
4217 {
4218 if (base_rtx)
4219 {
4220 if (GET_CODE (offset_rtx) == CONST_INT &&
4221 INTVAL (offset_rtx) == 0)
4222 {
4223 ret_rtx = base_rtx;
4224 }
4225 else
4226 {
4227 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx, offset_rtx);
4228 }
4229 }
4230 else if (was_only_offset)
4231 {
4232 ret_rtx = offset_rtx;
4233 }
4234 else
4235 {
4236 obfree (storage);
4237 return;
4238 }
4239 }
4240 XEXP (mem_rtx, 0) = ret_rtx;
4241 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4242 return;
4243 }
4244 else
4245 {
4246 obfree (storage);
4247 return;
4248 }
4249}
4250#endif /* NOTYET */
4251
4252\f
4253/* Return 1 if the first insn to set the cc before INSN also sets the
4254   register REG_RTX; otherwise return 0.  */
4255int
4256last_to_set_cc (reg_rtx, insn)
4257 rtx reg_rtx, insn;
4258{
4259 rtx prev_insn = PREV_INSN (insn);
4260
4261 while (prev_insn)
4262 {
4263 if (GET_CODE (prev_insn) == NOTE)
4264 ;
4265
4266 else if (GET_CODE (prev_insn) == INSN)
4267 {
4268 if (GET_CODE (PATTERN (prev_insn)) != SET)
4269 return (0);
4270
4271 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
4272 {
4273 if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
4274 return (1);
4275
4276 return (0);
4277 }
4278
4279 else if (!doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
4280 return (0);
4281 }
4282
4283 else
4284 return (0);
4285
4286 prev_insn = PREV_INSN (prev_insn);
4287 }
4288
4289 return (0);
4290}
4291
4292\f
4293int
4294doesnt_set_condition_code (pat)
4295 rtx pat;
4296{
4297 switch (GET_CODE (pat))
4298 {
4299 case MEM:
4300 case REG:
4301 return (1);
4302
4303 default:
4304 return (0);
4305
4306 }
4307}
4308
4309\f
4310int
4311sets_condition_code (pat)
4312 rtx pat;
4313{
4314 switch (GET_CODE (pat))
4315 {
4316 case PLUS:
4317 case MINUS:
4318 case AND:
4319 case IOR:
4320 case XOR:
4321 case NOT:
4322 case NEG:
4323 case MULT:
4324 case DIV:
4325 case MOD:
4326 case UDIV:
4327 case UMOD:
4328 return (1);
4329
4330 default:
4331 return (0);
4332
4333 }
4334}
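/* The two predicates above mirror the i386 flag behaviour: the arithmetic
   and logical codes listed in sets_condition_code set the condition codes
   as a side effect, while plain register or memory moves leave them
   untouched.  An illustrative consequence (example only, not emitted by
   this file):

	addl %ebx,%eax
	jne .L1

   Here the jne can use the flags left by the addl, with no intervening
   testl; last_to_set_cc above recognizes exactly this situation.  */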
4335
4336\f
4337int
4338str_immediate_operand (op, mode)
4339 register rtx op;
4340 enum machine_mode mode;
4341{
4342 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
4343 {
4344 return (1);
4345 }
4346 return (0);
4347}
4348
4349\f
4350int
4351is_fp_insn (insn)
4352 rtx insn;
4353{
4354 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4355 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4356 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4357 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
4358 {
4359 return (1);
4360 }
4361
4362 return (0);
4363}
4364
4365/*
4366 Return 1 if the mode of the SET_DEST of insn is floating point
4367 and it is not an fld or a move from memory to memory.
4368 Otherwise return 0 */
4369int
4370is_fp_dest (insn)
4371 rtx insn;
4372{
4373 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4374 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4375 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4376 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4377 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
4378 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
4379      && GET_CODE (SET_SRC (PATTERN (insn))) != MEM)
4380 {
4381 return (1);
4382 }
4383
4384 return (0);
4385}
4386
4387/*
4388 Return 1 if the mode of the SET_DEST is floating point, the SET_DEST
4389 is a memory reference, and the source is a register.
4390*/
4391int
4392is_fp_store (insn)
4393 rtx insn;
4394{
4395 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4396 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4397 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4398 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4399 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
4400 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
4401 {
4402 return (1);
4403 }
4404
4405 return (0);
4406}
4407
4408\f
4409/*
4410 Return 1 if dep_insn sets a register which insn uses as a base
4411 or index to reference memory.
4412 otherwise return 0 */
4413
4414int
4415agi_dependent (insn, dep_insn)
4416 rtx insn, dep_insn;
4417{
4418 if (GET_CODE (dep_insn) == INSN
4419 && GET_CODE (PATTERN (dep_insn)) == SET
4420 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
4421 {
4422 return (reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn));
4423 }
4424
4425 if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
4426 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
4427 && push_operand (SET_DEST (PATTERN (dep_insn)),
4428 GET_MODE (SET_DEST (PATTERN (dep_insn)))))
4429 {
4430 return (reg_mentioned_in_mem (stack_pointer_rtx, insn));
4431 }
4432
4433 return (0);
4434}
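/* Background, assumed from the 486/Pentium optimization guidelines rather
   than stated in this file: an AGI (address generation interlock) stall
   occurs when a register is used as a base or index immediately after the
   insn that wrote it, for example

	movl %esp,%ebp
	movl 8(%ebp),%eax	<-- address generation must wait for %ebp

   agi_dependent is the predicate that recognizes such insn pairs,
   presumably so the scheduling hooks can keep them apart.  */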
4435
4436\f
4437/*
4438 Return 1 if reg is used in rtl as a base or index for a memory ref
4439 otherwise return 0. */
4440
4441int
4442reg_mentioned_in_mem (reg, rtl)
4443 rtx reg, rtl;
4444{
4445 register char *fmt;
4446 register int i;
4447 register enum rtx_code code;
4448
4449 if (rtl == NULL)
4450 return (0);
4451
4452 code = GET_CODE (rtl);
4453
4454 switch (code)
4455 {
4456 case HIGH:
4457 case CONST_INT:
4458 case CONST:
4459 case CONST_DOUBLE:
4460 case SYMBOL_REF:
4461 case LABEL_REF:
4462 case PC:
4463 case CC0:
4464 case SUBREG:
4465 return (0);
4466
4467
4468 }
4469
4470 if (code == MEM && reg_mentioned_p (reg, rtl))
4471 return (1);
4472
4473 fmt = GET_RTX_FORMAT (code);
4474 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4475 {
4476 if (fmt[i] == 'E')
4477 {
4478 register int j;
4479 for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
4480 {
4481 if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
4482 return 1;
4483 }
4484 }
4485
4486 else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
4487 return 1;
4488 }
4489
4490 return (0);
4491}
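/* Example of what the predicate above computes (illustrative only): given

	(set (reg:SI 0) (mem:SI (plus:SI (reg:SI 5) (const_int 8))))

   reg_mentioned_in_mem returns 1 for (reg:SI 5), which occurs inside the
   MEM address, but 0 for (reg:SI 0), which is only the destination.  */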
3f803cd9 4492\f
4493/* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4494
4495   operands[0] = result, initialized with the start address
4496   operands[1] = alignment of the address.
4497   operands[2] = scratch register, initialized with the start address when
4498		    not aligned, otherwise undefined
4499
4500 This is just the body. It needs the initialisations mentioned above and
4501 some address computing at the end. These things are done in i386.md. */
4502
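/* Rough C equivalent of the code emitted below, for illustration only
   (the byte order assumption is little endian: byte 0 of the loaded word
   is the byte at the lowest address):

	while (((unsigned long) p & 3) != 0)
	  {
	    if (*p == 0)
	      goto done;
	    p++;
	  }
	for (;;)
	  {
	    unsigned int w = *(unsigned int *) p;
	    if ((w & 0xff) == 0)
	      break;
	    if ((w & 0xff00) == 0)
	      { p += 1; break; }
	    if ((w & 0xff0000) == 0)
	      { p += 2; break; }
	    if ((w & 0xff000000) == 0)
	      { p += 3; break; }
	    p += 4;
	  }
   done: the length is p minus the start address; that final subtraction
   is done by the pattern in i386.md.  */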
4503char *
4504output_strlen_unroll (operands)
4505 rtx operands[];
4506{
4507 rtx xops[18];
4508
4509 xops[0] = operands[0]; /* Result */
4510 /* operands[1]; * Alignment */
4511 xops[1] = operands[2]; /* Scratch */
4512 xops[2] = GEN_INT (0);
4513 xops[3] = GEN_INT (2);
4514 xops[4] = GEN_INT (3);
4515 xops[5] = GEN_INT (4);
4516 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4517 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4518 xops[8] = gen_label_rtx (); /* label of main loop */
4519  if (TARGET_USE_Q_REG && QI_REG_P (xops[1]))
4520 xops[9] = gen_label_rtx (); /* pentium optimisation */
4521 xops[10] = gen_label_rtx (); /* end label 2 */
4522 xops[11] = gen_label_rtx (); /* end label 1 */
4523 xops[12] = gen_label_rtx (); /* end label */
4524 /* xops[13] * Temporary used */
4525 xops[14] = GEN_INT (0xff);
4526 xops[15] = GEN_INT (0xff00);
4527 xops[16] = GEN_INT (0xff0000);
4528 xops[17] = GEN_INT (0xff000000);
4529
4530 /* Loop to check 1..3 bytes for null to get an aligned pointer */
4531
4532  /* is there a known alignment and is it less than 4 */
4533 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
4534 {
4535 /* is there a known alignment and is it not 2 */
4536 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4537 {
4538 xops[6] = gen_label_rtx (); /* label when aligned to 3-byte */
4539 xops[7] = gen_label_rtx (); /* label when aligned to 2-byte */
4540
4541	  /* leave just the low two bits of the address */
4542	  /* if this is a q-register, then the high part is used later */
4543	  /* therefore use andl rather than andb */
4544 output_asm_insn (AS2 (and%L1,%4,%1), xops);
4545	  /* is aligned to 4-byte address when zero */
4546 output_asm_insn (AS1 (je,%l8), xops);
4547	  /* side effect of the andl: even parity when the remainder is 3 */
4548 output_asm_insn (AS1 (jp,%6), xops);
4549
4550 /* is it aligned to 2 bytes ? */
4551 if (QI_REG_P (xops[1]))
4552 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4553 else
4554 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4555 output_asm_insn (AS1 (je,%7), xops);
4556 }
4557 else
4558 {
4559 /* since the alignment is 2, we have to check 2 or 0 bytes */
4560
4561	  /* check if it is aligned to 4 bytes */
4562 output_asm_insn (AS2 (and%L1,%3,%1), xops);
4563	  /* is aligned to 4-byte address when zero */
4564 output_asm_insn (AS1 (je,%l8), xops);
4565 }
4566
4567 xops[13] = gen_rtx (MEM, QImode, xops[0]);
4568 /* now, compare the bytes */
4569      /* comparing with the high part of a q-reg gives shorter code */
4570 if (QI_REG_P (xops[1]))
4571 {
4572	  /* compare the first n unaligned bytes on a byte-by-byte basis */
4573 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4574 /* when zero we reached the end */
4575 output_asm_insn (AS1 (je,%l12), xops);
4576 /* increment the address */
4577 output_asm_insn (AS1 (inc%L0,%0), xops);
4578
4579 /* not needed with an alignment of 2 */
4580 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4581 {
4582 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4583 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4584 output_asm_insn (AS1 (je,%l12), xops);
4585 output_asm_insn (AS1 (inc%L0,%0), xops);
4586
4587 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4588 }
4589 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4590 }
4591 else
4592 {
4593 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4594 output_asm_insn (AS1 (je,%l12), xops);
4595 output_asm_insn (AS1 (inc%L0,%0), xops);
4596
4597 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4598 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4599 output_asm_insn (AS1 (je,%l12), xops);
4600 output_asm_insn (AS1 (inc%L0,%0), xops);
4601
4602 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4603 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4604 }
4605 output_asm_insn (AS1 (je,%l12), xops);
4606 output_asm_insn (AS1 (inc%L0,%0), xops);
4607 }
4608
4609 /* Generate loop to check 4 bytes at a time */
4610  /* IMHO it is not a good idea to align this loop.  It only makes the */
1853aadd 4611  /* code bigger and does not make it any faster. */
3f803cd9 4612  /* ASM_OUTPUT_LOOP_ALIGN (asm_out_file); */
4613 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
4614
4615 xops[13] = gen_rtx (MEM, SImode, xops[0]);
4616 output_asm_insn (AS2 (mov%L1,%13,%1), xops);
4617
4618 if (QI_REG_P (xops[1]))
4619 {
1853aadd 4620	  /* On i586 it is faster to combine the hi- and lo- parts as
4621	     a kind of lookahead.  If anding both yields zero, then one
4622	     of the two *could* be zero; otherwise neither of them is zero.
4623	     This saves one instruction.  On i486 this is slower
4624	     (tested with P-90, i486DX2-66, AMD486DX2-66).  */
3f803cd9 4625	  if (TARGET_PENTIUM)
4626 {
4627 output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
4628 output_asm_insn (AS1 (jne,%l9), xops);
4629 }
4630
4631 /* check first byte */
4632 output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
4633 output_asm_insn (AS1 (je,%l12), xops);
4634
4635 /* check second byte */
4636 output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
4637 output_asm_insn (AS1 (je,%l11), xops);
4638
4639	  if (TARGET_PENTIUM)
4640 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[9]));
4641 }
4642 else
4643 {
4644 /* check first byte */
4645 output_asm_insn (AS2 (test%L1,%14,%1), xops);
4646 output_asm_insn (AS1 (je,%l12), xops);
4647
4648 /* check second byte */
4649 output_asm_insn (AS2 (test%L1,%15,%1), xops);
4650 output_asm_insn (AS1 (je,%l11), xops);
4651 }
4652
4653 /* check third byte */
4654 output_asm_insn (AS2 (test%L1,%16,%1), xops);
4655 output_asm_insn (AS1 (je,%l10), xops);
4656
4657 /* check fourth byte and increment address */
4658 output_asm_insn (AS2 (add%L0,%5,%0), xops);
4659 output_asm_insn (AS2 (test%L1,%17,%1), xops);
4660 output_asm_insn (AS1 (jne,%l8), xops);
4661
4662 /* now generate fixups when the compare stops within a 4-byte word */
4663 output_asm_insn (AS2 (sub%L0,%4,%0), xops);
4664
4665 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
4666 output_asm_insn (AS1 (inc%L0,%0), xops);
4667
4668 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
4669 output_asm_insn (AS1 (inc%L0,%0), xops);
4670
4671 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));
4672
4673 RET;
4674}