]> gcc.gnu.org Git - gcc.git/blob - gcc/config/i386/i386.c
Whitespace, formatting, and spelling fixes in comments.
[gcc.git] / gcc / config / i386 / i386.c
1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 1992, 1994, 1995, 1996 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include <stdio.h>
22 #include <setjmp.h>
23 #include <ctype.h>
24 #include "config.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "real.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "tree.h"
35 #include "flags.h"
36 #include "function.h"
37
38 #ifdef EXTRA_CONSTRAINT
39 /* If EXTRA_CONSTRAINT is defined, then the 'S'
40 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
41 asm statements that need 'S' for class SIREG will break. */
42 error EXTRA_CONSTRAINT conflicts with S constraint letter
43 /* The previous line used to be #error, but some compilers barf
44 even if the conditional was untrue. */
45 #endif
46
/* Classification of an operand's location, used by
   ix86_{binary,unary}_operator_ok to decide whether a combination of
   operands is encodable by the hardware.  */
enum reg_mem
{
  reg_p,			/* operand is a register */
  mem_p,			/* operand is a memory reference */
  imm_p				/* operand is an immediate constant */
};
53
/* Processor costs (relative to an add).  One of the tables that
   ix86_cost (below) may point at, selected in override_options.  */
struct processor_costs i386_cost = {	/* 386 specific costs */
  1,					/* cost of an add instruction (2 cycles) */
  1,					/* cost of a lea instruction */
  3,					/* variable shift costs */
  2,					/* constant shift costs */
  6,					/* cost of starting a multiply */
  1,					/* cost of multiply per each bit set */
  23					/* cost of a divide/mod */
};
64
/* Cost table used when tuning for the 486 (and for PROCESSOR_COMMON).  */
struct processor_costs i486_cost = {	/* 486 specific costs */
  1,					/* cost of an add instruction */
  1,					/* cost of a lea instruction */
  3,					/* variable shift costs */
  2,					/* constant shift costs */
  12,					/* cost of starting a multiply */
  1,					/* cost of multiply per each bit set */
  40					/* cost of a divide/mod */
};
74
/* Cost table used when tuning for the Pentium (also used for the
   PentiumPro entries in processor_target_table).  */
struct processor_costs pentium_cost = {
  1,					/* cost of an add instruction */
  1,					/* cost of a lea instruction */
  3,					/* variable shift costs */
  1,					/* constant shift costs */
  12,					/* cost of starting a multiply */
  1,					/* cost of multiply per each bit set */
  25					/* cost of a divide/mod */
};
84
/* Cost table currently in effect; points at one of the tables above.  */
struct processor_costs *ix86_cost = &pentium_cost;

/* Build a MEM rtx in MODE addressed by the frame pointer.  */
#define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))

extern FILE *asm_out_file;
extern char *strcat ();

/* Forward declarations for output routines defined later in this file.  */
char *singlemove_string ();
char *output_move_const_single ();
char *output_fp_cc0_set ();

/* Register-name tables indexed by hard register number; the contents
   come from the *_REGISTER_NAMES macros in i386.h.  */
char *hi_reg_name[] = HI_REGISTER_NAMES;
char *qi_reg_name[] = QI_REGISTER_NAMES;
char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
99
/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in i386.h.  */

enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{
  /* ax, dx, cx, bx */
  AREG, DREG, CREG, BREG,
  /* si, di, bp, sp */
  SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
  /* FP registers: only the top two of the 387 stack get their own class */
  FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
  FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
  /* arg pointer */
  INDEX_REGS
};
115
/* Test and compare insns in i386.md store the information needed to
   generate branch and scc insns here.  */

struct rtx_def *i386_compare_op0 = NULL_RTX;	/* first operand of pending compare */
struct rtx_def *i386_compare_op1 = NULL_RTX;	/* second operand of pending compare */
/* Generator functions for the pending comparison; presumably the
   second is the equality/unsigned form -- confirm against i386.md.  */
struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();

/* Which cpu are we scheduling for.  */
enum processor_type ix86_cpu;

/* Which instruction set architecture to use.  */
int ix86_isa;

/* Strings to hold which cpu and instruction set architecture to use.  */
char *ix86_cpu_string;		/* for -mcpu=<xxx> */
char *ix86_isa_string;		/* for -misa=<xxx> */

/* Register allocation order: a string of register letters, validated
   by override_options and consumed by order_regs_for_local_alloc.  */
char *i386_reg_alloc_order;
static char regs_allocated[FIRST_PSEUDO_REGISTER]; /* 1 if reg named in the order */

/* # of registers to use to pass arguments.  */
char *i386_regparm_string;	/* # registers to use to pass args */
int i386_regparm;		/* i386_regparm_string as a number */

/* Alignment to use for loops and jumps, as powers of two.  The
   *_string forms hold the raw -m option text; the ints hold the
   parsed (or defaulted) values set by override_options.  */
char *i386_align_loops_string;	/* power of two alignment for loops */
char *i386_align_jumps_string;	/* power of two alignment for non-loop jumps */
char *i386_align_funcs_string;	/* power of two alignment for functions */

int i386_align_loops;		/* power of two alignment for loops */
int i386_align_jumps;		/* power of two alignment for non-loop jumps */
int i386_align_funcs;		/* power of two alignment for functions */
149
150 /* Sometimes certain combinations of command options do not make
151 sense on a particular target machine. You can define a macro
152 `OVERRIDE_OPTIONS' to take account of this. This macro, if
153 defined, is executed once just after all the command options have
154 been parsed.
155
156 Don't use this macro to turn on various extra optimizations for
157 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
158
159 void
160 override_options ()
161 {
162 int ch, i, j, regno;
163 char *p;
164 int def_align;
165
166 static struct ptt
167 {
168 char *name; /* Canonical processor name. */
169 enum processor_type processor; /* Processor type enum value. */
170 struct processor_costs *cost; /* Processor costs */
171 int target_enable; /* Target flags to enable. */
172 int target_disable; /* Target flags to disable. */
173 } processor_target_table[]
174 = {{PROCESSOR_COMMON_STRING, PROCESSOR_COMMON, &i486_cost, 0, 0},
175 {PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
176 {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
177 {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
178 {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
179 {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0},
180 {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0}};
181
182 int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
183
184 #ifdef SUBTARGET_OVERRIDE_OPTIONS
185 SUBTARGET_OVERRIDE_OPTIONS;
186 #endif
187
188 /* Validate registers in register allocation order */
189 if (i386_reg_alloc_order)
190 {
191 for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
192 {
193 switch (ch)
194 {
195 case 'a': regno = 0; break;
196 case 'd': regno = 1; break;
197 case 'c': regno = 2; break;
198 case 'b': regno = 3; break;
199 case 'S': regno = 4; break;
200 case 'D': regno = 5; break;
201 case 'B': regno = 6; break;
202
203 default: fatal ("Register '%c' is unknown", ch);
204 }
205
206 if (regs_allocated[regno])
207 fatal ("Register '%c' was already specified in the allocation order", ch);
208
209 regs_allocated[regno] = 1;
210 }
211 }
212
213 /* Get the architectural level. */
214 if (ix86_isa_string == (char *)0)
215 ix86_isa_string = PROCESSOR_DEFAULT_STRING;
216
217 for (i = 0; i < ptt_size; i++)
218 if (! strcmp (ix86_isa_string, processor_target_table[i].name))
219 {
220 ix86_isa = processor_target_table[i].processor;
221 if (ix86_cpu_string == (char *)0)
222 ix86_cpu_string = processor_target_table[i].name;
223 break;
224 }
225
226 if (i == ptt_size)
227 {
228 error ("bad value (%s) for -misa= switch", ix86_isa_string);
229 ix86_isa_string = PROCESSOR_DEFAULT_STRING;
230 ix86_isa = PROCESSOR_DEFAULT;
231 }
232
233 for (j = 0; j < ptt_size; j++)
234 if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
235 {
236 ix86_cpu = processor_target_table[j].processor;
237 if (i > j && (int)ix86_isa >= (int)PROCESSOR_PENTIUMPRO)
238 error ("-mcpu=%s does not support -march=%s", ix86_cpu_string, ix86_isa_string);
239
240 target_flags |= processor_target_table[j].target_enable;
241 target_flags &= ~processor_target_table[j].target_disable;
242 break;
243 }
244
245 if (j == ptt_size)
246 {
247 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
248 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
249 ix86_cpu = PROCESSOR_DEFAULT;
250 }
251
252 /* Validate -mregparm= value */
253 if (i386_regparm_string)
254 {
255 i386_regparm = atoi (i386_regparm_string);
256 if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
257 fatal ("-mregparm=%d is not between 0 and %d", i386_regparm, REGPARM_MAX);
258 }
259
260 def_align = (TARGET_386) ? 2 : 4;
261
262 /* Validate -malign-loops= value, or provide default */
263 if (i386_align_loops_string)
264 {
265 i386_align_loops = atoi (i386_align_loops_string);
266 if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
267 fatal ("-malign-loops=%d is not between 0 and %d",
268 i386_align_loops, MAX_CODE_ALIGN);
269 }
270 else
271 i386_align_loops = 2;
272
273 /* Validate -malign-jumps= value, or provide default */
274 if (i386_align_jumps_string)
275 {
276 i386_align_jumps = atoi (i386_align_jumps_string);
277 if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
278 fatal ("-malign-jumps=%d is not between 0 and %d",
279 i386_align_jumps, MAX_CODE_ALIGN);
280 }
281 else
282 i386_align_jumps = def_align;
283
284 /* Validate -malign-functions= value, or provide default */
285 if (i386_align_funcs_string)
286 {
287 i386_align_funcs = atoi (i386_align_funcs_string);
288 if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
289 fatal ("-malign-functions=%d is not between 0 and %d",
290 i386_align_funcs, MAX_CODE_ALIGN);
291 }
292 else
293 i386_align_funcs = def_align;
294
295 if (TARGET_OMIT_LEAF_FRAME_POINTER) /* keep nonleaf frame pointers */
296 flag_omit_frame_pointer = 1;
297 }
298 \f
299 /* A C statement (sans semicolon) to choose the order in which to
300 allocate hard registers for pseudo-registers local to a basic
301 block.
302
303 Store the desired register order in the array `reg_alloc_order'.
304 Element 0 should be the register to allocate first; element 1, the
305 next register; and so on.
306
307 The macro body should not assume anything about the contents of
308 `reg_alloc_order' before execution of the macro.
309
310 On most machines, it is not necessary to define this macro. */
311
312 void
313 order_regs_for_local_alloc ()
314 {
315 int i, ch, order, regno;
316
317 /* User specified the register allocation order */
318 if (i386_reg_alloc_order)
319 {
320 for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
321 {
322 switch (ch)
323 {
324 case 'a': regno = 0; break;
325 case 'd': regno = 1; break;
326 case 'c': regno = 2; break;
327 case 'b': regno = 3; break;
328 case 'S': regno = 4; break;
329 case 'D': regno = 5; break;
330 case 'B': regno = 6; break;
331 }
332
333 reg_alloc_order[order++] = regno;
334 }
335
336 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
337 {
338 if (!regs_allocated[i])
339 reg_alloc_order[order++] = i;
340 }
341 }
342
343 /* If users did not specify a register allocation order, use natural order */
344 else
345 {
346 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
347 reg_alloc_order[i] = i;
348 }
349 }
350
351 \f
/* Set optimization-level-dependent defaults; LEVEL is the -O level.
   Scheduling before reload aggravates the 386's register scarcity,
   so turn -fschedule-insns off by default at -O2 and above.  */

void
optimization_options (level)
     int level;
{
#ifdef INSN_SCHEDULING
  if (level >= 2)
    flag_schedule_insns = 0;
#endif
}
363 \f
364 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
365 attribute for DECL. The attributes in ATTRIBUTES have previously been
366 assigned to DECL. */
367
368 int
369 i386_valid_decl_attribute_p (decl, attributes, identifier, args)
370 tree decl;
371 tree attributes;
372 tree identifier;
373 tree args;
374 {
375 return 0;
376 }
377
/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
   attribute for TYPE.  The attributes in ATTRIBUTES have previously been
   assigned to TYPE.  Recognized attributes: stdcall, cdecl (both take no
   arguments) and regparm (takes one integer in [0, REGPARM_MAX]).  */

int
i386_valid_type_attribute_p (type, attributes, identifier, args)
     tree type;
     tree attributes;
     tree identifier;
     tree args;
{
  /* Only function types can carry a calling-convention attribute.
     NOTE(review): FIELD_DECL and TYPE_DECL are decl codes compared
     against a type's TREE_CODE here -- looks like they can never
     match; confirm against tree.def.  */
  if (TREE_CODE (type) != FUNCTION_TYPE
      && TREE_CODE (type) != FIELD_DECL
      && TREE_CODE (type) != TYPE_DECL)
    return 0;

  /* Stdcall attribute says callee is responsible for popping arguments
     if they are not variable.  */
  if (is_attribute_p ("stdcall", identifier))
    return (args == NULL_TREE);

  /* Cdecl attribute says the callee is a normal C declaration */
  if (is_attribute_p ("cdecl", identifier))
    return (args == NULL_TREE);

  /* Regparm attribute specifies how many integer arguments are to be
     passed in registers */
  if (is_attribute_p ("regparm", identifier))
    {
      tree cst;

      /* Exactly one argument must be supplied.  */
      if (!args || TREE_CODE (args) != TREE_LIST
	  || TREE_CHAIN (args) != NULL_TREE
	  || TREE_VALUE (args) == NULL_TREE)
	return 0;

      cst = TREE_VALUE (args);
      if (TREE_CODE (cst) != INTEGER_CST)
	return 0;

      /* The count must fit in [0, REGPARM_MAX].  */
      if (TREE_INT_CST_HIGH (cst) != 0
	  || TREE_INT_CST_LOW (cst) < 0
	  || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
	return 0;

      return 1;
    }

  return 0;
}
428
429 /* Return 0 if the attributes for two types are incompatible, 1 if they
430 are compatible, and 2 if they are nearly compatible (which causes a
431 warning to be generated). */
432
433 int
434 i386_comp_type_attributes (type1, type2)
435 tree type1;
436 tree type2;
437 {
438 return 1;
439 }
440
441 \f
442 /* Value is the number of bytes of arguments automatically
443 popped when returning from a subroutine call.
444 FUNDECL is the declaration node of the function (as a tree),
445 FUNTYPE is the data type of the function (as a tree),
446 or for a library call it is an identifier node for the subroutine name.
447 SIZE is the number of bytes of arguments passed on the stack.
448
449 On the 80386, the RTD insn may be used to pop them if the number
450 of args is fixed, but if the number is variable then the caller
451 must pop them all. RTD can't be used for library calls now
452 because the library is compiled with the Unix compiler.
453 Use of RTD is a selectable option, since it is incompatible with
454 standard Unix calling sequences. If the option is not selected,
455 the caller must always pop the args.
456
457 The attribute stdcall is equivalent to RTD on a per module basis. */
458
459 int
460 i386_return_pops_args (fundecl, funtype, size)
461 tree fundecl;
462 tree funtype;
463 int size;
464 {
465 int rtd = TARGET_RTD;
466
467 if (TREE_CODE (funtype) == IDENTIFIER_NODE)
468 return 0;
469
470 /* Cdecl functions override -mrtd, and never pop the stack */
471 if (!lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {
472
473 /* Stdcall functions will pop the stack if not variable args */
474 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
475 rtd = 1;
476
477 if (rtd
478 && (TYPE_ARG_TYPES (funtype) == NULL_TREE
479 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype))) == void_type_node)))
480 return size;
481 }
482
483 /* Lose any fake structure return argument */
484 if (aggregate_value_p (TREE_TYPE (funtype)))
485 return GET_MODE_SIZE (Pmode);
486
487 return 0;
488 }
489
490 \f
491 /* Argument support functions. */
492
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.  */

void
init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;	/* argument info to initialize */
     tree fntype;		/* tree ptr for function decl */
     rtx libname;		/* SYMBOL_REF of library name or 0 */
{
  static CUMULATIVE_ARGS zero_cum;
  tree param, next_param;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\ninit_cumulative_args (");
      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);
	  fprintf (stderr, "fntype code = %s, ret code = %s",
		   tree_code_name[ (int)TREE_CODE (fntype) ],
		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
	}
      else
	fprintf (stderr, "no fntype");

      if (libname)
	fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  /* Start from a clean state; zero_cum is static and so all zeros.  */
  *cum = zero_cum;

  /* Set up the number of registers to use for passing arguments.
     A regparm attribute on the function type overrides -mregparm.  */
  cum->nregs = i386_regparm;
  if (fntype)
    {
      tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));
      if (attr)
	cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
    }

  /* Determine if this function has variable arguments.  This is
     indicated by the last argument being 'void_type_node' if there
     are no variable arguments.  If there are variable arguments, then
     we won't pass anything in registers */

  if (cum->nregs)
    {
      for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
	   param != (tree)0;
	   param = next_param)
	{
	  next_param = TREE_CHAIN (param);
	  if (next_param == (tree)0 && TREE_VALUE (param) != void_type_node)
	    cum->nregs = 0;
	}
    }

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, ", nregs=%d )\n", cum->nregs);

  return;
}
556
557 /* Update the data in CUM to advance over an argument
558 of mode MODE and data type TYPE.
559 (TYPE is null for libcalls where that information may not be available.) */
560
561 void
562 function_arg_advance (cum, mode, type, named)
563 CUMULATIVE_ARGS *cum; /* current arg information */
564 enum machine_mode mode; /* current arg mode */
565 tree type; /* type of the argument or 0 if lib support */
566 int named; /* whether or not the argument was named */
567 {
568 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
569 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
570
571 if (TARGET_DEBUG_ARG)
572 fprintf (stderr,
573 "function_adv( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d )\n\n",
574 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
575
576 cum->words += words;
577 cum->nregs -= words;
578 cum->regno += words;
579
580 if (cum->nregs <= 0)
581 {
582 cum->nregs = 0;
583 cum->regno = 0;
584 }
585
586 return;
587 }
588
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int named;			/* != 0 for normal args, == 0 for ... args */
{
  rtx ret = NULL_RTX;
  int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  switch (mode)
    {
    default:			/* for now, pass fp/complex values on the stack */
      break;

      /* Integer-like arguments go in the next register(s) as long as
	 enough registers remain; otherwise fall back to the stack.  */
    case BLKmode:
    case DImode:
    case SImode:
    case HImode:
    case QImode:
      if (words <= cum->nregs)
	ret = gen_rtx (REG, mode, cum->regno);
      break;
    }

  if (TARGET_DEBUG_ARG)
    {
      /* NOTE(review): the "size" field below prints WORDS, not BYTES.
	 function_arg_advance does the same, so it appears deliberate,
	 but confirm.  */
      fprintf (stderr,
	       "function_arg( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d",
	       words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);

      if (ret)
	fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
      else
	fprintf (stderr, ", stack");

      fprintf (stderr, " )\n");
    }

  return ret;
}
644
645 /* For an arg passed partly in registers and partly in memory,
646 this is the number of registers used.
647 For args passed entirely in registers or entirely in memory, zero. */
648
649 int
650 function_arg_partial_nregs (cum, mode, type, named)
651 CUMULATIVE_ARGS *cum; /* current arg information */
652 enum machine_mode mode; /* current arg mode */
653 tree type; /* type of the argument or 0 if lib support */
654 int named; /* != 0 for normal args, == 0 for ... args */
655 {
656 return 0;
657 }
658
659 \f
/* Output an insn whose source is a 386 integer register.  SRC is the
   rtx for the register, and TEMPLATE is the op-code template.  SRC may
   be either SImode or DImode.

   The template will be output with operands[0] as SRC, and operands[1]
   as a pointer to the top of the 386 stack.  So a call from floatsidf2
   would look like this:

      output_op_from_reg (operands[1], AS1 (fild%z0,%1));

   where %z0 corresponds to the caller's operands[1], and is used to
   emit the proper size suffix.

   ??? Extend this to handle HImode - a 387 can load and store HImode
   values directly.  */

void
output_op_from_reg (src, template)
     rtx src;			/* integer register holding the value */
     char *template;		/* assembler template to emit */
{
  rtx xops[4];
  int size = GET_MODE_SIZE (GET_MODE (src));

  xops[0] = src;
  xops[1] = AT_SP (Pmode);	/* memory operand at the stack top */
  xops[2] = GEN_INT (size);	/* byte count, for the final sp adjust */
  xops[3] = stack_pointer_rtx;

  /* Push SRC onto the 386 stack, highest-numbered word first, so the
     emitted template can read it through (%esp).  */
  if (size > UNITS_PER_WORD)
    {
      rtx high;
      if (size > 2 * UNITS_PER_WORD)
	{
	  high = gen_rtx (REG, SImode, REGNO (src) + 2);
	  output_asm_insn (AS1 (push%L0,%0), &high);
	}
      high = gen_rtx (REG, SImode, REGNO (src) + 1);
      output_asm_insn (AS1 (push%L0,%0), &high);
    }
  output_asm_insn (AS1 (push%L0,%0), &src);

  /* Emit the caller's template; %0 is SRC, %1 the stack-top memory.  */
  output_asm_insn (template, xops);

  /* Discard the pushed copy by bumping the stack pointer back up.  */
  output_asm_insn (AS2 (add%L3,%2,%3), xops);
}
706 \f
/* Output an insn to pop a value from the 387 top-of-stack to 386
   register DEST.  The 387 register stack is popped if DIES is true.  If
   the mode of DEST is an integer mode, a `fist' integer store is done,
   otherwise a `fst' float store is done.  */

void
output_to_reg (dest, dies)
     rtx dest;			/* integer register to receive the value */
     int dies;			/* nonzero to pop the 387 stack */
{
  rtx xops[4];
  int size = GET_MODE_SIZE (GET_MODE (dest));

  xops[0] = AT_SP (Pmode);	/* memory operand at the stack top */
  xops[1] = stack_pointer_rtx;
  xops[2] = GEN_INT (size);	/* bytes of scratch stack space needed */
  xops[3] = dest;

  /* Make room on the 386 stack for the 387 to store into.  */
  output_asm_insn (AS2 (sub%L1,%2,%1), xops);

  if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
    {
      if (dies)
	output_asm_insn (AS1 (fistp%z3,%y0), xops);
      else
	output_asm_insn (AS1 (fist%z3,%y0), xops);
    }
  else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
    {
      if (dies)
	output_asm_insn (AS1 (fstp%z3,%y0), xops);
      else
	{
	  if (GET_MODE (dest) == XFmode)
	    {
	      /* There is no non-popping XFmode store; store-and-pop,
		 then reload the value to restore the 387 stack.  */
	      output_asm_insn (AS1 (fstp%z3,%y0), xops);
	      output_asm_insn (AS1 (fld%z3,%y0), xops);
	    }
	  else
	    output_asm_insn (AS1 (fst%z3,%y0), xops);
	}
    }
  else
    abort ();

  /* Pop the stored words into DEST, lowest-numbered register first.  */
  output_asm_insn (AS1 (pop%L0,%0), &dest);

  if (size > UNITS_PER_WORD)
    {
      dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
      output_asm_insn (AS1 (pop%L0,%0), &dest);
      if (size > 2 * UNITS_PER_WORD)
	{
	  dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
	  output_asm_insn (AS1 (pop%L0,%0), &dest);
	}
    }
}
765 \f
766 char *
767 singlemove_string (operands)
768 rtx *operands;
769 {
770 rtx x;
771 if (GET_CODE (operands[0]) == MEM
772 && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
773 {
774 if (XEXP (x, 0) != stack_pointer_rtx)
775 abort ();
776 return "push%L1 %1";
777 }
778 else if (GET_CODE (operands[1]) == CONST_DOUBLE)
779 {
780 return output_move_const_single (operands);
781 }
782 else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
783 return AS2 (mov%L0,%1,%0);
784 else if (CONSTANT_P (operands[1]))
785 return AS2 (mov%L0,%1,%0);
786 else
787 {
788 output_asm_insn ("push%L1 %1", operands);
789 return "pop%L0 %0";
790 }
791 }
792 \f
793 /* Return a REG that occurs in ADDR with coefficient 1.
794 ADDR can be effectively incremented by incrementing REG. */
795
796 static rtx
797 find_addr_reg (addr)
798 rtx addr;
799 {
800 while (GET_CODE (addr) == PLUS)
801 {
802 if (GET_CODE (XEXP (addr, 0)) == REG)
803 addr = XEXP (addr, 0);
804 else if (GET_CODE (XEXP (addr, 1)) == REG)
805 addr = XEXP (addr, 1);
806 else if (CONSTANT_P (XEXP (addr, 0)))
807 addr = XEXP (addr, 1);
808 else if (CONSTANT_P (XEXP (addr, 1)))
809 addr = XEXP (addr, 0);
810 else
811 abort ();
812 }
813 if (GET_CODE (addr) == REG)
814 return addr;
815 abort ();
816 }
817
818 \f
819 /* Output an insn to add the constant N to the register X. */
820
821 static void
822 asm_add (n, x)
823 int n;
824 rtx x;
825 {
826 rtx xops[2];
827 xops[0] = x;
828
829 if (n == -1)
830 output_asm_insn (AS1 (dec%L0,%0), xops);
831 else if (n == 1)
832 output_asm_insn (AS1 (inc%L0,%0), xops);
833 else if (n < 0)
834 {
835 xops[1] = GEN_INT (-n);
836 output_asm_insn (AS2 (sub%L0,%1,%0), xops);
837 }
838 else if (n > 0)
839 {
840 xops[1] = GEN_INT (n);
841 output_asm_insn (AS2 (add%L0,%1,%0), xops);
842 }
843 }
844
845 \f
846 /* Output assembler code to perform a doubleword move insn
847 with operands OPERANDS. */
848
849 char *
850 output_move_double (operands)
851 rtx *operands;
852 {
853 enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
854 rtx latehalf[2];
855 rtx middlehalf[2];
856 rtx xops[2];
857 rtx addreg0 = 0, addreg1 = 0;
858 int dest_overlapped_low = 0;
859 int size = GET_MODE_SIZE (GET_MODE (operands[0]));
860
861 middlehalf[0] = 0;
862 middlehalf[1] = 0;
863
864 /* First classify both operands. */
865
866 if (REG_P (operands[0]))
867 optype0 = REGOP;
868 else if (offsettable_memref_p (operands[0]))
869 optype0 = OFFSOP;
870 else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
871 optype0 = POPOP;
872 else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
873 optype0 = PUSHOP;
874 else if (GET_CODE (operands[0]) == MEM)
875 optype0 = MEMOP;
876 else
877 optype0 = RNDOP;
878
879 if (REG_P (operands[1]))
880 optype1 = REGOP;
881 else if (CONSTANT_P (operands[1]))
882 optype1 = CNSTOP;
883 else if (offsettable_memref_p (operands[1]))
884 optype1 = OFFSOP;
885 else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
886 optype1 = POPOP;
887 else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
888 optype1 = PUSHOP;
889 else if (GET_CODE (operands[1]) == MEM)
890 optype1 = MEMOP;
891 else
892 optype1 = RNDOP;
893
894 /* Check for the cases that the operand constraints are not
895 supposed to allow to happen. Abort if we get one,
896 because generating code for these cases is painful. */
897
898 if (optype0 == RNDOP || optype1 == RNDOP)
899 abort ();
900
901 /* If one operand is decrementing and one is incrementing
902 decrement the former register explicitly
903 and change that operand into ordinary indexing. */
904
905 if (optype0 == PUSHOP && optype1 == POPOP)
906 {
907 /* ??? Can this ever happen on i386? */
908 operands[0] = XEXP (XEXP (operands[0], 0), 0);
909 asm_add (-size, operands[0]);
910 if (GET_MODE (operands[1]) == XFmode)
911 operands[0] = gen_rtx (MEM, XFmode, operands[0]);
912 else if (GET_MODE (operands[0]) == DFmode)
913 operands[0] = gen_rtx (MEM, DFmode, operands[0]);
914 else
915 operands[0] = gen_rtx (MEM, DImode, operands[0]);
916 optype0 = OFFSOP;
917 }
918
919 if (optype0 == POPOP && optype1 == PUSHOP)
920 {
921 /* ??? Can this ever happen on i386? */
922 operands[1] = XEXP (XEXP (operands[1], 0), 0);
923 asm_add (-size, operands[1]);
924 if (GET_MODE (operands[1]) == XFmode)
925 operands[1] = gen_rtx (MEM, XFmode, operands[1]);
926 else if (GET_MODE (operands[1]) == DFmode)
927 operands[1] = gen_rtx (MEM, DFmode, operands[1]);
928 else
929 operands[1] = gen_rtx (MEM, DImode, operands[1]);
930 optype1 = OFFSOP;
931 }
932
933 /* If an operand is an unoffsettable memory ref, find a register
934 we can increment temporarily to make it refer to the second word. */
935
936 if (optype0 == MEMOP)
937 addreg0 = find_addr_reg (XEXP (operands[0], 0));
938
939 if (optype1 == MEMOP)
940 addreg1 = find_addr_reg (XEXP (operands[1], 0));
941
942 /* Ok, we can do one word at a time.
943 Normally we do the low-numbered word first,
944 but if either operand is autodecrementing then we
945 do the high-numbered word first.
946
947 In either case, set up in LATEHALF the operands to use
948 for the high-numbered word and in some cases alter the
949 operands in OPERANDS to be suitable for the low-numbered word. */
950
951 if (size == 12)
952 {
953 if (optype0 == REGOP)
954 {
955 middlehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
956 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 2);
957 }
958 else if (optype0 == OFFSOP)
959 {
960 middlehalf[0] = adj_offsettable_operand (operands[0], 4);
961 latehalf[0] = adj_offsettable_operand (operands[0], 8);
962 }
963 else
964 {
965 middlehalf[0] = operands[0];
966 latehalf[0] = operands[0];
967 }
968
969 if (optype1 == REGOP)
970 {
971 middlehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
972 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 2);
973 }
974 else if (optype1 == OFFSOP)
975 {
976 middlehalf[1] = adj_offsettable_operand (operands[1], 4);
977 latehalf[1] = adj_offsettable_operand (operands[1], 8);
978 }
979 else if (optype1 == CNSTOP)
980 {
981 if (GET_CODE (operands[1]) == CONST_DOUBLE)
982 {
983 REAL_VALUE_TYPE r; long l[3];
984
985 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
986 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
987 operands[1] = GEN_INT (l[0]);
988 middlehalf[1] = GEN_INT (l[1]);
989 latehalf[1] = GEN_INT (l[2]);
990 }
991 else if (CONSTANT_P (operands[1]))
992 /* No non-CONST_DOUBLE constant should ever appear here. */
993 abort ();
994 }
995 else
996 {
997 middlehalf[1] = operands[1];
998 latehalf[1] = operands[1];
999 }
1000 }
1001 else /* size is not 12: */
1002 {
1003 if (optype0 == REGOP)
1004 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
1005 else if (optype0 == OFFSOP)
1006 latehalf[0] = adj_offsettable_operand (operands[0], 4);
1007 else
1008 latehalf[0] = operands[0];
1009
1010 if (optype1 == REGOP)
1011 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
1012 else if (optype1 == OFFSOP)
1013 latehalf[1] = adj_offsettable_operand (operands[1], 4);
1014 else if (optype1 == CNSTOP)
1015 split_double (operands[1], &operands[1], &latehalf[1]);
1016 else
1017 latehalf[1] = operands[1];
1018 }
1019
1020 /* If insn is effectively movd N (sp),-(sp) then we will do the
1021 high word first. We should use the adjusted operand 1
1022 (which is N+4 (sp) or N+8 (sp))
1023 for the low word and middle word as well,
1024 to compensate for the first decrement of sp. */
1025 if (optype0 == PUSHOP
1026 && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
1027 && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
1028 middlehalf[1] = operands[1] = latehalf[1];
1029
1030 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1031 if the upper part of reg N does not appear in the MEM, arrange to
1032 emit the move late-half first. Otherwise, compute the MEM address
1033 into the upper part of N and use that as a pointer to the memory
1034 operand. */
1035 if (optype0 == REGOP
1036 && (optype1 == OFFSOP || optype1 == MEMOP))
1037 {
1038 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1039 && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1040 {
1041 /* If both halves of dest are used in the src memory address,
1042 compute the address into latehalf of dest. */
1043 compadr:
1044 xops[0] = latehalf[0];
1045 xops[1] = XEXP (operands[1], 0);
1046 output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
1047 if( GET_MODE (operands[1]) == XFmode )
1048 {
1049 /* abort (); */
1050 operands[1] = gen_rtx (MEM, XFmode, latehalf[0]);
1051 middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
1052 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1053 }
1054 else
1055 {
1056 operands[1] = gen_rtx (MEM, DImode, latehalf[0]);
1057 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1058 }
1059 }
1060 else if (size == 12
1061 && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
1062 {
1063 /* Check for two regs used by both source and dest. */
1064 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1065 || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1066 goto compadr;
1067
1068 /* JRV says this can't happen: */
1069 if (addreg0 || addreg1)
1070 abort();
1071
1072 /* Only the middle reg conflicts; simply put it last. */
1073 output_asm_insn (singlemove_string (operands), operands);
1074 output_asm_insn (singlemove_string (latehalf), latehalf);
1075 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1076 return "";
1077 }
1078 else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
	/* If the low half of dest is mentioned in the source memory
	   address, then arrange to emit the late half of the move first.  */
1081 dest_overlapped_low = 1;
1082 }
1083
1084 /* If one or both operands autodecrementing,
1085 do the two words, high-numbered first. */
1086
1087 /* Likewise, the first move would clobber the source of the second one,
1088 do them in the other order. This happens only for registers;
1089 such overlap can't happen in memory unless the user explicitly
1090 sets it up, and that is an undefined circumstance. */
1091
1092 /*
1093 if (optype0 == PUSHOP || optype1 == PUSHOP
1094 || (optype0 == REGOP && optype1 == REGOP
1095 && REGNO (operands[0]) == REGNO (latehalf[1]))
1096 || dest_overlapped_low)
1097 */
1098 if (optype0 == PUSHOP || optype1 == PUSHOP
1099 || (optype0 == REGOP && optype1 == REGOP
1100 && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
1101 || REGNO (operands[0]) == REGNO (latehalf[1])))
1102 || dest_overlapped_low)
1103 {
1104 /* Make any unoffsettable addresses point at high-numbered word. */
1105 if (addreg0)
1106 asm_add (size-4, addreg0);
1107 if (addreg1)
1108 asm_add (size-4, addreg1);
1109
1110 /* Do that word. */
1111 output_asm_insn (singlemove_string (latehalf), latehalf);
1112
1113 /* Undo the adds we just did. */
1114 if (addreg0)
1115 asm_add (-4, addreg0);
1116 if (addreg1)
1117 asm_add (-4, addreg1);
1118
1119 if (size == 12)
1120 {
1121 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1122 if (addreg0)
1123 asm_add (-4, addreg0);
1124 if (addreg1)
1125 asm_add (-4, addreg1);
1126 }
1127
1128 /* Do low-numbered word. */
1129 return singlemove_string (operands);
1130 }
1131
1132 /* Normal case: do the two words, low-numbered first. */
1133
1134 output_asm_insn (singlemove_string (operands), operands);
1135
1136 /* Do the middle one of the three words for long double */
1137 if (size == 12)
1138 {
1139 if (addreg0)
1140 asm_add (4, addreg0);
1141 if (addreg1)
1142 asm_add (4, addreg1);
1143
1144 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1145 }
1146
1147 /* Make any unoffsettable addresses point at high-numbered word. */
1148 if (addreg0)
1149 asm_add (4, addreg0);
1150 if (addreg1)
1151 asm_add (4, addreg1);
1152
1153 /* Do that word. */
1154 output_asm_insn (singlemove_string (latehalf), latehalf);
1155
1156 /* Undo the adds we just did. */
1157 if (addreg0)
1158 asm_add (4-size, addreg0);
1159 if (addreg1)
1160 asm_add (4-size, addreg1);
1161
1162 return "";
1163 }
1164
1165 \f
1166 #define MAX_TMPS 2 /* max temporary registers used */
1167
1168 /* Output the appropriate code to move push memory on the stack */
1169
/* Emit pushes that move LENGTH bytes of the offsettable memory source
   OPERANDS[1] onto the stack, highest-addressed word first.
   OPERANDS[TMP_START .. N_OPERANDS-1] may name scratch registers; any
   that do not overlap the source are used to pair a register load with
   a later push instead of pushing directly from memory.  INSN is used
   only for error reporting.  LENGTH must be a multiple of 4.
   Always returns "" since all assembly is emitted here.  */

char *
output_move_pushmem (operands, insn, length, tmp_start, n_operands)
     rtx operands[];
     rtx insn;
     int length;
     int tmp_start;
     int n_operands;
{
  /* Per-scratch bookkeeping: the load template, the push template, and
     the operands (memory source in xops[0], scratch reg in xops[1]).  */
  struct {
    char *load;
    char *push;
    rtx xops[2];
  } tmp_info[MAX_TMPS];

  rtx src = operands[1];
  int max_tmps = 0;
  int offset = 0;
  /* Nonzero when the source address involves the stack pointer; every
     push then shifts the source by 4, which stack_offset compensates.  */
  int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
  int stack_offset = 0;
  int i, num_tmps;
  rtx xops[1];

  if (!offsettable_memref_p (src))
    fatal_insn ("Source is not offsettable", insn);

  if ((length & 3) != 0)
    fatal_insn ("Pushing non-word aligned size", insn);

  /* Figure out which temporary registers we have available.  A scratch
     that overlaps the source address cannot be clobbered early.  */
  for (i = tmp_start; i < n_operands; i++)
    {
      if (GET_CODE (operands[i]) == REG)
	{
	  if (reg_overlap_mentioned_p (operands[i], src))
	    continue;

	  tmp_info[ max_tmps++ ].xops[1] = operands[i];
	  if (max_tmps == MAX_TMPS)
	    break;
	}
    }

  /* No scratch registers: push straight from memory, word by word.  */
  if (max_tmps == 0)
    for (offset = length - 4; offset >= 0; offset -= 4)
      {
	xops[0] = adj_offsettable_operand (src, offset + stack_offset);
	output_asm_insn (AS1(push%L0,%0), xops);
	if (stack_p)
	  stack_offset += 4;
      }

  else
    /* Batch up to max_tmps words: issue all the loads, then all the
       pushes, so loads and pushes can overlap.  */
    for (offset = length - 4; offset >= 0; )
      {
	for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
	  {
	    tmp_info[num_tmps].load = AS2(mov%L0,%0,%1);
	    tmp_info[num_tmps].push = AS1(push%L0,%1);
	    tmp_info[num_tmps].xops[0] = adj_offsettable_operand (src, offset + stack_offset);
	    offset -= 4;
	  }

	for (i = 0; i < num_tmps; i++)
	  output_asm_insn (tmp_info[i].load, tmp_info[i].xops);

	for (i = 0; i < num_tmps; i++)
	  output_asm_insn (tmp_info[i].push, tmp_info[i].xops);

	if (stack_p)
	  stack_offset += 4*num_tmps;
      }

  return "";
}
1245
1246 \f
1247
1248 /* Output the appropriate code to move data between two memory locations */
1249
/* Emit code moving LENGTH bytes from offsettable memory OPERANDS[1] to
   offsettable memory OPERANDS[0], using the scratch registers named in
   OPERANDS[TMP_START .. N_OPERANDS-1].  A destination of the form
   (mem (pre_inc sp)) is a push and is delegated to output_move_pushmem.
   An odd LENGTH needs a byte-addressable (QImode-capable) scratch for
   the final byte.  INSN is used only for error reporting.
   Always returns "" since all assembly is emitted here.  */

char *
output_move_memory (operands, insn, length, tmp_start, n_operands)
     rtx operands[];
     rtx insn;
     int length;
     int tmp_start;
     int n_operands;
{
  /* Per-scratch bookkeeping: load/store templates plus operands
     (dest in xops[0], source in xops[1], scratch reg in xops[2]).  */
  struct {
    char *load;
    char *store;
    rtx xops[3];
  } tmp_info[MAX_TMPS];

  rtx dest = operands[0];
  rtx src = operands[1];
  rtx qi_tmp = NULL_RTX;	/* scratch usable for the odd final byte */
  int max_tmps = 0;
  int offset = 0;
  int i, num_tmps;
  rtx xops[3];

  if (GET_CODE (dest) == MEM
      && GET_CODE (XEXP (dest, 0)) == PRE_INC
      && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
    return output_move_pushmem (operands, insn, length, tmp_start, n_operands);

  if (!offsettable_memref_p (src))
    fatal_insn ("Source is not offsettable", insn);

  if (!offsettable_memref_p (dest))
    fatal_insn ("Destination is not offsettable", insn);

  /* Figure out which temporary registers we have available.  Scratches
     must not overlap either memory address, since they are clobbered
     while both addresses are still live.  */
  for (i = tmp_start; i < n_operands; i++)
    {
      if (GET_CODE (operands[i]) == REG)
	{
	  if ((length & 1) != 0 && !qi_tmp && QI_REG_P (operands[i]))
	    qi_tmp = operands[i];

	  if (reg_overlap_mentioned_p (operands[i], dest))
	    fatal_insn ("Temporary register overlaps the destination", insn);

	  if (reg_overlap_mentioned_p (operands[i], src))
	    fatal_insn ("Temporary register overlaps the source", insn);

	  tmp_info[ max_tmps++ ].xops[2] = operands[i];
	  if (max_tmps == MAX_TMPS)
	    break;
	}
    }

  if (max_tmps == 0)
    fatal_insn ("No scratch registers were found to do memory->memory moves", insn);

  if ((length & 1) != 0)
    {
      if (!qi_tmp)
	fatal_insn ("No byte register found when moving odd # of bytes.", insn);
    }

  /* Copy in batches of up to max_tmps pieces (words, then halfwords):
     issue all the loads first, then all the stores.  */
  while (length > 1)
    {
      for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
	{
	  if (length >= 4)
	    {
	      tmp_info[num_tmps].load = AS2(mov%L0,%1,%2);
	      tmp_info[num_tmps].store = AS2(mov%L0,%2,%0);
	      tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
	      tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
	      offset += 4;
	      length -= 4;
	    }
	  else if (length >= 2)
	    {
	      tmp_info[num_tmps].load = AS2(mov%W0,%1,%2);
	      tmp_info[num_tmps].store = AS2(mov%W0,%2,%0);
	      tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
	      tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
	      offset += 2;
	      length -= 2;
	    }
	  else
	    break;
	}

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].load, tmp_info[i].xops);

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
    }

  /* Move the final odd byte through the byte-capable scratch.  */
  if (length == 1)
    {
      xops[0] = adj_offsettable_operand (dest, offset);
      xops[1] = adj_offsettable_operand (src, offset);
      xops[2] = qi_tmp;
      output_asm_insn (AS2(mov%B0,%1,%2), xops);
      output_asm_insn (AS2(mov%B0,%2,%0), xops);
    }

  return "";
}
1356
1357 \f
/* Classify the CONST_DOUBLE X as an 80387 "standard" constant:
   return 1 for +0.0 (loadable with fldz), 2 for 1.0 (loadable with
   fld1), 0 for everything else.  */
int
standard_80387_constant_p (x)
     rtx x;
{
#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  REAL_VALUE_TYPE d;
  jmp_buf handler;
  int is0, is1;

  /* Any floating-point trap raised while examining the value lands
     here; treat the constant as non-standard in that case.  */
  if (setjmp (handler))
    return 0;

  set_float_handler (handler);
  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
  /* -0.0 is excluded because fldz produces +0.0.  */
  is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
  is1 = REAL_VALUES_EQUAL (d, dconst1);
  set_float_handler (NULL_PTR);

  if (is0)
    return 1;

  if (is1)
    return 2;

  /* Note that on the 80387, other constants, such as pi,
     are much slower to load as standard constants
     than to load from doubles in memory!  */
#endif

  return 0;
}
1389
/* Output the move of the single-word constant OPERANDS[1] into
   OPERANDS[0].  Uses the dedicated fldz/fld1 instructions when the
   destination is an 80387 register and the constant is 0.0 or 1.0.
   Returns the assembler template for the move.  */
char *
output_move_const_single (operands)
     rtx *operands;
{
  if (FP_REG_P (operands[0]))
    {
      int conval = standard_80387_constant_p (operands[1]);

      if (conval == 1)
	return "fldz";

      if (conval == 2)
	return "fld1";
    }

  /* Otherwise convert the value to its 32-bit single-precision target
     bit pattern and move it as an integer.  XFmode does not fit in one
     word, so it cannot appear here.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE)
    {
      REAL_VALUE_TYPE r; long l;

      if (GET_MODE (operands[1]) == XFmode)
	abort ();

      REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
      REAL_VALUE_TO_TARGET_SINGLE (r, l);
      operands[1] = GEN_INT (l);
    }
  return singlemove_string (operands);
}
1417 \f
1418 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1419 reference and a constant. */
1420
1421 int
1422 symbolic_operand (op, mode)
1423 register rtx op;
1424 enum machine_mode mode;
1425 {
1426 switch (GET_CODE (op))
1427 {
1428 case SYMBOL_REF:
1429 case LABEL_REF:
1430 return 1;
1431 case CONST:
1432 op = XEXP (op, 0);
1433 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1434 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1435 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1436 default:
1437 return 0;
1438 }
1439 }
1440
1441 /* Test for a valid operand for a call instruction.
1442 Don't allow the arg pointer register or virtual regs
1443 since they may change into reg + const, which the patterns
1444 can't handle yet. */
1445
1446 int
1447 call_insn_operand (op, mode)
1448 rtx op;
1449 enum machine_mode mode;
1450 {
1451 if (GET_CODE (op) == MEM
1452 && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1453 /* This makes a difference for PIC. */
1454 && general_operand (XEXP (op, 0), Pmode))
1455 || (GET_CODE (XEXP (op, 0)) == REG
1456 && XEXP (op, 0) != arg_pointer_rtx
1457 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1458 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1459 return 1;
1460 return 0;
1461 }
1462
1463 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1464 even if pic. */
1465
1466 int
1467 expander_call_insn_operand (op, mode)
1468 rtx op;
1469 enum machine_mode mode;
1470 {
1471 if (GET_CODE (op) == MEM
1472 && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1473 || (GET_CODE (XEXP (op, 0)) == REG
1474 && XEXP (op, 0) != arg_pointer_rtx
1475 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1476 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1477 return 1;
1478 return 0;
1479 }
1480
1481 /* Return 1 if OP is a comparison operator that can use the condition code
1482 generated by an arithmetic operation. */
1483
1484 int
1485 arithmetic_comparison_operator (op, mode)
1486 register rtx op;
1487 enum machine_mode mode;
1488 {
1489 enum rtx_code code;
1490
1491 if (mode != VOIDmode && mode != GET_MODE (op))
1492 return 0;
1493 code = GET_CODE (op);
1494 if (GET_RTX_CLASS (code) != '<')
1495 return 0;
1496
1497 return (code != GT && code != LE);
1498 }
1499 \f
1500 /* Returns 1 if OP contains a symbol reference */
1501
1502 int
1503 symbolic_reference_mentioned_p (op)
1504 rtx op;
1505 {
1506 register char *fmt;
1507 register int i;
1508
1509 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1510 return 1;
1511
1512 fmt = GET_RTX_FORMAT (GET_CODE (op));
1513 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1514 {
1515 if (fmt[i] == 'E')
1516 {
1517 register int j;
1518
1519 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1520 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1521 return 1;
1522 }
1523 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1524 return 1;
1525 }
1526
1527 return 0;
1528 }
1529 \f
1530 /* Attempt to expand a binary operator. Make the expansion closer to the
1531 actual machine, then just general_operand, which will allow 3 separate
1532 memory references (one output, two input) in a single insn. Return
1533 whether the insn fails, or succeeds. */
1534
1535 int
1536 ix86_expand_binary_operator (code, mode, operands)
1537 enum rtx_code code;
1538 enum machine_mode mode;
1539 rtx operands[];
1540 {
1541 rtx insn;
1542 int i;
1543 int modified;
1544
1545 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1546 if (GET_RTX_CLASS (code) == 'c'
1547 && (rtx_equal_p (operands[0], operands[2])
1548 || immediate_operand (operands[1], mode)))
1549 {
1550 rtx temp = operands[1];
1551 operands[1] = operands[2];
1552 operands[2] = temp;
1553 }
1554
1555 /* If optimizing, copy to regs to improve CSE */
1556 if (TARGET_PSEUDO && optimize && ((reload_in_progress | reload_completed) == 0))
1557 {
1558 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1559 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1560
1561 if (GET_CODE (operands[2]) == MEM)
1562 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1563
1564 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1565 {
1566 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1567 emit_move_insn (temp, operands[1]);
1568 operands[1] = temp;
1569 return TRUE;
1570 }
1571 }
1572
1573 if (!ix86_binary_operator_ok (code, mode, operands))
1574 {
1575 /* If not optimizing, try to make a valid insn (optimize code previously did
1576 this above to improve chances of CSE) */
1577
1578 if ((!TARGET_PSEUDO || !optimize)
1579 && ((reload_in_progress | reload_completed) == 0)
1580 && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
1581 {
1582 modified = FALSE;
1583 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1584 {
1585 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1586 modified = TRUE;
1587 }
1588
1589 if (GET_CODE (operands[2]) == MEM)
1590 {
1591 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1592 modified = TRUE;
1593 }
1594
1595 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1596 {
1597 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1598 emit_move_insn (temp, operands[1]);
1599 operands[1] = temp;
1600 return TRUE;
1601 }
1602
1603 if (modified && !ix86_binary_operator_ok (code, mode, operands))
1604 return FALSE;
1605 }
1606 else
1607 return FALSE;
1608 }
1609
1610 return TRUE;
1611 }
1612 \f
1613 /* Return TRUE or FALSE depending on whether the binary operator meets the
1614 appropriate constraints. */
1615
1616 int
1617 ix86_binary_operator_ok (code, mode, operands)
1618 enum rtx_code code;
1619 enum machine_mode mode;
1620 rtx operands[3];
1621 {
1622 return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1623 && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1624 }
1625 \f
1626 /* Attempt to expand a unary operator. Make the expansion closer to the
1627 actual machine, then just general_operand, which will allow 2 separate
1628 memory references (one output, one input) in a single insn. Return
1629 whether the insn fails, or succeeds. */
1630
1631 int
1632 ix86_expand_unary_operator (code, mode, operands)
1633 enum rtx_code code;
1634 enum machine_mode mode;
1635 rtx operands[];
1636 {
1637 rtx insn;
1638
1639 /* If optimizing, copy to regs to improve CSE */
1640 if (TARGET_PSEUDO
1641 && optimize
1642 && ((reload_in_progress | reload_completed) == 0)
1643 && GET_CODE (operands[1]) == MEM)
1644 {
1645 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1646 }
1647
1648 if (!ix86_unary_operator_ok (code, mode, operands))
1649 {
1650 if ((!TARGET_PSEUDO || !optimize)
1651 && ((reload_in_progress | reload_completed) == 0)
1652 && GET_CODE (operands[1]) == MEM)
1653 {
1654 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1655 if (!ix86_unary_operator_ok (code, mode, operands))
1656 return FALSE;
1657 }
1658 else
1659 return FALSE;
1660 }
1661
1662 return TRUE;
1663 }
1664 \f
1665 /* Return TRUE or FALSE depending on whether the unary operator meets the
1666 appropriate constraints. */
1667
int
ix86_unary_operator_ok (code, mode, operands)
     enum rtx_code code;
     enum machine_mode mode;
     rtx operands[2];
{
  /* Every unary form the expander can produce is acceptable, so no
     constraint is imposed here; kept as a function for symmetry with
     ix86_binary_operator_ok.  */
  return TRUE;
}
1676
1677 \f
1678
1679 static rtx pic_label_rtx;
1680
/* This function generates code for -fpic that loads %ebx with
   the return address of the caller and then returns.  */
void
asm_output_function_prefix (file, name)
     FILE * file;
     char * name;
{
  rtx xops[2];
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  xops[0] = pic_offset_table_rtx;
  xops[1] = stack_pointer_rtx;

  /* Deep branch prediction favors having a return for every call, so
     emit a tiny shared thunk: it copies the return address (at the top
     of the stack) into the PIC register and returns.  The prologue
     calls it through the label in pic_label_rtx.  */
  if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
    {
      if (pic_label_rtx == 0)
	pic_label_rtx = (rtx) gen_label_rtx ();
      ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (pic_label_rtx));
      output_asm_insn ("movl (%1),%0", xops);
      output_asm_insn ("ret", xops);
    }
}
1704
1705 /* Set up the stack and frame (if desired) for the function. */
1706
void
function_prologue (file, size)
     FILE *file;
     int size;
{
  /* Intentionally empty: the prologue is emitted as RTL by
     ix86_expand_prologue rather than printed from this text hook.  */
}
1713
/* Generate the prologue insns for the current function: save the frame
   pointer, allocate the stack frame (from get_frame_size), save the
   call-saved registers, and set up the PIC register if needed.  */
1717
void
ix86_expand_prologue ()
{
  register int regno;
  int limit;
  rtx xops[4];
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  long tsize = get_frame_size ();

  xops[0] = stack_pointer_rtx;
  xops[1] = frame_pointer_rtx;
  xops[2] = GEN_INT (tsize);

  /* push %ebp; movl %esp,%ebp  */
  if (frame_pointer_needed)
    {
      emit_insn (gen_rtx (SET, 0,
			  gen_rtx (MEM, SImode,
				   gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
			  frame_pointer_rtx));
      emit_move_insn (xops[1], xops[0]);
      /* output_asm_insn ("push%L1 %1", xops); */
      /* output_asm_insn (AS2 (mov%L0,%0,%1), xops); */
    }

  /* Allocate the frame: subl $tsize,%esp  */
  if (tsize)
    emit_insn (gen_rtx (SET, SImode,
			xops[0],
			gen_rtx (MINUS, SImode,
				 xops[0],
				 xops[2])));

  /* output_asm_insn (AS2 (sub%L0,%2,%0), xops);*/

  /* Note: if we used "enter" its operands would NOT be reversed from
     the Intel order.  We think enter is slower, and sdb doesn't like
     it, so we don't use it.  If you want it, the code would be:
     {
     xops[3] = const0_rtx;
     output_asm_insn ("enter %2,%3", xops);
     }
     */

  /* Push every call-saved register that is live, plus the PIC register
     when it is in use.  */
  limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
  for (regno = limit - 1; regno >= 0; regno--)
    if ((regs_ever_live[regno] && ! call_used_regs[regno])
	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
      {
	xops[0] = gen_rtx (REG, SImode, regno);
	emit_insn (gen_rtx (SET, 0,
			    gen_rtx (MEM, SImode,
				     gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
			    xops[0]));
	/* output_asm_insn ("push%L0 %0", xops);*/
      }

  if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
    {
      /* Load the PIC register by calling the thunk emitted by
	 asm_output_function_prefix (shared label pic_label_rtx), then
	 add in the GOT offset.  */
      xops[0] = pic_offset_table_rtx;
      if (pic_label_rtx == 0)
	pic_label_rtx = (rtx) gen_label_rtx ();
      xops[1] = pic_label_rtx;

      emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
      /* output_asm_insn (AS1 (call,%P1), xops);*/
      emit_insn (gen_prologue_set_got (xops[0],
				       gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
				       gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
      /* output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);*/
    }
  else if (pic_reg_used)
    {
      /* Classic PIC setup: call the next instruction, pop the return
	 address into the PIC register, then add in the GOT offset.  */
      xops[0] = pic_offset_table_rtx;
      xops[1] = (rtx) gen_label_rtx ();

      emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
      SCHED_GROUP_P (get_last_insn()) = 1;
      /* output_asm_insn (AS1 (call,%P1), xops);*/
      /* ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (xops[1]));*/
      emit_insn (gen_pop (xops[0]));
      /* output_asm_insn (AS1 (pop%L0,%0), xops);*/
      emit_insn (gen_prologue_set_got (xops[0],
				       gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
				       gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER (xops[1]))));
      /* output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);*/
    }
}
1804
1805 /* Restore function stack, frame, and registers. */
1806
void
function_epilogue (file, size)
     FILE *file;
     int size;
{
  /* Intentionally empty: the epilogue is emitted as RTL by
     ix86_expand_epilogue rather than printed from this text hook.  */
}
1813
1814 /* Return 1 if it is appropriate to emit `ret' instructions in the
1815 body of a function. Do this only if the epilogue is simple, needing a
1816 couple of insns. Prior to reloading, we can't tell how many registers
1817 must be saved, so return 0 then. Return 0 if there is no frame
1818 marker to de-allocate.
1819
1820 If NON_SAVING_SETJMP is defined and true, then it is not possible
1821 for the epilogue to be simple, so return 0. This is a special case
1822 since NON_SAVING_SETJMP will not cause regs_ever_live to change
1823 until final, but jump_optimize may need to know sooner if a
1824 `return' is OK. */
1825
1826 int
1827 ix86_can_use_return_insn_p ()
1828 {
1829 int regno;
1830 int nregs = 0;
1831 int reglimit = (frame_pointer_needed
1832 ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1833 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1834 || current_function_uses_const_pool);
1835
1836 #ifdef NON_SAVING_SETJMP
1837 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
1838 return 0;
1839 #endif
1840
1841 if (! reload_completed)
1842 return 0;
1843
1844 for (regno = reglimit - 1; regno >= 0; regno--)
1845 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1846 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1847 nregs++;
1848
1849 return nregs == 0 || ! frame_pointer_needed;
1850 }
1851
1852 \f
/* Generate the epilogue insns for the current function: restore the
   saved registers, release the stack frame, and emit the return
   (popping the caller's arguments when required).  */
1856
void
ix86_expand_epilogue ()
{
  register int regno;
  register int nregs, limit;
  int offset;
  rtx xops[3];
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  long tsize = get_frame_size ();

  /* Compute the number of registers to pop.  */

  limit = (frame_pointer_needed
	   ? FRAME_POINTER_REGNUM
	   : STACK_POINTER_REGNUM);

  nregs = 0;

  for (regno = limit - 1; regno >= 0; regno--)
    if ((regs_ever_live[regno] && ! call_used_regs[regno])
	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
      nregs++;

  /* sp is often unreliable so we must go off the frame pointer.  */

  /* In reality, we may not care if sp is unreliable, because we can
     restore the register relative to the frame pointer.  In theory,
     since each move is the same speed as a pop, and we don't need the
     leal, this is faster.  For now restore multiple registers the old
     way. */

  /* Offset of the first saved register below the frame pointer.  */
  offset = -tsize - (nregs * UNITS_PER_WORD);

  xops[2] = stack_pointer_rtx;

  if (nregs > 1 || ! frame_pointer_needed)
    {
      /* Point sp at the saved registers, then pop them in the reverse
	 of the order the prologue pushed them.  */
      if (frame_pointer_needed)
	{
	  xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
	  emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
	  /* output_asm_insn (AS2 (lea%L2,%0,%2), xops);*/
	}

      for (regno = 0; regno < limit; regno++)
	if ((regs_ever_live[regno] && ! call_used_regs[regno])
	    || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
	  {
	    xops[0] = gen_rtx (REG, SImode, regno);
	    emit_insn (gen_pop (xops[0]));
	    /* output_asm_insn ("pop%L0 %0", xops);*/
	  }
    }
  else
    /* Single register with a frame pointer: restore it with a move
       relative to the frame pointer instead of adjusting sp.  */
    for (regno = 0; regno < limit; regno++)
      if ((regs_ever_live[regno] && ! call_used_regs[regno])
	  || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
	{
	  xops[0] = gen_rtx (REG, SImode, regno);
	  xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);
	  emit_move_insn (xops[0], xops[1]);
	  /* output_asm_insn (AS2 (mov%L0,%1,%0), xops);*/
	  offset += 4;
	}

  if (frame_pointer_needed)
    {
      /* If not an i386, mov & pop is faster than "leave". */

      if (TARGET_USE_LEAVE)
	emit_insn (gen_leave());
      /* output_asm_insn ("leave", xops);*/
      else
	{
	  xops[0] = frame_pointer_rtx;
	  xops[1] = stack_pointer_rtx;
	  emit_insn (gen_epilogue_set_stack_ptr());
	  /* output_asm_insn (AS2 (mov%L2,%0,%2), xops);*/
	  emit_insn (gen_pop (xops[0]));
	  /* output_asm_insn ("pop%L0 %0", xops);*/
	}
    }
  else if (tsize)
    {
      /* If there is no frame pointer, we must still release the frame. */

      xops[0] = GEN_INT (tsize);
      emit_insn (gen_rtx (SET, SImode,
			  xops[2],
			  gen_rtx (PLUS, SImode,
				   xops[2],
				   xops[0])));
      /* output_asm_insn (AS2 (add%L2,%0,%2), xops);*/
    }

#ifdef FUNCTION_BLOCK_PROFILER_EXIT
  if (profile_block_flag == 2)
    {
      /* NOTE(review): `file' is not declared in this function; this
	 line can only compile on targets whose definition of
	 FUNCTION_BLOCK_PROFILER_EXIT ignores its argument -- verify.  */
      FUNCTION_BLOCK_PROFILER_EXIT(file);
    }
#endif

  if (current_function_pops_args && current_function_args_size)
    {
      xops[1] = GEN_INT (current_function_pops_args);

      /* i386 can only pop 32K bytes (maybe 64K?  Is it signed?).  If
	 asked to pop more, pop return address, do explicit add, and jump
	 indirectly to the caller. */

      if (current_function_pops_args >= 32768)
	{
	  /* ??? Which register to use here? */
	  xops[0] = gen_rtx (REG, SImode, 2);
	  emit_insn (gen_pop (xops[0]));
	  /* output_asm_insn ("pop%L0 %0", xops);*/
	  emit_insn (gen_rtx (SET, SImode,
			      xops[2],
			      gen_rtx (PLUS, SImode,
				       xops[1],
				       xops[2])));
	  /* output_asm_insn (AS2 (add%L2,%1,%2), xops);*/
	  emit_jump_insn (xops[0]);
	  /* output_asm_insn ("jmp %*%0", xops);*/
	}
      else
	emit_jump_insn (gen_return_pop_internal (xops[1]));
      /* output_asm_insn ("ret %1", xops);*/
    }
  else
    /* output_asm_insn ("ret", xops);*/
    emit_jump_insn (gen_return_internal ());
}
1992
1993 \f
1994 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
1995 that is a valid memory address for an instruction.
1996 The MODE argument is the machine mode for the MEM expression
1997 that wants to use this address.
1998
1999 On x86, legitimate addresses are:
2000 base movl (base),reg
2001 displacement movl disp,reg
2002 base + displacement movl disp(base),reg
2003 index + base movl (base,index),reg
2004 (index + base) + displacement movl disp(base,index),reg
2005 index*scale movl (,index,scale),reg
2006 index*scale + disp movl disp(,index,scale),reg
2007 index*scale + base movl (base,index,scale),reg
2008 (index*scale + base) + disp movl disp(base,index,scale),reg
2009
2010 In each case, scale can be 1, 2, 4, 8. */
2011
2012 /* This is exactly the same as print_operand_addr, except that
2013 it recognizes addresses instead of printing them.
2014
2015 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2016 convert common non-canonical forms to canonical form so that they will
2017 be recognized. */
2018
/* When TARGET_DEBUG_ADDR, print MSG and dump INSN to stderr to explain
   why an address was rejected.  MSG is printed through "%s" so it can
   never be misinterpreted as a printf format string.  */
#define ADDR_INVALID(msg,insn)						\
do {									\
  if (TARGET_DEBUG_ADDR)						\
    {									\
      fprintf (stderr, "%s", msg);					\
      debug_rtx (insn);							\
    }									\
} while (0)
2027
2028 int
2029 legitimate_address_p (mode, addr, strict)
2030 enum machine_mode mode;
2031 register rtx addr;
2032 int strict;
2033 {
2034 rtx base = NULL_RTX;
2035 rtx indx = NULL_RTX;
2036 rtx scale = NULL_RTX;
2037 rtx disp = NULL_RTX;
2038
2039 if (TARGET_DEBUG_ADDR)
2040 {
2041 fprintf (stderr,
2042 "\n==========\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2043 GET_MODE_NAME (mode), strict);
2044
2045 debug_rtx (addr);
2046 }
2047
2048 if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
2049 base = addr; /* base reg */
2050
2051 else if (GET_CODE (addr) == PLUS)
2052 {
2053 rtx op0 = XEXP (addr, 0);
2054 rtx op1 = XEXP (addr, 1);
2055 enum rtx_code code0 = GET_CODE (op0);
2056 enum rtx_code code1 = GET_CODE (op1);
2057
2058 if (code0 == REG || code0 == SUBREG)
2059 {
2060 if (code1 == REG || code1 == SUBREG)
2061 {
2062 indx = op0; /* index + base */
2063 base = op1;
2064 }
2065
2066 else
2067 {
2068 base = op0; /* base + displacement */
2069 disp = op1;
2070 }
2071 }
2072
2073 else if (code0 == MULT)
2074 {
2075 indx = XEXP (op0, 0);
2076 scale = XEXP (op0, 1);
2077
2078 if (code1 == REG || code1 == SUBREG)
2079 base = op1; /* index*scale + base */
2080
2081 else
2082 disp = op1; /* index*scale + disp */
2083 }
2084
2085 else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
2086 {
2087 indx = XEXP (XEXP (op0, 0), 0); /* index*scale + base + disp */
2088 scale = XEXP (XEXP (op0, 0), 1);
2089 base = XEXP (op0, 1);
2090 disp = op1;
2091 }
2092
2093 else if (code0 == PLUS)
2094 {
2095 indx = XEXP (op0, 0); /* index + base + disp */
2096 base = XEXP (op0, 1);
2097 disp = op1;
2098 }
2099
2100 else
2101 {
2102 ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
2103 return FALSE;
2104 }
2105 }
2106
2107 else if (GET_CODE (addr) == MULT)
2108 {
2109 indx = XEXP (addr, 0); /* index*scale */
2110 scale = XEXP (addr, 1);
2111 }
2112
2113 else
2114 disp = addr; /* displacement */
2115
2116 /* Allow arg pointer and stack pointer as index if there is not scaling */
2117 if (base && indx && !scale
2118 && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
2119 {
2120 rtx tmp = base;
2121 base = indx;
2122 indx = tmp;
2123 }
2124
2125 /* Validate base register */
2126 /* Don't allow SUBREG's here, it can lead to spill failures when the base
2127 is one word out of a two word structure, which is represented internally
2128 as a DImode int. */
2129 if (base)
2130 {
2131 if (GET_CODE (base) != REG)
2132 {
2133 ADDR_INVALID ("Base is not a register.\n", base);
2134 return FALSE;
2135 }
2136
2137 if ((strict && !REG_OK_FOR_BASE_STRICT_P (base))
2138 || (!strict && !REG_OK_FOR_BASE_NONSTRICT_P (base)))
2139 {
2140 ADDR_INVALID ("Base is not valid.\n", base);
2141 return FALSE;
2142 }
2143 }
2144
2145 /* Validate index register */
2146 /* Don't allow SUBREG's here, it can lead to spill failures when the index
2147 is one word out of a two word structure, which is represented internally
2148 as a DImode int. */
2149 if (indx)
2150 {
2151 if (GET_CODE (indx) != REG)
2152 {
2153 ADDR_INVALID ("Index is not a register.\n", indx);
2154 return FALSE;
2155 }
2156
2157 if ((strict && !REG_OK_FOR_INDEX_STRICT_P (indx))
2158 || (!strict && !REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
2159 {
2160 ADDR_INVALID ("Index is not valid.\n", indx);
2161 return FALSE;
2162 }
2163 }
2164 else if (scale)
2165 abort (); /* scale w/o index invalid */
2166
2167 /* Validate scale factor */
2168 if (scale)
2169 {
2170 HOST_WIDE_INT value;
2171
2172 if (GET_CODE (scale) != CONST_INT)
2173 {
2174 ADDR_INVALID ("Scale is not valid.\n", scale);
2175 return FALSE;
2176 }
2177
2178 value = INTVAL (scale);
2179 if (value != 1 && value != 2 && value != 4 && value != 8)
2180 {
2181 ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
2182 return FALSE;
2183 }
2184 }
2185
2186 /* Validate displacement
2187 Constant pool addresses must be handled special. They are
2188 considered legitimate addresses, but only if not used with regs.
2189 When printed, the output routines know to print the reference with the
2190 PIC reg, even though the PIC reg doesn't appear in the RTL. */
2191 if (disp)
2192 {
2193 if (GET_CODE (disp) == SYMBOL_REF
2194 && CONSTANT_POOL_ADDRESS_P (disp)
2195 && !base
2196 && !indx)
2197 ;
2198
2199 else if (!CONSTANT_ADDRESS_P (disp))
2200 {
2201 ADDR_INVALID ("Displacement is not valid.\n", disp);
2202 return FALSE;
2203 }
2204
2205 else if (GET_CODE (disp) == CONST_DOUBLE)
2206 {
2207 ADDR_INVALID ("Displacement is a const_double.\n", disp);
2208 return FALSE;
2209 }
2210
2211 else if (flag_pic && SYMBOLIC_CONST (disp)
2212 && base != pic_offset_table_rtx
2213 && (indx != pic_offset_table_rtx || scale != NULL_RTX))
2214 {
2215 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp);
2216 return FALSE;
2217 }
2218
2219 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp)
2220 && (base != NULL_RTX || indx != NULL_RTX))
2221 {
2222 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n", disp);
2223 return FALSE;
2224 }
2225 }
2226
2227 if (TARGET_DEBUG_ADDR)
2228 fprintf (stderr, "Address is valid.\n");
2229
2230 /* Everything looks valid, return true */
2231 return TRUE;
2232 }
2233
2234 \f
2235 /* Return a legitimate reference for ORIG (an address) using the
2236 register REG. If REG is 0, a new pseudo is generated.
2237
2238 There are three types of references that must be handled:
2239
2240 1. Global data references must load the address from the GOT, via
2241 the PIC reg. An insn is emitted to do this load, and the reg is
2242 returned.
2243
2244 2. Static data references must compute the address as an offset
2245 from the GOT, whose base is in the PIC reg. An insn is emitted to
2246 compute the address into a reg, and the reg is returned. Static
2247 data objects have SYMBOL_REF_FLAG set to differentiate them from
2248 global data objects.
2249
2250 3. Constant pool addresses must be handled special. They are
2251 considered legitimate addresses, but only if not used with regs.
2252 When printed, the output routines know to print the reference with the
2253 PIC reg, even though the PIC reg doesn't appear in the RTL.
2254
2255 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2256 reg also appears in the address (except for constant pool references,
2257 noted above).
2258
2259 "switch" statements also require special handling when generating
2260 PIC code. See comments by the `casesi' insn in i386.md for details. */
2261
rtx
legitimize_pic_address (orig, reg)
     rtx orig;
     rtx reg;
{
  /* ORIG is the address to legitimize; REG is the register to build the
     result in (0 means "allocate a fresh pseudo").  Returns an rtx that
     is a legitimate PIC reference for ORIG.  See the block comment above
     for the three reference classes handled here.  */
  rtx addr = orig;
  rtx new = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      /* Constant pool entries are legitimate as-is; the output routines
	 add the PIC register when printing them.  */
      if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
	reg = new = orig;
      else
	{
	  if (reg == 0)
	    reg = gen_reg_rtx (Pmode);

	  /* Static data (SYMBOL_REF_FLAG set) and labels are addressed as
	     an offset from the GOT base; global data is loaded through a
	     GOT slot (hence the extra MEM).  */
	  if ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FLAG (addr))
	      || GET_CODE (addr) == LABEL_REF)
	    new = gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig);
	  else
	    new = gen_rtx (MEM, Pmode,
			   gen_rtx (PLUS, Pmode,
				    pic_offset_table_rtx, orig));

	  emit_move_insn (reg, new);
	}
      current_function_uses_pic_offset_table = 1;
      return reg;
    }
  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      /* Strip a CONST wrapper; what remains must be a PLUS.  */
      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  if (GET_CODE (addr) != PLUS)
	    abort ();
	}

      /* Already expressed relative to the PIC register: nothing to do.  */
      if (XEXP (addr, 0) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	reg = gen_reg_rtx (Pmode);

      /* Legitimize both halves of the PLUS; reuse REG for the second
	 half only if the first half didn't consume it.  */
      base = legitimize_pic_address (XEXP (addr, 0), reg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg);

      if (GET_CODE (addr) == CONST_INT)
	return plus_constant (base, INTVAL (addr));

      /* Re-associate so any constant term ends up outermost.  */
      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx (PLUS, Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}
      return gen_rtx (PLUS, Pmode, base, addr);
    }
  /* Anything else (e.g. a plain constant) is returned unchanged.  */
  return new;
}
2325 \f
2326
2327 /* Emit insns to move operands[1] into operands[0]. */
2328
2329 void
2330 emit_pic_move (operands, mode)
2331 rtx *operands;
2332 enum machine_mode mode;
2333 {
2334 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
2335
2336 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2337 operands[1] = (rtx) force_reg (SImode, operands[1]);
2338 else
2339 operands[1] = legitimize_pic_address (operands[1], temp);
2340 }
2341
2342 \f
2343 /* Try machine-dependent ways of modifying an illegitimate address
2344 to be legitimate. If we find one, return the new, valid address.
2345 This macro is used in only one place: `memory_address' in explow.c.
2346
2347 OLDX is the address as it was before break_out_memory_refs was called.
2348 In some cases it is useful to look at this to decide what needs to be done.
2349
2350 MODE and WIN are passed so that this macro can use
2351 GO_IF_LEGITIMATE_ADDRESS.
2352
2353 It is always safe for this macro to do nothing. It exists to recognize
2354 opportunities to optimize the output.
2355
2356 For the 80386, we handle X+REG by loading X into a register R and
2357 using R+REG. R will go in a general reg and indexing will be used.
2358 However, if REG is a broken-out memory address or multiplication,
2359 nothing needs to be done because REG can certainly go in a general reg.
2360
2361 When -fpic is used, special handling is needed for symbolic references.
2362 See comments by legitimize_pic_address in i386.c for details. */
2363
rtx
legitimize_address (x, oldx, mode)
     register rtx x;
     register rtx oldx;
     enum machine_mode mode;
{
  /* See the block comment above: try to turn X into a legitimate address
     for MODE, returning the (possibly rewritten) address.  OLDX is
     unused here.  X is rewritten destructively in several steps, and we
     bail out early whenever the partial result becomes legitimate.  */
  int changed = 0;
  unsigned log;

  if (TARGET_DEBUG_ADDR)
    {
      fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n", GET_MODE_NAME (mode));
      debug_rtx (x);
    }

  /* Under -fpic, symbolic addresses need the GOT-based rewrite.  */
  if (flag_pic && SYMBOLIC_CONST (x))
    return legitimize_pic_address (x, 0);

  /* Canonicalize shifts by 0, 1, 2, 3 into multiply, since only
     multiplies by 1/2/4/8 can be encoded as a scaled index.  */
  if (GET_CODE (x) == ASHIFT
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
    {
      changed = 1;
      x = gen_rtx (MULT, Pmode,
		   force_reg (Pmode, XEXP (x, 0)),
		   GEN_INT (1 << log));
    }

  if (GET_CODE (x) == PLUS)
    {
      /* Canonicalize shifts by 0, 1, 2, 3 into multiply, in either
	 operand of the PLUS.  */
      if (GET_CODE (XEXP (x, 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
	{
	  changed = 1;
	  XEXP (x, 0) = gen_rtx (MULT, Pmode,
				 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
				 GEN_INT (1 << log));
	}

      if (GET_CODE (XEXP (x, 1)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
	  && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
	{
	  changed = 1;
	  XEXP (x, 1) = gen_rtx (MULT, Pmode,
				 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
				 GEN_INT (1 << log));
	}

      /* Put multiply first if it isn't already, so the later patterns
	 only have to look for MULT in operand 0.  */
      if (GET_CODE (XEXP (x, 1)) == MULT)
	{
	  rtx tmp = XEXP (x, 0);
	  XEXP (x, 0) = XEXP (x, 1);
	  XEXP (x, 1) = tmp;
	  changed = 1;
	}

      /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
	 into (plus (plus (mult (reg) (const)) (reg)) (const)).  This can be
	 created by virtual register instantiation, register elimination, and
	 similar optimizations.  */
      if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
	{
	  changed = 1;
	  x = gen_rtx (PLUS, Pmode,
		       gen_rtx (PLUS, Pmode, XEXP (x, 0), XEXP (XEXP (x, 1), 0)),
		       XEXP (XEXP (x, 1), 1));
	}

      /* Canonicalize (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
	 into (plus (plus (mult (reg) (const)) (reg)) (const)).
	 NOTE(review): the GET_CODE (x) == PLUS test below is redundant
	 inside this outer branch — confirm before simplifying.  */
      else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
	       && CONSTANT_P (XEXP (x, 1)))
	{
	  rtx constant, other;

	  /* Find which of the two candidate positions holds the
	     CONST_INT; the non-constant one becomes OTHER.  */
	  if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	    {
	      constant = XEXP (x, 1);
	      other = XEXP (XEXP (XEXP (x, 0), 1), 1);
	    }
	  else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
	    {
	      constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
	      other = XEXP (x, 1);
	    }
	  else
	    constant = 0;

	  if (constant)
	    {
	      changed = 1;
	      x = gen_rtx (PLUS, Pmode,
			   gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
				    XEXP (XEXP (XEXP (x, 0), 1), 0)),
			   plus_constant (other, INTVAL (constant)));
	    }
	}

      /* If the canonicalizations above already produced a legitimate
	 address, stop here.  */
      if (changed && legitimate_address_p (mode, x, FALSE))
	return x;

      /* Otherwise force any remaining MULT operands into registers.  */
      if (GET_CODE (XEXP (x, 0)) == MULT)
	{
	  changed = 1;
	  XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
	}

      if (GET_CODE (XEXP (x, 1)) == MULT)
	{
	  changed = 1;
	  XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
	}

      /* reg + reg is always legitimate.  */
      if (changed
	  && GET_CODE (XEXP (x, 1)) == REG
	  && GET_CODE (XEXP (x, 0)) == REG)
	return x;

      /* A symbolic second operand under -fpic needs the PIC rewrite of
	 the whole sum.  */
      if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
	{
	  changed = 1;
	  x = legitimize_pic_address (x, 0);
	}

      if (changed && legitimate_address_p (mode, x, FALSE))
	return x;

      /* Last resort: force the non-register operand into a register so
	 we end up with reg + reg.  */
      if (GET_CODE (XEXP (x, 0)) == REG)
	{
	  register rtx temp = gen_reg_rtx (Pmode);
	  register rtx val = force_operand (XEXP (x, 1), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  XEXP (x, 1) = temp;
	  return x;
	}

      else if (GET_CODE (XEXP (x, 1)) == REG)
	{
	  register rtx temp = gen_reg_rtx (Pmode);
	  register rtx val = force_operand (XEXP (x, 0), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  XEXP (x, 0) = temp;
	  return x;
	}
    }

  return x;
}
2523
2524 \f
2525 /* Print an integer constant expression in assembler syntax. Addition
2526 and subtraction are the only arithmetic that may appear in these
2527 expressions. FILE is the stdio stream to write to, X is the rtx, and
2528 CODE is the operand print code from the output string. */
2529
static void
output_pic_addr_const (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  /* Print constant expression X to FILE in assembler syntax, adding the
     @GOT/@GOTOFF/@PLT relocation suffixes PIC code requires.  CODE is
     the operand print code ('P' selects @PLT for symbols).  */
  char buf[256];

  switch (GET_CODE (x))
    {
    case PC:
      /* `.' (current location) is only meaningful in PIC output.  */
      if (flag_pic)
	putc ('.', file);
      else
	abort ();
      break;

    case SYMBOL_REF:
    case LABEL_REF:
      /* Emit the bare name, then the relocation suffix chosen below.  */
      if (GET_CODE (x) == SYMBOL_REF)
	assemble_name (file, XSTR (x, 0));
      else
	{
	  ASM_GENERATE_INTERNAL_LABEL (buf, "L",
				       CODE_LABEL_NUMBER (XEXP (x, 0)));
	  assemble_name (asm_out_file, buf);
	}

      /* Constant pool entries and static data (SYMBOL_REF_FLAG) are
	 GOT-relative; other symbols go through the GOT or the PLT.  */
      if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
	fprintf (file, "@GOTOFF(%%ebx)");
      else if (code == 'P')
	fprintf (file, "@PLT");
      else if (GET_CODE (x) == LABEL_REF)
	fprintf (file, "@GOTOFF");
      else if (! SYMBOL_REF_FLAG (x))
	fprintf (file, "@GOT");
      else
	fprintf (file, "@GOTOFF");

      break;

    case CODE_LABEL:
      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
      assemble_name (asm_out_file, buf);
      break;

    case CONST_INT:
      fprintf (file, "%d", INTVAL (x));
      break;

    case CONST:
      /* This used to output parentheses around the expression,
	 but that does not work on the 386 (either ATT or BSD assembler).  */
      output_pic_addr_const (file, XEXP (x, 0), code);
      break;

    case CONST_DOUBLE:
      if (GET_MODE (x) == VOIDmode)
	{
	  /* We can use %d if the number is <32 bits and positive.  */
	  if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
	    fprintf (file, "0x%x%08x",
		     CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
	  else
	    fprintf (file, "%d", CONST_DOUBLE_LOW (x));
	}
      else
	/* We can't handle floating point constants;
	   PRINT_OPERAND must handle them.  */
	output_operand_lossage ("floating constant misused");
      break;

    case PLUS:
      /* Some assemblers need integer constants to appear last (eg masm).
	 NOTE(review): the else branch applies INTVAL to XEXP (x, 1)
	 without checking it is a CONST_INT — presumably PLUS operands
	 here are always (sym + int); confirm before relying on it.  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
	{
	  output_pic_addr_const (file, XEXP (x, 1), code);
	  if (INTVAL (XEXP (x, 0)) >= 0)
	    fprintf (file, "+");
	  output_pic_addr_const (file, XEXP (x, 0), code);
	}
      else
	{
	  output_pic_addr_const (file, XEXP (x, 0), code);
	  if (INTVAL (XEXP (x, 1)) >= 0)
	    fprintf (file, "+");
	  output_pic_addr_const (file, XEXP (x, 1), code);
	}
      break;

    case MINUS:
      output_pic_addr_const (file, XEXP (x, 0), code);
      fprintf (file, "-");
      output_pic_addr_const (file, XEXP (x, 1), code);
      break;

    default:
      output_operand_lossage ("invalid expression as operand");
    }
}
2630 \f
2631
2632 /* Append the correct conditional move suffix which corresponds to CODE */
2633
2634 static void
2635 put_condition_code (code, file)
2636 enum rtx_code code;
2637 FILE * file;
2638 {
2639 switch (code)
2640 {
2641 case NE:
2642 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2643 fputs ("b", file);
2644 else
2645 fputs ("ne", file);
2646 return;
2647 case EQ:
2648 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2649 fputs ("ae", file);
2650 else
2651 fputs ("e", file);
2652 return;
2653 case GE:
2654 fputs ("ge", file); return;
2655 case GT:
2656 fputs ("g", file); return;
2657 case LE:
2658 fputs ("le", file); return;
2659 case LT:
2660 fputs ("l", file); return;
2661 case GEU:
2662 fputs ("ae", file); return;
2663 case GTU:
2664 fputs ("a", file); return;
2665 case LEU:
2666 fputs ("be", file); return;
2667 case LTU:
2668 fputs ("b", file); return;
2669 default: output_operand_lossage ("Invalid %%C operand");
2670 }
2671 }
2672
2673 /* Meaning of CODE:
2674 f -- float insn (print a CONST_DOUBLE as a float rather than in hex).
2675 D,L,W,B,Q,S -- print the opcode suffix for specified size of operand.
2676 C -- print opcode suffix for set/cmov insn.
2677 N -- like C, but print reversed condition
2678 R -- print the prefix for register names.
2679 z -- print the opcode suffix for the size of the current operand.
2680 * -- print a star (in certain assembler syntax)
2681 w -- print the operand as if it's a "word" (HImode) even if it isn't.
2682 c -- don't print special prefixes before constant operands.
2683 J -- print the appropriate jump operand.
2684 */
2685
void
print_operand (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  /* Print operand X to FILE under print code CODE; see the table of
     codes in the comment above.  A nonzero CODE is handled first (size
     suffixes, condition suffixes, jumps); the operand itself is then
     printed according to its rtx class.  */
  if (code)
    {
      switch (code)
	{
	case '*':
	  if (USE_STAR)
	    putc ('*', file);
	  return;

	/* Explicit operand-size suffixes.  */
	case 'L':
	  PUT_OP_SIZE (code, 'l', file);
	  return;

	case 'W':
	  PUT_OP_SIZE (code, 'w', file);
	  return;

	case 'B':
	  PUT_OP_SIZE (code, 'b', file);
	  return;

	case 'Q':
	  PUT_OP_SIZE (code, 'l', file);
	  return;

	case 'S':
	  PUT_OP_SIZE (code, 's', file);
	  return;

	case 'T':
	  PUT_OP_SIZE (code, 't', file);
	  return;

	case 'z':
	  /* 387 opcodes don't get size suffixes if the operands are
	     registers. */

	  if (STACK_REG_P (x))
	    return;

	  /* this is the size of op from size of operand */
	  switch (GET_MODE_SIZE (GET_MODE (x)))
	    {
	    case 1:
	      PUT_OP_SIZE ('B', 'b', file);
	      return;

	    case 2:
	      PUT_OP_SIZE ('W', 'w', file);
	      return;

	    case 4:
	      if (GET_MODE (x) == SFmode)
		{
		  PUT_OP_SIZE ('S', 's', file);
		  return;
		}
	      else
		PUT_OP_SIZE ('L', 'l', file);
	      return;

	    case 12:
	      PUT_OP_SIZE ('T', 't', file);
	      return;

	    case 8:
	      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
		{
#ifdef GAS_MNEMONICS
		  PUT_OP_SIZE ('Q', 'q', file);
		  return;
#else
		  /* NOTE(review): without GAS_MNEMONICS this emits the
		     'l' suffix here AND again below — looks like a
		     double suffix unless PUT_OP_SIZE suppresses
		     repeats; confirm against its definition.  */
		  PUT_OP_SIZE ('Q', 'l', file);	/* Fall through */
#endif
		}

	      PUT_OP_SIZE ('Q', 'l', file);
	      return;
	    }
	  /* NOTE(review): an unhandled size falls through into the
	     cases below, whose `break' exits the outer switch —
	     presumably intentional; confirm.  */

	/* Register-name variants, handled by PRINT_REG below.  */
	case 'b':
	case 'w':
	case 'k':
	case 'h':
	case 'y':
	case 'P':
	  break;

	case 'J':
	  switch (GET_CODE (x))
	    {
	      /* These conditions are appropriate for testing the result
		 of an arithmetic operation, not for a compare operation.
	         Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
		 CC_Z_IN_NOT_C false and not floating point. */
	    case NE:  fputs ("jne", file); return;
	    case EQ:  fputs ("je",  file); return;
	    case GE:  fputs ("jns", file); return;
	    case LT:  fputs ("js",  file); return;
	    case GEU: fputs ("jmp", file); return;
	    case GTU: fputs ("jne",  file); return;
	    case LEU: fputs ("je", file); return;
	    case LTU: fputs ("#branch never",  file); return;

	    /* no matching branches for GT nor LE */
	    }
	  abort ();

	  /* This is used by the conditional move instructions.  */
	case 'C':
	  put_condition_code (GET_CODE (x), file);
	  return;
	  /* like above, but reverse condition */
	case 'N':
	  put_condition_code (reverse_condition (GET_CODE (x)), file);
	  return;

	default:
	  {
	    char str[50];

	    sprintf (str, "invalid operand code `%c'", code);
	    output_operand_lossage (str);
	  }
	}
    }

  /* No (or fall-through) print code: print the operand itself.  */
  if (GET_CODE (x) == REG)
    {
      PRINT_REG (x, code, file);
    }
  else if (GET_CODE (x) == MEM)
    {
      PRINT_PTR (x, file);
      if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
	{
	  if (flag_pic)
	    output_pic_addr_const (file, XEXP (x, 0), code);
	  else
	    output_addr_const (file, XEXP (x, 0));
	}
      else
	output_address (XEXP (x, 0));
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* Single-precision float: emit its target bit pattern in hex.  */
      REAL_VALUE_TYPE r; long l;
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_TARGET_SINGLE (r, l);
      PRINT_IMMED_PREFIX (file);
      fprintf (file, "0x%x", l);
    }
  /* These float cases don't actually occur as immediate operands.  */
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE r; char dstr[30];
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
      fprintf (file, "%s", dstr);
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
    {
      REAL_VALUE_TYPE r; char dstr[30];
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
      fprintf (file, "%s", dstr);
    }
  else
    {
      /* Constant operand: immediate or offset prefix, unless 'c'/'P'
	 style suppresses it (code != 'P' checked here).  */
      if (code != 'P')
	{
	  if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	    PRINT_IMMED_PREFIX (file);
	  else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
		   || GET_CODE (x) == LABEL_REF)
	    PRINT_OFFSET_PREFIX (file);
	}
      if (flag_pic)
	output_pic_addr_const (file, x, code);
      else
	output_addr_const (file, x);
    }
}
2874 \f
2875 /* Print a memory operand whose address is ADDR. */
2876
void
print_operand_address (file, addr)
     FILE *file;
     register rtx addr;
{
  /* Decompose ADDR into base register, index register (possibly scaled
     by a MULT), and constant offset, then print it in assembler syntax
     via the PRINT_B_I_S / ADDR_BEG / ADDR_END macros.  */
  register rtx reg1, reg2, breg, ireg;
  rtx offset;

  switch (GET_CODE (addr))
    {
    case REG:
      /* Plain register indirect.  */
      ADDR_BEG (file);
      fprintf (file, "%se", RP);
      fputs (hi_reg_name[REGNO (addr)], file);
      ADDR_END (file);
      break;

    case PLUS:
      reg1 = 0;
      reg2 = 0;
      ireg = 0;
      breg = 0;
      offset = 0;
      /* Peel off a constant term, if any.  */
      if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
	{
	  offset = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
	{
	  offset = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      /* Peel off one register or scaled-index term into reg1.  */
      if (GET_CODE (addr) != PLUS) ;
      else if (GET_CODE (XEXP (addr, 0)) == MULT)
	{
	  reg1 = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (GET_CODE (XEXP (addr, 1)) == MULT)
	{
	  reg1 = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 0)) == REG)
	{
	  reg1 = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (GET_CODE (XEXP (addr, 1)) == REG)
	{
	  reg1 = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      /* Whatever remains may be a second register/index term.  */
      if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
	{
	  if (reg1 == 0) reg1 = addr;
	  else reg2 = addr;
	  addr = 0;
	}
      /* ADDR now holds only the constant part (if anything).  */
      if (offset != 0)
	{
	  if (addr != 0) abort ();
	  addr = offset;
	}
      /* Decide which term is the base and which the index: a MULT can
	 only be an index; only base-capable registers may be BREG.  */
      if ((reg1 && GET_CODE (reg1) == MULT)
	  || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
	{
	  breg = reg2;
	  ireg = reg1;
	}
      else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
	{
	  breg = reg1;
	  ireg = reg2;
	}

      if (ireg != 0 || breg != 0)
	{
	  int scale = 1;

	  /* Print the constant displacement first.  */
	  if (addr != 0)
	    {
	      if (flag_pic)
		output_pic_addr_const (file, addr, 0);

	      else if (GET_CODE (addr) == LABEL_REF)
		output_asm_label (addr);

	      else
		output_addr_const (file, addr);
	    }

	  /* Unwrap (mult reg const) into register + scale factor.  */
	  if (ireg != 0 && GET_CODE (ireg) == MULT)
	    {
	      scale = INTVAL (XEXP (ireg, 1));
	      ireg = XEXP (ireg, 0);
	    }

	  /* The stack pointer can only appear as a base register,
	     never an index register, so exchange the regs if it is wrong. */

	  if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
	    {
	      rtx tmp;

	      tmp = breg;
	      breg = ireg;
	      ireg = tmp;
	    }

	  /* output breg+ireg*scale */
	  PRINT_B_I_S (breg, ireg, scale, file);
	  break;
	}
      /* NOTE(review): when neither a base nor an index was found,
	 control falls through into the MULT case below with ADDR as it
	 stands — presumably unreachable for valid addresses; confirm.  */

    case MULT:
      {
	/* Bare scaled index: print a zero displacement plus reg*scale.  */
	int scale;
	if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
	  {
	    scale = INTVAL (XEXP (addr, 0));
	    ireg = XEXP (addr, 1);
	  }
	else
	  {
	    scale = INTVAL (XEXP (addr, 1));
	    ireg = XEXP (addr, 0);
	  }
	output_addr_const (file, const0_rtx);
	PRINT_B_I_S ((rtx) 0, ireg, scale, file);
      }
      break;

    default:
      /* Absolute address: small integers directly, everything else via
	 the constant output routines.  */
      if (GET_CODE (addr) == CONST_INT
	  && INTVAL (addr) < 0x8000
	  && INTVAL (addr) >= -0x8000)
	fprintf (file, "%d", INTVAL (addr));
      else
	{
	  if (flag_pic)
	    output_pic_addr_const (file, addr, 0);
	  else
	    output_addr_const (file, addr);
	}
    }
}
3025 \f
3026 /* Set the cc_status for the results of an insn whose pattern is EXP.
3027 On the 80386, we assume that only test and compare insns, as well
3028 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3029 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3030 Also, we assume that jumps, moves and sCOND don't affect the condition
3031 codes. All else clobbers the condition codes, by assumption.
3032
3033 We assume that ALL integer add, minus, etc. instructions effect the
3034 condition codes. This MUST be consistent with i386.md.
3035
3036 We don't record any float test or compare - the redundant test &
3037 compare check in final.c does not handle stack-like regs correctly. */
3038
3039 void
3040 notice_update_cc (exp)
3041 rtx exp;
3042 {
3043 if (GET_CODE (exp) == SET)
3044 {
3045 /* Jumps do not alter the cc's. */
3046 if (SET_DEST (exp) == pc_rtx)
3047 return;
3048 #ifdef IS_STACK_MODE
3049 /* Moving into a memory of stack_mode may have been moved
3050 in between the use and set of cc0 by loop_spl(). So
3051 old value of cc.status must be retained */
3052 if(GET_CODE(SET_DEST(exp))==MEM
3053 && IS_STACK_MODE(GET_MODE(SET_DEST(exp))))
3054 {
3055 return;
3056 }
3057 #endif
3058 /* Moving register or memory into a register:
3059 it doesn't alter the cc's, but it might invalidate
3060 the RTX's which we remember the cc's came from.
3061 (Note that moving a constant 0 or 1 MAY set the cc's). */
3062 if (REG_P (SET_DEST (exp))
3063 && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
3064 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3065 {
3066 if (cc_status.value1
3067 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
3068 cc_status.value1 = 0;
3069 if (cc_status.value2
3070 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
3071 cc_status.value2 = 0;
3072 return;
3073 }
3074 /* Moving register into memory doesn't alter the cc's.
3075 It may invalidate the RTX's which we remember the cc's came from. */
3076 if (GET_CODE (SET_DEST (exp)) == MEM
3077 && (REG_P (SET_SRC (exp))
3078 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3079 {
3080 if (cc_status.value1 && GET_CODE (cc_status.value1) == MEM
3081 || reg_mentioned_p (SET_DEST (exp), cc_status.value1))
3082 cc_status.value1 = 0;
3083 if (cc_status.value2 && GET_CODE (cc_status.value2) == MEM
3084 || reg_mentioned_p (SET_DEST (exp), cc_status.value2))
3085 cc_status.value2 = 0;
3086 return;
3087 }
3088 /* Function calls clobber the cc's. */
3089 else if (GET_CODE (SET_SRC (exp)) == CALL)
3090 {
3091 CC_STATUS_INIT;
3092 return;
3093 }
3094 /* Tests and compares set the cc's in predictable ways. */
3095 else if (SET_DEST (exp) == cc0_rtx)
3096 {
3097 CC_STATUS_INIT;
3098 cc_status.value1 = SET_SRC (exp);
3099 return;
3100 }
3101 /* Certain instructions effect the condition codes. */
3102 else if (GET_MODE (SET_SRC (exp)) == SImode
3103 || GET_MODE (SET_SRC (exp)) == HImode
3104 || GET_MODE (SET_SRC (exp)) == QImode)
3105 switch (GET_CODE (SET_SRC (exp)))
3106 {
3107 case ASHIFTRT: case LSHIFTRT:
3108 case ASHIFT:
3109 /* Shifts on the 386 don't set the condition codes if the
3110 shift count is zero. */
3111 if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
3112 {
3113 CC_STATUS_INIT;
3114 break;
3115 }
3116 /* We assume that the CONST_INT is non-zero (this rtx would
3117 have been deleted if it were zero. */
3118
3119 case PLUS: case MINUS: case NEG:
3120 case AND: case IOR: case XOR:
3121 cc_status.flags = CC_NO_OVERFLOW;
3122 cc_status.value1 = SET_SRC (exp);
3123 cc_status.value2 = SET_DEST (exp);
3124 break;
3125
3126 default:
3127 CC_STATUS_INIT;
3128 }
3129 else
3130 {
3131 CC_STATUS_INIT;
3132 }
3133 }
3134 else if (GET_CODE (exp) == PARALLEL
3135 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
3136 {
3137 if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
3138 return;
3139 if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)
3140 {
3141 CC_STATUS_INIT;
3142 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
3143 cc_status.flags |= CC_IN_80387;
3144 else
3145 cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
3146 return;
3147 }
3148 CC_STATUS_INIT;
3149 }
3150 else
3151 {
3152 CC_STATUS_INIT;
3153 }
3154 }
3155 \f
3156 /* Split one or more DImode RTL references into pairs of SImode
3157 references. The RTL can be REG, offsettable MEM, integer constant, or
3158 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3159 split and "num" is its length. lo_half and hi_half are output arrays
3160 that parallel "operands". */
3161
3162 void
3163 split_di (operands, num, lo_half, hi_half)
3164 rtx operands[];
3165 int num;
3166 rtx lo_half[], hi_half[];
3167 {
3168 while (num--)
3169 {
3170 if (GET_CODE (operands[num]) == REG)
3171 {
3172 lo_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]));
3173 hi_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]) + 1);
3174 }
3175 else if (CONSTANT_P (operands[num]))
3176 {
3177 split_double (operands[num], &lo_half[num], &hi_half[num]);
3178 }
3179 else if (offsettable_memref_p (operands[num]))
3180 {
3181 lo_half[num] = operands[num];
3182 hi_half[num] = adj_offsettable_operand (operands[num], 4);
3183 }
3184 else
3185 abort();
3186 }
3187 }
3188 \f
3189 /* Return 1 if this is a valid binary operation on a 387.
3190 OP is the expression matched, and MODE is its mode. */
3191
3192 int
3193 binary_387_op (op, mode)
3194 register rtx op;
3195 enum machine_mode mode;
3196 {
3197 if (mode != VOIDmode && mode != GET_MODE (op))
3198 return 0;
3199
3200 switch (GET_CODE (op))
3201 {
3202 case PLUS:
3203 case MINUS:
3204 case MULT:
3205 case DIV:
3206 return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
3207
3208 default:
3209 return 0;
3210 }
3211 }
3212
3213 \f
3214 /* Return 1 if this is a valid shift or rotate operation on a 386.
3215 OP is the expression matched, and MODE is its mode. */
3216
3217 int
3218 shift_op (op, mode)
3219 register rtx op;
3220 enum machine_mode mode;
3221 {
3222 rtx operand = XEXP (op, 0);
3223
3224 if (mode != VOIDmode && mode != GET_MODE (op))
3225 return 0;
3226
3227 if (GET_MODE (operand) != GET_MODE (op)
3228 || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
3229 return 0;
3230
3231 return (GET_CODE (op) == ASHIFT
3232 || GET_CODE (op) == ASHIFTRT
3233 || GET_CODE (op) == LSHIFTRT
3234 || GET_CODE (op) == ROTATE
3235 || GET_CODE (op) == ROTATERT);
3236 }
3237
3238 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3239 MODE is not used. */
3240
3241 int
3242 VOIDmode_compare_op (op, mode)
3243 register rtx op;
3244 enum machine_mode mode;
3245 {
3246 return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
3247 }
3248 \f
3249 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3250 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3251 is the expression of the binary operation. The output may either be
3252 emitted here, or returned to the caller, like all output_* functions.
3253
3254 There is no guarantee that the operands are the same mode, as they
3255 might be within FLOAT or FLOAT_EXTEND expressions. */
3256
char *
output_387_binary_op (insn, operands)
     rtx insn;
     rtx *operands;
{
  /* Build and return the assembler template for the 387 binary
     operation in operands[3] (PLUS/MINUS/MULT/DIV) of INSN; see the
     block comment above.  The template is assembled in the static
     buffer BUF, so the result must be consumed before the next call.  */
  rtx temp;
  char *base_op;
  static char buf[100];

  /* Pick the base mnemonic; the fi- forms take an integer memory
     operand.  */
  switch (GET_CODE (operands[3]))
    {
    case PLUS:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fiadd";
      else
	base_op = "fadd";
      break;

    case MINUS:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fisub";
      else
	base_op = "fsub";
      break;

    case MULT:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fimul";
      else
	base_op = "fmul";
      break;

    case DIV:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fidiv";
      else
	base_op = "fdiv";
      break;

    default:
      abort ();
    }

  strcpy (buf, base_op);

  switch (GET_CODE (operands[3]))
    {
    case MULT:
    case PLUS:
      /* Commutative: put the operand that matches the destination
	 first, so the two-operand forms below line up.  */
      if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
	{
	  temp = operands[2];
	  operands[2] = operands[1];
	  operands[1] = temp;
	}

      if (GET_CODE (operands[2]) == MEM)
	return strcat (buf, AS1 (%z2,%2));

      /* An integer operand in an ordinary register must be spilled and
	 reloaded through memory for the 387 (output_op_from_reg).  */
      if (NON_STACK_REG_P (operands[1]))
	{
	  output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
	  RET;
	}
      else if (NON_STACK_REG_P (operands[2]))
	{
	  output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
	  RET;
	}

      /* If the second operand dies, use the popping form.  */
      if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
	return strcat (buf, AS2 (p,%2,%0));

      if (STACK_TOP_P (operands[0]))
	return strcat (buf, AS2C (%y2,%0));
      else
	return strcat (buf, AS2C (%2,%0));

    case MINUS:
    case DIV:
      /* Non-commutative: the reversed (r) forms handle the case where
	 the first operand is the one in memory or not on top.  */
      if (GET_CODE (operands[1]) == MEM)
	return strcat (buf, AS1 (r%z1,%1));

      if (GET_CODE (operands[2]) == MEM)
	return strcat (buf, AS1 (%z2,%2));

      if (NON_STACK_REG_P (operands[1]))
	{
	  output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
	  RET;
	}
      else if (NON_STACK_REG_P (operands[2]))
	{
	  output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
	  RET;
	}

      /* Beyond this point both operands must live on the 387 stack.  */
      if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
	abort ();

      /* Popping forms when either source operand dies.  */
      if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
	return strcat (buf, AS2 (rp,%2,%0));

      if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
	return strcat (buf, AS2 (p,%1,%0));

      if (STACK_TOP_P (operands[0]))
	{
	  if (STACK_TOP_P (operands[1]))
	    return strcat (buf, AS2C (%y2,%0));
	  else
	    return strcat (buf, AS2 (r,%y1,%0));
	}
      else if (STACK_TOP_P (operands[1]))
	return strcat (buf, AS2C (%1,%0));
      else
	return strcat (buf, AS2 (r,%2,%0));

    default:
      abort ();
    }
}
3383 \f
/* Output code for INSN to convert a float to a signed int.  OPERANDS
   are the insn operands.  The input may be SFmode or DFmode and the
   output may be SImode or DImode.  As a special case, make sure that
   the 387 stack top dies if the output mode is DImode, because the
   hardware requires this.  */
3389
char *
output_fix_trunc (insn, operands)
     rtx insn;
     rtx *operands;
{
  int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
  rtx xops[2];

  /* The value must be on top of the 387 stack, and a DImode store is
     only available in the popping form, so the stack top must die.  */
  if (! STACK_TOP_P (operands[1]) ||
      (GET_MODE (operands[0]) == DImode && ! stack_top_dies))
    abort ();

  /* 12 (0x0C) sets the rounding-control bits in the high byte of the
     control word to round-toward-zero, which is what C truncation
     requires -- TODO confirm against the i387 control-word layout.  */
  xops[0] = GEN_INT (12);
  xops[1] = operands[4];

  /* Save the current control word, build a truncating copy in the
     scratch slots, and load it.  operands[2] and operands[3] are
     presumably memory scratch slots and operands[4] a scratch
     register -- see the insn pattern in i386.md.  */
  output_asm_insn (AS1 (fnstc%W2,%2), operands);
  output_asm_insn (AS2 (mov%L2,%2,%4), operands);
  output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
  output_asm_insn (AS2 (mov%L4,%4,%3), operands);
  output_asm_insn (AS1 (fldc%W3,%3), operands);

  /* Store the integer result, popping the 387 stack if the top dies.  */
  if (NON_STACK_REG_P (operands[0]))
    output_to_reg (operands[0], stack_top_dies);
  else if (GET_CODE (operands[0]) == MEM)
    {
      if (stack_top_dies)
	output_asm_insn (AS1 (fistp%z0,%0), operands);
      else
	output_asm_insn (AS1 (fist%z0,%0), operands);
    }
  else
    abort ();

  /* Restore the original control word.  */
  return AS1 (fldc%W2,%2);
}
3425 \f
3426 /* Output code for INSN to compare OPERANDS. The two operands might
3427 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3428 expression. If the compare is in mode CCFPEQmode, use an opcode that
3429 will not fault if a qNaN is present. */
3430
char *
output_float_compare (insn, operands)
     rtx insn;
     rtx *operands;
{
  int stack_top_dies;
  rtx body = XVECEXP (PATTERN (insn), 0, 0);
  int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;

  rtx tmp;
  /* The 387 can only compare against the top of its stack.  If the
     first operand is not there, swap the operands and remember that
     the resulting condition must be interpreted reversed.  */
  if (! STACK_TOP_P (operands[0]))
    {
      tmp = operands[0];
      operands[0] = operands[1];
      operands[1] = tmp;
      cc_status.flags |= CC_REVERSED;
    }

  if (! STACK_TOP_P (operands[0]))
    abort ();

  stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;

  if (STACK_REG_P (operands[1])
      && stack_top_dies
      && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
      && REGNO (operands[1]) != FIRST_STACK_REG)
    {
      /* If the top of the 387 stack dies, and the other operand is
	 also a stack register that dies, then this must be a
	 `fcompp' float compare, which pops both operands.  */

      if (unordered_compare)
	output_asm_insn ("fucompp", operands);
      else
	output_asm_insn ("fcompp", operands);
    }
  else
    {
      static char buf[100];

      /* Decide if this is the integer or float compare opcode, or the
	 unordered float compare (fucom does not fault on qNaNs).  */

      if (unordered_compare)
	strcpy (buf, "fucom");
      else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
	strcpy (buf, "fcom");
      else
	strcpy (buf, "ficom");

      /* Modify the opcode if the 387 stack is to be popped.  */

      if (stack_top_dies)
	strcat (buf, "p");

      if (NON_STACK_REG_P (operands[1]))
	output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
      else
	output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
    }

  /* Now retrieve the condition code from the FPU status word.  */

  return output_fp_cc0_set (insn);
}
3497 \f
3498 /* Output opcodes to transfer the results of FP compare or test INSN
3499 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
3500 result of the compare or test is unordered, no comparison operator
3501 succeeds except NE. Return an output template, if any. */
3502
char *
output_fp_cc0_set (insn)
     rtx insn;
{
  rtx xops[3];
  rtx unordered_label;
  rtx next;
  enum rtx_code code;

  /* Store the FPU status word into %ax (reg 0 in HImode).  */
  xops[0] = gen_rtx (REG, HImode, 0);
  output_asm_insn (AS1 (fnsts%W0,%0), xops);

  if (! TARGET_IEEE_FP)
    {
      if (!(cc_status.flags & CC_REVERSED))
	{
	  next = next_cc0_user (insn);

	  /* Find the comparison code the cc0 user will test, either
	     from a conditional jump or from a scc-style SET.  */
	  if (GET_CODE (next) == JUMP_INSN
	      && GET_CODE (PATTERN (next)) == SET
	      && SET_DEST (PATTERN (next)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
	    {
	      code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
	    }
	  else if (GET_CODE (PATTERN (next)) == SET)
	    {
	      code = GET_CODE (SET_SRC (PATTERN (next)));
	    }
	  else
	    {
	      return "sahf";
	    }
	  if (code == GT || code == LT || code == EQ || code == NE
	      || code == LE || code == GE)
	    {			/* We will test eax directly */
	      cc_status.flags |= CC_TEST_AX;
	      RET;
	    }
	}
      return "sahf";
    }

  /* IEEE mode: emit explicit tests of the condition-code bits so that
     an unordered result satisfies no comparison except NE.  */
  next = next_cc0_user (insn);
  if (next == NULL_RTX)
    abort ();

  if (GET_CODE (next) == JUMP_INSN
      && GET_CODE (PATTERN (next)) == SET
      && SET_DEST (PATTERN (next)) == pc_rtx
      && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
    {
      code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
    }
  else if (GET_CODE (PATTERN (next)) == SET)
    {
      code = GET_CODE (SET_SRC (PATTERN (next)));
    }
  else
    abort ();

  /* Operate on %al; the interesting status bits are in %ah.  The mask
     0x45 selects the C3, C2 and C0 condition-code bits of the 387
     status word -- see the i387 programmer's reference.  */
  xops[0] = gen_rtx (REG, QImode, 0);

  switch (code)
    {
    case GT:
      xops[1] = GEN_INT (0x45);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      /* je label */
      break;

    case LT:
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x01);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* je label */
      break;

    case GE:
      xops[1] = GEN_INT (0x05);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      /* je label */
      break;

    case LE:
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS1 (dec%B0,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* jb label */
      break;

    case EQ:
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* je label */
      break;

    case NE:
      xops[1] = GEN_INT (0x44);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
      /* jne label */
      break;

    case GTU:
    case LTU:
    case GEU:
    case LEU:
    default:
      /* Unsigned comparisons are not meaningful for FP results.  */
      abort ();
    }
  RET;
}
3622 \f
/* Number of reusable stack slots available per machine mode.  */
#define MAX_386_STACK_LOCALS 2

/* Stack slots assigned so far for the current function, indexed by
   machine mode and slot number.  */
static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];

/* Define the structure for the machine field in struct function;
   it holds a saved copy of the table above for nested functions.  */
struct machine_function
{
  rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
};
3632
3633 /* Functions to save and restore i386_stack_locals.
3634 These will be called, via pointer variables,
3635 from push_function_context and pop_function_context. */
3636
3637 void
3638 save_386_machine_status (p)
3639 struct function *p;
3640 {
3641 p->machine = (struct machine_function *) xmalloc (sizeof i386_stack_locals);
3642 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
3643 sizeof i386_stack_locals);
3644 }
3645
3646 void
3647 restore_386_machine_status (p)
3648 struct function *p;
3649 {
3650 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
3651 sizeof i386_stack_locals);
3652 free (p->machine);
3653 }
3654
3655 /* Clear stack slot assignments remembered from previous functions.
3656 This is called from INIT_EXPANDERS once before RTL is emitted for each
3657 function. */
3658
3659 void
3660 clear_386_stack_locals ()
3661 {
3662 enum machine_mode mode;
3663 int n;
3664
3665 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
3666 mode = (enum machine_mode) ((int) mode + 1))
3667 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
3668 i386_stack_locals[(int) mode][n] = NULL_RTX;
3669
3670 /* Arrange to save and restore i386_stack_locals around nested functions. */
3671 save_machine_status = save_386_machine_status;
3672 restore_machine_status = restore_386_machine_status;
3673 }
3674
3675 /* Return a MEM corresponding to a stack slot with mode MODE.
3676 Allocate a new slot if necessary.
3677
3678 The RTL for a function can have several slots available: N is
3679 which slot to use. */
3680
3681 rtx
3682 assign_386_stack_local (mode, n)
3683 enum machine_mode mode;
3684 int n;
3685 {
3686 if (n < 0 || n >= MAX_386_STACK_LOCALS)
3687 abort ();
3688
3689 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
3690 i386_stack_locals[(int) mode][n]
3691 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
3692
3693 return i386_stack_locals[(int) mode][n];
3694 }
3695
3696 \f
3697 int is_mul(op,mode)
3698 register rtx op;
3699 enum machine_mode mode;
3700 {
3701 return (GET_CODE (op) == MULT);
3702 }
3703
3704 int is_div(op,mode)
3705 register rtx op;
3706 enum machine_mode mode;
3707 {
3708 return (GET_CODE (op) == DIV);
3709 }
3710
3711 \f
3712 #ifdef NOTYET
3713 /* Create a new copy of an rtx.
3714 Recursively copies the operands of the rtx,
3715 except for those few rtx codes that are sharable.
3716 Doesn't share CONST */
3717
rtx
copy_all_rtx (orig)
     register rtx orig;
{
  register rtx copy;
  register int i, j;
  register RTX_CODE code;
  register char *format_ptr;

  code = GET_CODE (orig);

  /* Atoms and other always-shared codes are returned unchanged.  */
  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values. */
      return orig;

#if 0
    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
	 a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
	return orig;
      break;
#endif
      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */
    }

  /* Allocate a fresh rtx of the same code and copy the flag bits.  */
  copy = rtx_alloc (code);
  PUT_MODE (copy, GET_MODE (orig));
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;
  copy->integrated = orig->integrated;
  /* intel1 */
  copy->is_spill_rtx = orig->is_spill_rtx;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  /* Copy each operand according to its format letter.  NOTE(review):
     subexpressions are copied with copy_rtx, not recursively with
     copy_all_rtx, so only the top level refuses to share a CONST.  */
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  XEXP (copy, i) = XEXP (orig, i);
	  if (XEXP (orig, i) != NULL)
	    XEXP (copy, i) = copy_rtx (XEXP (orig, i));
	  break;

	case '0':
	case 'u':
	  XEXP (copy, i) = XEXP (orig, i);
	  break;

	case 'E':
	case 'V':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	case 'S':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	default:
	  abort ();
	}
    }
  return copy;
}
3815
3816 \f
/* Try to rewrite the address of MEM_RTX in place to make it a valid
   i386 address (base + index*scale + offset).  On failure the MEM is
   left untouched and any temporary rtl storage is released.  */
void
rewrite_address (mem_rtx)
     rtx mem_rtx;
{
  rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
  int scale = 1;
  int offset_adjust = 0;
  int was_only_offset = 0;
  rtx mem_addr = XEXP (mem_rtx, 0);
  char *storage = (char *) oballoc (0);
  int in_struct = 0;
  int is_spill_rtx = 0;

  in_struct = MEM_IN_STRUCT_P (mem_rtx);
  is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);

  /* (plus X (plus reg Y)) => (plus (plus X reg) Y), i.e. reassociate
     so the inner PLUS groups the registers together.  */
  if (GET_CODE (mem_addr) == PLUS &&
      GET_CODE (XEXP (mem_addr, 1)) == PLUS &&
      GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
    {				/* this part is utilized by the combiner */
      ret_rtx =
	gen_rtx (PLUS, GET_MODE (mem_addr),
		 gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
			  XEXP (mem_addr, 0),
			  XEXP (XEXP (mem_addr, 1), 0)),
		 XEXP (XEXP (mem_addr, 1), 1));
      if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
	{
	  XEXP (mem_rtx, 0) = ret_rtx;
	  RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
	  return;
	}
      obfree (storage);
    }

  /* this part is utilized by loop.c */
  /* If the address contains PLUS (reg,const) and this pattern is invalid
     in this case - try to rewrite the address to make it valid  intel1
   */
  storage = (char *) oballoc (0);
  index_rtx = base_rtx = offset_rtx = NULL;
  /* find the base index and offset elements of the memory address */
  if (GET_CODE (mem_addr) == PLUS)
    {
      if (GET_CODE (XEXP (mem_addr, 0)) == REG)
	{
	  if (GET_CODE (XEXP (mem_addr, 1)) == REG)
	    {
	      base_rtx = XEXP (mem_addr, 1);
	      index_rtx = XEXP (mem_addr, 0);
	    }
	  else
	    {
	      base_rtx = XEXP (mem_addr, 0);
	      offset_rtx = XEXP (mem_addr, 1);
	    }
	}
      else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
	{
	  index_rtx = XEXP (mem_addr, 0);
	  if (GET_CODE (XEXP (mem_addr, 1)) == REG)
	    {
	      base_rtx = XEXP (mem_addr, 1);
	    }
	  else
	    {
	      offset_rtx = XEXP (mem_addr, 1);
	    }
	}
      else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
	{
	  /* intel1 */
	  /* Recognize (plus (plus (plus (mult reg const) const) reg) sym)
	     and fold the inner constant into offset_adjust.  */
	  if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS &&
	      GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT &&
	      GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0)) == REG &&
	      GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1)) == CONST_INT &&
	      GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1)) == CONST_INT &&
	      GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG &&
	      GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF)
	    {
	      index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
	      offset_rtx = XEXP (mem_addr, 1);
	      base_rtx = XEXP (XEXP (mem_addr, 0), 1);
	      offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
	    }
	  else
	    {
	      offset_rtx = XEXP (mem_addr, 1);
	      index_rtx = XEXP (XEXP (mem_addr, 0), 0);
	      base_rtx = XEXP (XEXP (mem_addr, 0), 1);
	    }
	}
      else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT)
	{
	  was_only_offset = 1;
	  index_rtx = NULL;
	  base_rtx = NULL;
	  offset_rtx = XEXP (mem_addr, 1);
	  offset_adjust = INTVAL (XEXP (mem_addr, 0));
	  if (offset_adjust == 0)
	    {
	      /* Adding zero: the other term is already the address.  */
	      XEXP (mem_rtx, 0) = offset_rtx;
	      RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
	      return;
	    }
	}
      else
	{
	  obfree (storage);
	  return;
	}
    }
  else if (GET_CODE (mem_addr) == MULT)
    {
      index_rtx = mem_addr;
    }
  else
    {
      obfree (storage);
      return;
    }

  /* Split (mult reg const) into index register and scale factor.  */
  if (index_rtx && GET_CODE (index_rtx) == MULT)
    {
      if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
	{
	  obfree (storage);
	  return;
	}
      scale_rtx = XEXP (index_rtx, 1);
      scale = INTVAL (scale_rtx);
      index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
    }
  /* now find which of the elements are invalid and try to fix them */
  if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
    {
      /* A constant "index": fold index*scale into the offset and emit
	 a pure-offset address.  */
      offset_adjust = INTVAL (index_rtx) * scale;
      if (offset_rtx && GET_CODE (offset_rtx) == CONST &&
	  GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
	{
	  if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
	      GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
	    {
	      offset_rtx = copy_all_rtx (offset_rtx);
	      XEXP (XEXP (offset_rtx, 0), 1) =
		gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
	      if (!CONSTANT_P (offset_rtx))
		{
		  obfree (storage);
		  return;
		}
	    }
	}
      else if (offset_rtx && GET_CODE (offset_rtx) == SYMBOL_REF)
	{
	  offset_rtx =
	    gen_rtx (CONST, GET_MODE (offset_rtx),
		     gen_rtx (PLUS, GET_MODE (offset_rtx),
			      offset_rtx,
			      gen_rtx (CONST_INT, 0, offset_adjust)));
	  if (!CONSTANT_P (offset_rtx))
	    {
	      obfree (storage);
	      return;
	    }
	}
      else if (offset_rtx && GET_CODE (offset_rtx) == CONST_INT)
	{
	  offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
	}
      else if (!offset_rtx)
	{
	  offset_rtx = gen_rtx (CONST_INT, 0, 0);
	}
      RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
      XEXP (mem_rtx, 0) = offset_rtx;
      return;
    }

  /* Fold constants attached to base and index into offset_adjust.  */
  if (base_rtx && GET_CODE (base_rtx) == PLUS &&
      GET_CODE (XEXP (base_rtx, 0)) == REG &&
      GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
    {
      offset_adjust += INTVAL (XEXP (base_rtx, 1));
      base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
    }
  else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
    {
      offset_adjust += INTVAL (base_rtx);
      base_rtx = NULL;
    }
  if (index_rtx && GET_CODE (index_rtx) == PLUS &&
      GET_CODE (XEXP (index_rtx, 0)) == REG &&
      GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
    {
      offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
      index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
    }
  /* Give up unless the remaining base and index are legitimate.  */
  if (index_rtx)
    {
      if (!LEGITIMATE_INDEX_P (index_rtx)
	  && !(index_rtx == stack_pointer_rtx && scale == 1 && base_rtx == NULL))
	{
	  obfree (storage);
	  return;
	}
    }
  if (base_rtx)
    {
      if (!LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
	{
	  obfree (storage);
	  return;
	}
    }
  if (offset_adjust != 0)
    {
      /* Merge offset_adjust into whatever constant term exists.  */
      if (offset_rtx)
	{
	  if (GET_CODE (offset_rtx) == CONST &&
	      GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
	    {
	      if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
		  GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
		{
		  offset_rtx = copy_all_rtx (offset_rtx);
		  XEXP (XEXP (offset_rtx, 0), 1) =
		    gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
		  if (!CONSTANT_P (offset_rtx))
		    {
		      obfree (storage);
		      return;
		    }
		}
	    }
	  else if (GET_CODE (offset_rtx) == SYMBOL_REF)
	    {
	      offset_rtx =
		gen_rtx (CONST, GET_MODE (offset_rtx),
			 gen_rtx (PLUS, GET_MODE (offset_rtx),
				  offset_rtx,
				  gen_rtx (CONST_INT, 0, offset_adjust)));
	      if (!CONSTANT_P (offset_rtx))
		{
		  obfree (storage);
		  return;
		}
	    }
	  else if (GET_CODE (offset_rtx) == CONST_INT)
	    {
	      offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
	    }
	  else
	    {
	      obfree (storage);
	      return;
	    }
	}
      else
	{
	  offset_rtx = gen_rtx (CONST_INT, 0, offset_adjust);
	}
      /* Rebuild the canonical address from the surviving pieces,
	 omitting a zero offset and a scale of one.  */
      if (index_rtx)
	{
	  if (base_rtx)
	    {
	      if (scale != 1)
		{
		  if (GET_CODE (offset_rtx) == CONST_INT &&
		      INTVAL (offset_rtx) == 0)
		    {
		      ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
					 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
						  scale_rtx),
					 base_rtx);
		    }
		  else
		    {
		      ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
					 gen_rtx (PLUS, GET_MODE (base_rtx),
						  gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
							   scale_rtx),
						  base_rtx),
					 offset_rtx);
		    }
		}
	      else
		{
		  if (GET_CODE (offset_rtx) == CONST_INT &&
		      INTVAL (offset_rtx) == 0)
		    {
		      ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, base_rtx);
		    }
		  else
		    {
		      ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
					 gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx,
						  base_rtx),
					 offset_rtx);
		    }
		}
	    }
	  else
	    {
	      if (scale != 1)
		{
		  if (GET_CODE (offset_rtx) == CONST_INT &&
		      INTVAL (offset_rtx) == 0)
		    {
		      ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx), index_rtx, scale_rtx);
		    }
		  else
		    {
		      ret_rtx =
			gen_rtx (PLUS, GET_MODE (offset_rtx),
				 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
					  scale_rtx),
				 offset_rtx);
		    }
		}
	      else
		{
		  if (GET_CODE (offset_rtx) == CONST_INT &&
		      INTVAL (offset_rtx) == 0)
		    {
		      ret_rtx = index_rtx;
		    }
		  else
		    {
		      ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, offset_rtx);
		    }
		}
	    }
	}
      else
	{
	  if (base_rtx)
	    {
	      if (GET_CODE (offset_rtx) == CONST_INT &&
		  INTVAL (offset_rtx) == 0)
		{
		  ret_rtx = base_rtx;
		}
	      else
		{
		  ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx, offset_rtx);
		}
	    }
	  else if (was_only_offset)
	    {
	      ret_rtx = offset_rtx;
	    }
	  else
	    {
	      obfree (storage);
	      return;
	    }
	}
      XEXP (mem_rtx, 0) = ret_rtx;
      RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
      return;
    }
  else
    {
      obfree (storage);
      return;
    }
}
4184 #endif /* NOTYET */
4185
4186 \f
4187 /* return 1 if the first insn to set cc before insn also sets the register
4188 reg_rtx - otherwise return 0 */
4189 int
4190 last_to_set_cc (reg_rtx, insn)
4191 rtx reg_rtx, insn;
4192 {
4193 rtx prev_insn = PREV_INSN (insn);
4194
4195 while (prev_insn)
4196 {
4197 if (GET_CODE (prev_insn) == NOTE)
4198 ;
4199
4200 else if (GET_CODE (prev_insn) == INSN)
4201 {
4202 if (GET_CODE (PATTERN (prev_insn)) != SET)
4203 return (0);
4204
4205 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
4206 {
4207 if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
4208 return (1);
4209
4210 return (0);
4211 }
4212
4213 else if (!doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
4214 return (0);
4215 }
4216
4217 else
4218 return (0);
4219
4220 prev_insn = PREV_INSN (prev_insn);
4221 }
4222
4223 return (0);
4224 }
4225
4226 \f
4227 int
4228 doesnt_set_condition_code (pat)
4229 rtx pat;
4230 {
4231 switch (GET_CODE (pat))
4232 {
4233 case MEM:
4234 case REG:
4235 return (1);
4236
4237 default:
4238 return (0);
4239
4240 }
4241 }
4242
4243 \f
4244 int
4245 sets_condition_code (pat)
4246 rtx pat;
4247 {
4248 switch (GET_CODE (pat))
4249 {
4250 case PLUS:
4251 case MINUS:
4252 case AND:
4253 case IOR:
4254 case XOR:
4255 case NOT:
4256 case NEG:
4257 case MULT:
4258 case DIV:
4259 case MOD:
4260 case UDIV:
4261 case UMOD:
4262 return (1);
4263
4264 default:
4265 return (0);
4266
4267 }
4268 }
4269
4270 \f
4271 int
4272 str_immediate_operand (op, mode)
4273 register rtx op;
4274 enum machine_mode mode;
4275 {
4276 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
4277 {
4278 return (1);
4279 }
4280 return (0);
4281 }
4282
4283 \f
4284 int
4285 is_fp_insn (insn)
4286 rtx insn;
4287 {
4288 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4289 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4290 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4291 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
4292 {
4293 return (1);
4294 }
4295
4296 return (0);
4297 }
4298
4299 /*
4300 Return 1 if the mode of the SET_DEST of insn is floating point
4301 and it is not an fld or a move from memory to memory.
4302 Otherwise return 0 */
4303 int
4304 is_fp_dest (insn)
4305 rtx insn;
4306 {
4307 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4308 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4309 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4310 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4311 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
4312 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
4313 && GET_CODE (SET_SRC (insn)) != MEM)
4314 {
4315 return (1);
4316 }
4317
4318 return (0);
4319 }
4320
4321 /*
4322 Return 1 if the mode of the SET_DEST floating point and is memory
4323 and the source is a register.
4324 */
4325 int
4326 is_fp_store (insn)
4327 rtx insn;
4328 {
4329 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4330 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4331 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4332 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4333 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
4334 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
4335 {
4336 return (1);
4337 }
4338
4339 return (0);
4340 }
4341
4342 \f
4343 /*
4344 Return 1 if dep_insn sets a register which insn uses as a base
4345 or index to reference memory.
4346 otherwise return 0 */
4347
4348 int
4349 agi_dependent (insn, dep_insn)
4350 rtx insn, dep_insn;
4351 {
4352 if (GET_CODE (dep_insn) == INSN
4353 && GET_CODE (PATTERN (dep_insn)) == SET
4354 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
4355 {
4356 return (reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn));
4357 }
4358
4359 if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
4360 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
4361 && push_operand (SET_DEST (PATTERN (dep_insn)),
4362 GET_MODE (SET_DEST (PATTERN (dep_insn)))))
4363 {
4364 return (reg_mentioned_in_mem (stack_pointer_rtx, insn));
4365 }
4366
4367 return (0);
4368 }
4369
4370 \f
4371 /*
4372 Return 1 if reg is used in rtl as a base or index for a memory ref
4373 otherwise return 0. */
4374
4375 int
4376 reg_mentioned_in_mem (reg, rtl)
4377 rtx reg, rtl;
4378 {
4379 register char *fmt;
4380 register int i;
4381 register enum rtx_code code;
4382
4383 if (rtl == NULL)
4384 return (0);
4385
4386 code = GET_CODE (rtl);
4387
4388 switch (code)
4389 {
4390 case HIGH:
4391 case CONST_INT:
4392 case CONST:
4393 case CONST_DOUBLE:
4394 case SYMBOL_REF:
4395 case LABEL_REF:
4396 case PC:
4397 case CC0:
4398 case SUBREG:
4399 return (0);
4400
4401
4402 }
4403
4404 if (code == MEM && reg_mentioned_p (reg, rtl))
4405 return (1);
4406
4407 fmt = GET_RTX_FORMAT (code);
4408 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4409 {
4410 if (fmt[i] == 'E')
4411 {
4412 register int j;
4413 for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
4414 {
4415 if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
4416 return 1;
4417 }
4418 }
4419
4420 else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
4421 return 1;
4422 }
4423
4424 return (0);
4425 }
4426 \f
/* Output the appropriate insns for doing strlen if not just doing repnz; scasb

   operands[0] = result, initialized with the start address
   operands[1] = alignment of the address.
   operands[2] = scratch register, initialized with the start address when
		 not aligned, otherwise undefined

   This is just the body.  It needs the initializations mentioned above and
   some address computing at the end.  These things are done in i386.md.  */
4436
4437 char *
4438 output_strlen_unroll (operands)
4439 rtx operands[];
4440 {
4441 rtx xops[18];
4442
4443 xops[0] = operands[0]; /* Result */
4444 /* operands[1]; * Alignment */
4445 xops[1] = operands[2]; /* Scratch */
4446 xops[2] = GEN_INT (0);
4447 xops[3] = GEN_INT (2);
4448 xops[4] = GEN_INT (3);
4449 xops[5] = GEN_INT (4);
4450 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4451 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4452 xops[8] = gen_label_rtx (); /* label of main loop */
4453 if(TARGET_USE_Q_REG && QI_REG_P (xops[1]))
4454 xops[9] = gen_label_rtx (); /* pentium optimisation */
4455 xops[10] = gen_label_rtx (); /* end label 2 */
4456 xops[11] = gen_label_rtx (); /* end label 1 */
4457 xops[12] = gen_label_rtx (); /* end label */
4458 /* xops[13] * Temporary used */
4459 xops[14] = GEN_INT (0xff);
4460 xops[15] = GEN_INT (0xff00);
4461 xops[16] = GEN_INT (0xff0000);
4462 xops[17] = GEN_INT (0xff000000);
4463
4464 /* Loop to check 1..3 bytes for null to get an aligned pointer */
4465
4466 /* is there a known alignment and is it less then 4 */
4467 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
4468 {
4469 /* is there a known alignment and is it not 2 */
4470 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4471 {
4472 xops[6] = gen_label_rtx (); /* label when aligned to 3-byte */
4473 xops[7] = gen_label_rtx (); /* label when aligned to 2-byte */
4474
4475 /* leave just the 3 lower bits */
4476 /* if this is a q-register, then the high part is used later */
4477 /* therefore user andl rather than andb */
4478 output_asm_insn (AS2 (and%L1,%4,%1), xops);
4479 /* is aligned to 4-byte adress when zero */
4480 output_asm_insn (AS1 (je,%l8), xops);
4481 /* side-effect even Parity when %eax == 3 */
4482 output_asm_insn (AS1 (jp,%6), xops);
4483
4484 /* is it aligned to 2 bytes ? */
4485 if (QI_REG_P (xops[1]))
4486 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4487 else
4488 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4489 output_asm_insn (AS1 (je,%7), xops);
4490 }
4491 else
4492 {
4493 /* since the alignment is 2, we have to check 2 or 0 bytes */
4494
4495 /* check if is aligned to 4 - byte */
4496 output_asm_insn (AS2 (and%L1,%3,%1), xops);
4497 /* is aligned to 4-byte adress when zero */
4498 output_asm_insn (AS1 (je,%l8), xops);
4499 }
4500
4501 xops[13] = gen_rtx (MEM, QImode, xops[0]);
4502 /* now, compare the bytes */
4503 /* compare with the high part of a q-reg gives shorter code */
4504 if (QI_REG_P (xops[1]))
4505 {
4506 /* compare the first n unaligned byte on a byte per byte basis */
4507 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4508 /* when zero we reached the end */
4509 output_asm_insn (AS1 (je,%l12), xops);
4510 /* increment the address */
4511 output_asm_insn (AS1 (inc%L0,%0), xops);
4512
4513 /* not needed with an alignment of 2 */
4514 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4515 {
4516 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4517 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4518 output_asm_insn (AS1 (je,%l12), xops);
4519 output_asm_insn (AS1 (inc%L0,%0), xops);
4520
4521 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4522 }
4523 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4524 }
4525 else
4526 {
4527 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4528 output_asm_insn (AS1 (je,%l12), xops);
4529 output_asm_insn (AS1 (inc%L0,%0), xops);
4530
4531 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4532 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4533 output_asm_insn (AS1 (je,%l12), xops);
4534 output_asm_insn (AS1 (inc%L0,%0), xops);
4535
4536 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4537 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4538 }
4539 output_asm_insn (AS1 (je,%l12), xops);
4540 output_asm_insn (AS1 (inc%L0,%0), xops);
4541 }
4542
4543 /* Generate loop to check 4 bytes at a time */
4544 /* IMHO it is not a good idea to align this loop.  It only makes */
4545 /* the program larger and does not help to speed it up. */
4546 /* ASM_OUTPUT_LOOP_ALIGN (asm_out_file); */
4547 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
4548
4549 xops[13] = gen_rtx (MEM, SImode, xops[0]);
4550 output_asm_insn (AS2 (mov%L1,%13,%1), xops);
4551
4552 if (QI_REG_P (xops[1]))
4553 {
4554 /* On i586 it is faster to combine the hi- and lo- part as
4555 a kind of lookahead.  If ANDing both yields zero, then one
4556 of the two *could* be zero; otherwise neither of them is zero.
4557 This saves one instruction; on the i486 it is slower
4558 (tested with P-90, i486DX2-66, AMD486DX2-66). */
4559 if(TARGET_PENTIUM)
4560 {
4561 output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
4562 output_asm_insn (AS1 (jne,%l9), xops);
4563 }
4564
4565 /* check first byte */
4566 output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
4567 output_asm_insn (AS1 (je,%l12), xops);
4568
4569 /* check second byte */
4570 output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
4571 output_asm_insn (AS1 (je,%l11), xops);
4572
4573 if(TARGET_PENTIUM)
4574 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[9]));
4575 }
4576 else
4577 {
4578 /* check first byte */
4579 output_asm_insn (AS2 (test%L1,%14,%1), xops);
4580 output_asm_insn (AS1 (je,%l12), xops);
4581
4582 /* check second byte */
4583 output_asm_insn (AS2 (test%L1,%15,%1), xops);
4584 output_asm_insn (AS1 (je,%l11), xops);
4585 }
4586
4587 /* check third byte */
4588 output_asm_insn (AS2 (test%L1,%16,%1), xops);
4589 output_asm_insn (AS1 (je,%l10), xops);
4590
4591 /* check fourth byte and increment address */
4592 output_asm_insn (AS2 (add%L0,%5,%0), xops);
4593 output_asm_insn (AS2 (test%L1,%17,%1), xops);
4594 output_asm_insn (AS1 (jne,%l8), xops);
4595
4596 /* now generate fixups when the compare stops within a 4-byte word */
4597 output_asm_insn (AS2 (sub%L0,%4,%0), xops);
4598
4599 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
4600 output_asm_insn (AS1 (inc%L0,%0), xops);
4601
4602 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
4603 output_asm_insn (AS1 (inc%L0,%0), xops);
4604
4605 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));
4606
4607 RET;
4608 }
This page took 0.252341 seconds and 6 git commands to generate.