]> gcc.gnu.org Git - gcc.git/blob - gcc/config/i386/i386.c
i386.c (output_fp_conditional_move): Don't bother handling (cc_prev_status.flags...
[gcc.git] / gcc / config / i386 / i386.c
1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 92, 94, 95, 96, 97, 1998 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include <setjmp.h>
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "regs.h"
26 #include "hard-reg-set.h"
27 #include "real.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-flags.h"
31 #include "output.h"
32 #include "insn-attr.h"
33 #include "tree.h"
34 #include "flags.h"
35 #include "except.h"
36 #include "function.h"
37 #include "recog.h"
38 #include "expr.h"
39 #include "toplev.h"
40
41 #ifdef EXTRA_CONSTRAINT
42 /* If EXTRA_CONSTRAINT is defined, then the 'S'
43 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
44 asm statements that need 'S' for class SIREG will break. */
45 error EXTRA_CONSTRAINT conflicts with S constraint letter
46 /* The previous line used to be #error, but some compilers barf
47 even if the conditional was untrue. */
48 #endif
49
50 #ifndef CHECK_STACK_LIMIT
51 #define CHECK_STACK_LIMIT -1
52 #endif
53
/* Type of an operand for ix86_{binary,unary}_operator_ok */
enum reg_mem
{
  reg_p,			/* operand is a register */
  mem_p,			/* operand is a memory reference */
  imm_p				/* operand is an immediate constant */
};
61
/* Processor costs (relative to an add).  One table per processor the
   compiler can tune for; override_options points ix86_cost at the one
   selected by -mcpu=.  */

struct processor_costs i386_cost = {	/* 386 specific costs */
  1,					/* cost of an add instruction */
  1,					/* cost of a lea instruction */
  3,					/* variable shift costs */
  2,					/* constant shift costs */
  6,					/* cost of starting a multiply */
  1,					/* cost of multiply per each bit set */
  23					/* cost of a divide/mod */
};

struct processor_costs i486_cost = {	/* 486 specific costs */
  1,					/* cost of an add instruction */
  1,					/* cost of a lea instruction */
  3,					/* variable shift costs */
  2,					/* constant shift costs */
  12,					/* cost of starting a multiply */
  1,					/* cost of multiply per each bit set */
  40					/* cost of a divide/mod */
};

struct processor_costs pentium_cost = {
  1,					/* cost of an add instruction */
  1,					/* cost of a lea instruction */
  4,					/* variable shift costs */
  1,					/* constant shift costs */
  11,					/* cost of starting a multiply */
  0,					/* cost of multiply per each bit set */
  25					/* cost of a divide/mod */
};

struct processor_costs pentiumpro_cost = {
  1,					/* cost of an add instruction */
  1,					/* cost of a lea instruction */
  3,					/* variable shift costs */
  1,					/* constant shift costs */
  4,					/* cost of starting a multiply */
  0,					/* cost of multiply per each bit set */
  17					/* cost of a divide/mod */
};

/* Costs for the processor currently tuned for; reassigned by
   override_options.  */
struct processor_costs *ix86_cost = &pentium_cost;
104
/* A MEM in the given MODE addressed through the frame pointer.  */
#define AT_BP(mode) (gen_rtx_MEM ((mode), frame_pointer_rtx))

extern FILE *asm_out_file;
extern char *strcat ();

/* Forward declarations for the prologue/epilogue emitters below.  */
static void ix86_epilogue PROTO((int));
static void ix86_prologue PROTO((int));

char *singlemove_string ();
char *output_move_const_single ();
char *output_fp_cc0_set ();

/* Assembler names of the hard registers, indexed by register number,
   for word, low-byte and high-byte access respectively.  */
char *hi_reg_name[] = HI_REGISTER_NAMES;
char *qi_reg_name[] = QI_REGISTER_NAMES;
char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
120
/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in i386.h. */

enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{
  /* ax, dx, cx, bx */
  AREG, DREG, CREG, BREG,
  /* si, di, bp, sp */
  SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
  /* FP registers: the top two stack slots get singleton classes, the
     remainder are interchangeable members of FLOAT_REGS.  */
  FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
  FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
  /* arg pointer */
  INDEX_REGS
};
136
/* Test and compare insns in i386.md store the information needed to
   generate branch and scc insns here.  */

struct rtx_def *i386_compare_op0 = NULL_RTX;	/* first compare operand */
struct rtx_def *i386_compare_op1 = NULL_RTX;	/* second compare operand */
/* Generator functions for the pending compare; presumably installed by
   the compare expanders in i386.md -- confirm there.  */
struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();

/* which cpu are we scheduling for */
enum processor_type ix86_cpu;

/* which instruction set architecture to use. */
int ix86_arch;

/* Strings to hold which cpu and instruction set architecture to use. */
char *ix86_cpu_string;		/* for -mcpu=<xxx> */
char *ix86_arch_string;		/* for -march=<xxx> */

/* Register allocation order (-mreg-alloc=); validated in
   override_options, consumed by order_regs_for_local_alloc.  */
char *i386_reg_alloc_order;
/* Set for each hard register named in i386_reg_alloc_order.  */
static char regs_allocated[FIRST_PSEUDO_REGISTER];

/* # of registers to use to pass arguments. */
char *i386_regparm_string;

/* i386_regparm_string as a number */
int i386_regparm;

/* Alignment to use for loops and jumps: */

/* Power of two alignment for loops. */
char *i386_align_loops_string;

/* Power of two alignment for non-loop jumps. */
char *i386_align_jumps_string;

/* Values 1-5: see jump.c */
int i386_branch_cost;
char *i386_branch_cost_string;

/* Power of two alignment for functions. */
int i386_align_funcs;
char *i386_align_funcs_string;

/* Power of two alignment for loops. */
int i386_align_loops;

/* Power of two alignment for non-loop jumps. */
int i386_align_jumps;
185
/* Sometimes certain combinations of command options do not make
   sense on a particular target machine.  You can define a macro
   `OVERRIDE_OPTIONS' to take account of this.  This macro, if
   defined, is executed once just after all the command options have
   been parsed.

   Don't use this macro to turn on various extra optimizations for
   `-O'.  That is what `OPTIMIZATION_OPTIONS' is for.  */

void
override_options ()
{
  int ch, i, j;
  int def_align;

  /* Table mapping each recognized -march=/-mcpu= name to its processor
     enum, cost table, and target flags to force on/off.  Entries are
     ordered oldest processor first.  */
  static struct ptt
    {
      char *name;		/* Canonical processor name.  */
      enum processor_type processor; /* Processor type enum value.  */
      struct processor_costs *cost; /* Processor costs */
      int target_enable;	/* Target flags to enable.  */
      int target_disable;	/* Target flags to disable.  */
    } processor_target_table[]
      = {{PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
	 {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
	 {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
	 {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
	 {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentiumpro_cost,
	    0, 0},
	 {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO,
	    &pentiumpro_cost, 0, 0}};

  int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Validate registers in register allocation order.  regs_allocated
     doubles as the duplicate check here and as the "already placed" set
     later consulted by order_regs_for_local_alloc.  */
  if (i386_reg_alloc_order)
    {
      for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
	{
	  int regno = 0;

	  switch (ch)
	    {
	    case 'a':	regno = 0;	break;
	    case 'd':	regno = 1;	break;
	    case 'c':	regno = 2;	break;
	    case 'b':	regno = 3;	break;
	    case 'S':	regno = 4;	break;
	    case 'D':	regno = 5;	break;
	    case 'B':	regno = 6;	break;

	    default:	fatal ("Register '%c' is unknown", ch);
	    }

	  if (regs_allocated[regno])
	    fatal ("Register '%c' already specified in allocation order", ch);

	  regs_allocated[regno] = 1;
	}
    }

  /* Default the architecture to pentium; the cpu then defaults from
     the processor-specific default string.  */
  if (ix86_arch_string == 0)
    {
      ix86_arch_string = PROCESSOR_PENTIUM_STRING;
      if (ix86_cpu_string == 0)
	ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
    }

  /* Look up -march= in the table; I remembers the matched index.  */
  for (i = 0; i < ptt_size; i++)
    if (! strcmp (ix86_arch_string, processor_target_table[i].name))
      {
	ix86_arch = processor_target_table[i].processor;
	if (ix86_cpu_string == 0)
	  ix86_cpu_string = processor_target_table[i].name;
	break;
      }

  if (i == ptt_size)
    {
      error ("bad value (%s) for -march= switch", ix86_arch_string);
      ix86_arch_string = PROCESSOR_PENTIUM_STRING;
      ix86_arch = PROCESSOR_DEFAULT;
    }

  if (ix86_cpu_string == 0)
    ix86_cpu_string = PROCESSOR_DEFAULT_STRING;

  /* Look up -mcpu= in the table; J remembers the matched index.  */
  for (j = 0; j < ptt_size; j++)
    if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
      {
	ix86_cpu = processor_target_table[j].processor;
	ix86_cost = processor_target_table[j].cost;
	/* Since the table is ordered oldest-first, i > j means -march=
	   selected a newer processor than -mcpu=; reject that when the
	   architecture is pentiumpro or newer.  */
	if (i > j && (int) ix86_arch >= (int) PROCESSOR_PENTIUMPRO)
	  error ("-mcpu=%s does not support -march=%s",
		 ix86_cpu_string, ix86_arch_string);

	target_flags |= processor_target_table[j].target_enable;
	target_flags &= ~processor_target_table[j].target_disable;
	break;
      }

  if (j == ptt_size)
    {
      error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
      ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
      ix86_cpu = PROCESSOR_DEFAULT;
    }

  /* Validate -mregparm= value. */
  if (i386_regparm_string)
    {
      i386_regparm = atoi (i386_regparm_string);
      if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
	fatal ("-mregparm=%d is not between 0 and %d",
	       i386_regparm, REGPARM_MAX);
    }

  /* The 486 suffers more from non-aligned cache line fills, and the
     larger code size results in a larger cache foot-print and more misses.
     The 486 has a 16 byte cache line, pentium and pentiumpro have a 32 byte
     cache line. */
  def_align = (TARGET_486) ? 4 : 2;

  /* Validate -malign-loops= value, or provide default.
     NOTE(review): the non-ASM_OUTPUT_MAX_SKIP_ALIGN fallback here is a
     hard-coded 2 rather than def_align as used for -malign-jumps below;
     confirm whether the asymmetry is intentional.  */
  if (i386_align_loops_string)
    {
      i386_align_loops = atoi (i386_align_loops_string);
      if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
	fatal ("-malign-loops=%d is not between 0 and %d",
	       i386_align_loops, MAX_CODE_ALIGN);
    }
  else
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
    i386_align_loops = 4;
#else
    i386_align_loops = 2;
#endif

  /* Validate -malign-jumps= value, or provide default. */
  if (i386_align_jumps_string)
    {
      i386_align_jumps = atoi (i386_align_jumps_string);
      if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
	fatal ("-malign-jumps=%d is not between 0 and %d",
	       i386_align_jumps, MAX_CODE_ALIGN);
    }
  else
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
    i386_align_jumps = 4;
#else
    i386_align_jumps = def_align;
#endif

  /* Validate -malign-functions= value, or provide default. */
  if (i386_align_funcs_string)
    {
      i386_align_funcs = atoi (i386_align_funcs_string);
      if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
	fatal ("-malign-functions=%d is not between 0 and %d",
	       i386_align_funcs, MAX_CODE_ALIGN);
    }
  else
    i386_align_funcs = def_align;

  /* Validate -mbranch-cost= value, or provide default. */
  if (i386_branch_cost_string)
    {
      i386_branch_cost = atoi (i386_branch_cost_string);
      if (i386_branch_cost < 0 || i386_branch_cost > 5)
	fatal ("-mbranch-cost=%d is not between 0 and 5",
	       i386_branch_cost);
    }
  else
    i386_branch_cost = 1;

  /* Keep nonleaf frame pointers: -momit-leaf-frame-pointer turns on
     -fomit-frame-pointer; presumably nonleaf functions still keep
     theirs via the frame-pointer-required machinery -- confirm in
     i386.h.  */
  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;
}
369 \f
370 /* A C statement (sans semicolon) to choose the order in which to
371 allocate hard registers for pseudo-registers local to a basic
372 block.
373
374 Store the desired register order in the array `reg_alloc_order'.
375 Element 0 should be the register to allocate first; element 1, the
376 next register; and so on.
377
378 The macro body should not assume anything about the contents of
379 `reg_alloc_order' before execution of the macro.
380
381 On most machines, it is not necessary to define this macro. */
382
383 void
384 order_regs_for_local_alloc ()
385 {
386 int i, ch, order;
387
388 /* User specified the register allocation order. */
389
390 if (i386_reg_alloc_order)
391 {
392 for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
393 {
394 int regno = 0;
395
396 switch (ch)
397 {
398 case 'a': regno = 0; break;
399 case 'd': regno = 1; break;
400 case 'c': regno = 2; break;
401 case 'b': regno = 3; break;
402 case 'S': regno = 4; break;
403 case 'D': regno = 5; break;
404 case 'B': regno = 6; break;
405 }
406
407 reg_alloc_order[order++] = regno;
408 }
409
410 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
411 {
412 if (! regs_allocated[i])
413 reg_alloc_order[order++] = i;
414 }
415 }
416
417 /* If user did not specify a register allocation order, use natural order. */
418 else
419 {
420 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
421 reg_alloc_order[i] = i;
422 }
423 }
424 \f
425 void
426 optimization_options (level, size)
427 int level;
428 int size ATTRIBUTE_UNUSED;
429 {
430 /* For -O2 and beyond, turn off -fschedule-insns by default. It tends to
431 make the problem with not enough registers even worse. */
432 #ifdef INSN_SCHEDULING
433 if (level > 1)
434 flag_schedule_insns = 0;
435 #endif
436 }
437 \f
438 /* Sign-extend a 16-bit constant */
439
440 struct rtx_def *
441 i386_sext16_if_const (op)
442 struct rtx_def *op;
443 {
444 if (GET_CODE (op) == CONST_INT)
445 {
446 HOST_WIDE_INT val = INTVAL (op);
447 HOST_WIDE_INT sext_val;
448 if (val & 0x8000)
449 sext_val = val | ~0xffff;
450 else
451 sext_val = val & 0xffff;
452 if (sext_val != val)
453 op = GEN_INT (sext_val);
454 }
455 return op;
456 }
457 \f
458 /* Return nonzero if the rtx is aligned */
459
460 static int
461 i386_aligned_reg_p (regno)
462 int regno;
463 {
464 return (regno == STACK_POINTER_REGNUM
465 || (! flag_omit_frame_pointer && regno == FRAME_POINTER_REGNUM));
466 }
467
468 int
469 i386_aligned_p (op)
470 rtx op;
471 {
472 /* Registers and immediate operands are always "aligned". */
473 if (GET_CODE (op) != MEM)
474 return 1;
475
476 /* Don't even try to do any aligned optimizations with volatiles. */
477 if (MEM_VOLATILE_P (op))
478 return 0;
479
480 /* Get address of memory operand. */
481 op = XEXP (op, 0);
482
483 switch (GET_CODE (op))
484 {
485 case CONST_INT:
486 if (INTVAL (op) & 3)
487 break;
488 return 1;
489
490 /* Match "reg + offset" */
491 case PLUS:
492 if (GET_CODE (XEXP (op, 1)) != CONST_INT)
493 break;
494 if (INTVAL (XEXP (op, 1)) & 3)
495 break;
496
497 op = XEXP (op, 0);
498 if (GET_CODE (op) != REG)
499 break;
500
501 /* ... fall through ... */
502
503 case REG:
504 return i386_aligned_reg_p (REGNO (op));
505
506 default:
507 break;
508 }
509
510 return 0;
511 }
512 \f
513 /* Return nonzero if INSN looks like it won't compute useful cc bits
514 as a side effect. This information is only a hint. */
515
516 int
517 i386_cc_probably_useless_p (insn)
518 rtx insn;
519 {
520 return ! next_cc0_user (insn);
521 }
522 \f
523 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
524 attribute for DECL. The attributes in ATTRIBUTES have previously been
525 assigned to DECL. */
526
527 int
528 i386_valid_decl_attribute_p (decl, attributes, identifier, args)
529 tree decl ATTRIBUTE_UNUSED;
530 tree attributes ATTRIBUTE_UNUSED;
531 tree identifier ATTRIBUTE_UNUSED;
532 tree args ATTRIBUTE_UNUSED;
533 {
534 return 0;
535 }
536
/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
   attribute for TYPE.  The attributes in ATTRIBUTES have previously been
   assigned to TYPE.  */

int
i386_valid_type_attribute_p (type, attributes, identifier, args)
     tree type;
     tree attributes ATTRIBUTE_UNUSED;
     tree identifier;
     tree args;
{
  /* Only these node kinds may carry the x86 calling-convention
     attributes.  */
  if (TREE_CODE (type) != FUNCTION_TYPE
      && TREE_CODE (type) != FIELD_DECL
      && TREE_CODE (type) != TYPE_DECL)
    return 0;

  /* Stdcall attribute says callee is responsible for popping arguments
     if they are not variable. */
  if (is_attribute_p ("stdcall", identifier))
    return (args == NULL_TREE);	/* takes no arguments */

  /* Cdecl attribute says the callee is a normal C declaration. */
  if (is_attribute_p ("cdecl", identifier))
    return (args == NULL_TREE);	/* takes no arguments */

  /* Regparm attribute specifies how many integer arguments are to be
     passed in registers. */
  if (is_attribute_p ("regparm", identifier))
    {
      tree cst;

      /* Exactly one argument must be present.  */
      if (! args || TREE_CODE (args) != TREE_LIST
	  || TREE_CHAIN (args) != NULL_TREE
	  || TREE_VALUE (args) == NULL_TREE)
	return 0;

      cst = TREE_VALUE (args);
      if (TREE_CODE (cst) != INTEGER_CST)
	return 0;

      /* The count must fit in [0, REGPARM_MAX].
	 NOTE(review): if TREE_INT_CST_LOW is an unsigned accessor, the
	 `< 0' test below can never be true -- confirm against this
	 tree implementation.  */
      if (TREE_INT_CST_HIGH (cst) != 0
	  || TREE_INT_CST_LOW (cst) < 0
	  || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
	return 0;

      return 1;
    }

  return 0;
}
587
588 /* Return 0 if the attributes for two types are incompatible, 1 if they
589 are compatible, and 2 if they are nearly compatible (which causes a
590 warning to be generated). */
591
592 int
593 i386_comp_type_attributes (type1, type2)
594 tree type1 ATTRIBUTE_UNUSED;
595 tree type2 ATTRIBUTE_UNUSED;
596 {
597 return 1;
598 }
599
600 \f
601 /* Value is the number of bytes of arguments automatically
602 popped when returning from a subroutine call.
603 FUNDECL is the declaration node of the function (as a tree),
604 FUNTYPE is the data type of the function (as a tree),
605 or for a library call it is an identifier node for the subroutine name.
606 SIZE is the number of bytes of arguments passed on the stack.
607
608 On the 80386, the RTD insn may be used to pop them if the number
609 of args is fixed, but if the number is variable then the caller
610 must pop them all. RTD can't be used for library calls now
611 because the library is compiled with the Unix compiler.
612 Use of RTD is a selectable option, since it is incompatible with
613 standard Unix calling sequences. If the option is not selected,
614 the caller must always pop the args.
615
616 The attribute stdcall is equivalent to RTD on a per module basis. */
617
618 int
619 i386_return_pops_args (fundecl, funtype, size)
620 tree fundecl;
621 tree funtype;
622 int size;
623 {
624 int rtd = TARGET_RTD && (!fundecl || TREE_CODE (fundecl) != IDENTIFIER_NODE);
625
626 /* Cdecl functions override -mrtd, and never pop the stack. */
627 if (! lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {
628
629 /* Stdcall functions will pop the stack if not variable args. */
630 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
631 rtd = 1;
632
633 if (rtd
634 && (TYPE_ARG_TYPES (funtype) == NULL_TREE
635 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype)))
636 == void_type_node)))
637 return size;
638 }
639
640 /* Lose any fake structure return argument. */
641 if (aggregate_value_p (TREE_TYPE (funtype)))
642 return GET_MODE_SIZE (Pmode);
643
644 return 0;
645 }
646
647 \f
/* Argument support functions.  */

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.  */

void
init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;	/* Argument info to initialize */
     tree fntype;		/* tree ptr for function decl */
     rtx libname;		/* SYMBOL_REF of library name or 0 */
{
  static CUMULATIVE_ARGS zero_cum;	/* all-zero template for *cum */
  tree param, next_param;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\ninit_cumulative_args (");
      if (fntype)
	fprintf (stderr, "fntype code = %s, ret code = %s",
		 tree_code_name[(int) TREE_CODE (fntype)],
		 tree_code_name[(int) TREE_CODE (TREE_TYPE (fntype))]);
      else
	fprintf (stderr, "no fntype");

      if (libname)
	fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  *cum = zero_cum;

  /* Set up the number of registers to use for passing arguments.
     The -mregparm= default may be overridden per-function by a
     regparm attribute on the function type.  */
  cum->nregs = i386_regparm;
  if (fntype)
    {
      tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));

      if (attr)
	cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
    }

  /* Determine if this function has variable arguments.  This is
     indicated by the last argument being 'void_type_mode' if there
     are no variable arguments.  If there are variable arguments, then
     we won't pass anything in registers */

  if (cum->nregs)
    {
      for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
	   param != 0; param = next_param)
	{
	  next_param = TREE_CHAIN (param);
	  /* Last entry not void => varargs => no register passing.  */
	  if (next_param == 0 && TREE_VALUE (param) != void_type_node)
	    cum->nregs = 0;
	}
    }

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, ", nregs=%d )\n", cum->nregs);

  return;
}
710
711 /* Update the data in CUM to advance over an argument
712 of mode MODE and data type TYPE.
713 (TYPE is null for libcalls where that information may not be available.) */
714
715 void
716 function_arg_advance (cum, mode, type, named)
717 CUMULATIVE_ARGS *cum; /* current arg information */
718 enum machine_mode mode; /* current arg mode */
719 tree type; /* type of the argument or 0 if lib support */
720 int named; /* whether or not the argument was named */
721 {
722 int bytes
723 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
724 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
725
726 if (TARGET_DEBUG_ARG)
727 fprintf (stderr,
728 "function_adv (sz=%d, wds=%2d, nregs=%d, mode=%s, named=%d)\n\n",
729 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
730
731 cum->words += words;
732 cum->nregs -= words;
733 cum->regno += words;
734
735 if (cum->nregs <= 0)
736 {
737 cum->nregs = 0;
738 cum->regno = 0;
739 }
740
741 return;
742 }
743
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int named;			/* != 0 for normal args, == 0 for ... args */
{
  rtx ret = NULL_RTX;		/* NULL_RTX means "pass on the stack" */
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  switch (mode)
    {
      /* For now, pass fp/complex values on the stack. */
    default:
      break;

      /* Integer-class modes may go in registers.  */
    case BLKmode:
    case DImode:
    case SImode:
    case HImode:
    case QImode:
      /* Only if the whole argument still fits in the remaining
	 argument registers.  */
      if (words <= cum->nregs)
	ret = gen_rtx_REG (mode, cum->regno);
      break;
    }

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr,
	       "function_arg (size=%d, wds=%2d, nregs=%d, mode=%4s, named=%d",
	       words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);

      if (ret)
	fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
      else
	fprintf (stderr, ", stack");

      fprintf (stderr, " )\n");
    }

  return ret;
}
801
802 /* For an arg passed partly in registers and partly in memory,
803 this is the number of registers used.
804 For args passed entirely in registers or entirely in memory, zero. */
805
806 int
807 function_arg_partial_nregs (cum, mode, type, named)
808 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED; /* current arg information */
809 enum machine_mode mode ATTRIBUTE_UNUSED; /* current arg mode */
810 tree type ATTRIBUTE_UNUSED; /* type of the argument or 0 if lib support */
811 int named ATTRIBUTE_UNUSED; /* != 0 for normal args, == 0 for ... args */
812 {
813 return 0;
814 }
815 \f
/* Output an insn whose source is a 386 integer register.  SRC is the
   rtx for the register, and TEMPLATE is the op-code template.  SRC may
   be either SImode or DImode.

   The template will be output with operands[0] as SRC, and operands[1]
   as a pointer to the top of the 386 stack.  So a call from floatsidf2
   would look like this:

      output_op_from_reg (operands[1], AS1 (fild%z0,%1));

   where %z0 corresponds to the caller's operands[1], and is used to
   emit the proper size suffix.

   ??? Extend this to handle HImode - a 387 can load and store HImode
   values directly. */

void
output_op_from_reg (src, template)
     rtx src;
     char *template;
{
  rtx xops[4];
  int size = GET_MODE_SIZE (GET_MODE (src));

  xops[0] = src;
  xops[1] = AT_SP (Pmode);	/* stack top after the pushes below */
  xops[2] = GEN_INT (size);	/* bytes to reclaim afterwards */
  xops[3] = stack_pointer_rtx;

  /* Push the value a word at a time, highest-numbered register word
     first, so the complete value sits at the stack top.  */
  if (size > UNITS_PER_WORD)
    {
      rtx high;

      if (size > 2 * UNITS_PER_WORD)
	{
	  high = gen_rtx_REG (SImode, REGNO (src) + 2);
	  output_asm_insn (AS1 (push%L0,%0), &high);
	}

      high = gen_rtx_REG (SImode, REGNO (src) + 1);
      output_asm_insn (AS1 (push%L0,%0), &high);
    }

  output_asm_insn (AS1 (push%L0,%0), &src);
  output_asm_insn (template, xops);

  /* Discard the pushed temporary.  */
  output_asm_insn (AS2 (add%L3,%2,%3), xops);
}
863 \f
/* Output an insn to pop a value from the 387 top-of-stack to 386
   register DEST.  The 387 register stack is popped if DIES is true.  If
   the mode of DEST is an integer mode, a `fist' integer store is done,
   otherwise a `fst' float store is done.  */

void
output_to_reg (dest, dies, scratch_mem)
     rtx dest;
     int dies;
     rtx scratch_mem;
{
  rtx xops[4];
  int size = GET_MODE_SIZE (GET_MODE (dest));

  /* The 387 can only store to memory, so bounce the value through
     caller-supplied scratch memory or a temporary on the stack.  */
  if (! scratch_mem)
    xops[0] = AT_SP (Pmode);
  else
    xops[0] = scratch_mem;

  xops[1] = stack_pointer_rtx;
  xops[2] = GEN_INT (size);	/* bytes of stack temporary */
  xops[3] = dest;

  /* Reserve the stack temporary.  */
  if (! scratch_mem)
    output_asm_insn (AS2 (sub%L1,%2,%1), xops);

  if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
    {
      if (dies)
	output_asm_insn (AS1 (fistp%z3,%y0), xops);
      else
	output_asm_insn (AS1 (fist%z3,%y0), xops);
    }

  else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
    {
      if (dies)
	output_asm_insn (AS1 (fstp%z3,%y0), xops);
      else
	{
	  if (GET_MODE (dest) == XFmode)
	    {
	      /* XFmode: store with pop and then reload, leaving the
		 387 stack as it was.  */
	      output_asm_insn (AS1 (fstp%z3,%y0), xops);
	      output_asm_insn (AS1 (fld%z3,%y0), xops);
	    }
	  else
	    output_asm_insn (AS1 (fst%z3,%y0), xops);
	}
    }

  else
    abort ();

  /* Move the stored value into DEST one word at a time; popping also
     releases the stack temporary.  */
  if (! scratch_mem)
    output_asm_insn (AS1 (pop%L0,%0), &dest);
  else
    output_asm_insn (AS2 (mov%L0,%0,%3), xops);


  if (size > UNITS_PER_WORD)
    {
      dest = gen_rtx_REG (SImode, REGNO (dest) + 1);
      if (! scratch_mem)
	output_asm_insn (AS1 (pop%L0,%0), &dest);
      else
	{
	  xops[0] = adj_offsettable_operand (xops[0], 4);
	  xops[3] = dest;
	  output_asm_insn (AS2 (mov%L0,%0,%3), xops);
	}

      if (size > 2 * UNITS_PER_WORD)
	{
	  dest = gen_rtx_REG (SImode, REGNO (dest) + 1);
	  if (! scratch_mem)
	    output_asm_insn (AS1 (pop%L0,%0), &dest);
	  else
	    {
	      xops[0] = adj_offsettable_operand (xops[0], 4);
	      output_asm_insn (AS2 (mov%L0,%0,%3), xops);
	    }
	}
    }
}
948 \f
/* Return the op-code template (and possibly emit setup insns) for a
   single-word move described by OPERANDS ([0] = dest, [1] = src).  */

char *
singlemove_string (operands)
     rtx *operands;
{
  rtx x;

  /* A store through a pre-decrement of the stack pointer is a push.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
    {
      if (XEXP (x, 0) != stack_pointer_rtx)
	abort ();
      return "push%L1 %1";
    }
  else if (GET_CODE (operands[1]) == CONST_DOUBLE)
    return output_move_const_single (operands);
  else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
    return AS2 (mov%L0,%1,%0);
  else if (CONSTANT_P (operands[1]))
    return AS2 (mov%L0,%1,%0);
  else
    {
      /* Memory-to-memory: no direct mov on the 386, so bounce the
	 value through the stack.  */
      output_asm_insn ("push%L1 %1", operands);
      return "pop%L0 %0";
    }
}
973 \f
974 /* Return a REG that occurs in ADDR with coefficient 1.
975 ADDR can be effectively incremented by incrementing REG. */
976
977 static rtx
978 find_addr_reg (addr)
979 rtx addr;
980 {
981 while (GET_CODE (addr) == PLUS)
982 {
983 if (GET_CODE (XEXP (addr, 0)) == REG)
984 addr = XEXP (addr, 0);
985 else if (GET_CODE (XEXP (addr, 1)) == REG)
986 addr = XEXP (addr, 1);
987 else if (CONSTANT_P (XEXP (addr, 0)))
988 addr = XEXP (addr, 1);
989 else if (CONSTANT_P (XEXP (addr, 1)))
990 addr = XEXP (addr, 0);
991 else
992 abort ();
993 }
994
995 if (GET_CODE (addr) == REG)
996 return addr;
997 abort ();
998 }
999 \f
/* Output an insn to add the constant N to the register X.
   Emits nothing when N is 0.  */

static void
asm_add (n, x)
     int n;
     rtx x;
{
  rtx xops[2];
  xops[0] = x;

  /* +/-1 are cheapest as inc/dec.  */
  if (n == -1)
    output_asm_insn (AS1 (dec%L0,%0), xops);
  else if (n == 1)
    output_asm_insn (AS1 (inc%L0,%0), xops);
  else if (n < 0 || n == 128)
    {
      /* Subtract the negated constant.  The n == 128 special case is
	 presumably because -128 still fits a sign-extended 8-bit
	 immediate while +128 does not -- confirm.  */
      xops[1] = GEN_INT (-n);
      output_asm_insn (AS2 (sub%L0,%1,%0), xops);
    }
  else if (n > 0)
    {
      xops[1] = GEN_INT (n);
      output_asm_insn (AS2 (add%L0,%1,%0), xops);
    }
}
1025 \f
1026 /* Output assembler code to perform a doubleword move insn
1027 with operands OPERANDS. */
1028
1029 char *
1030 output_move_double (operands)
1031 rtx *operands;
1032 {
1033 enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
1034 rtx latehalf[2];
1035 rtx middlehalf[2];
1036 rtx xops[2];
1037 rtx addreg0 = 0, addreg1 = 0;
1038 int dest_overlapped_low = 0;
1039 int size = GET_MODE_SIZE (GET_MODE (operands[0]));
1040
1041 middlehalf[0] = 0;
1042 middlehalf[1] = 0;
1043
1044 /* First classify both operands. */
1045
1046 if (REG_P (operands[0]))
1047 optype0 = REGOP;
1048 else if (offsettable_memref_p (operands[0]))
1049 optype0 = OFFSOP;
1050 else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
1051 optype0 = POPOP;
1052 else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1053 optype0 = PUSHOP;
1054 else if (GET_CODE (operands[0]) == MEM)
1055 optype0 = MEMOP;
1056 else
1057 optype0 = RNDOP;
1058
1059 if (REG_P (operands[1]))
1060 optype1 = REGOP;
1061 else if (CONSTANT_P (operands[1]))
1062 optype1 = CNSTOP;
1063 else if (offsettable_memref_p (operands[1]))
1064 optype1 = OFFSOP;
1065 else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
1066 optype1 = POPOP;
1067 else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
1068 optype1 = PUSHOP;
1069 else if (GET_CODE (operands[1]) == MEM)
1070 optype1 = MEMOP;
1071 else
1072 optype1 = RNDOP;
1073
1074 /* Check for the cases that the operand constraints are not
1075 supposed to allow to happen. Abort if we get one,
1076 because generating code for these cases is painful. */
1077
1078 if (optype0 == RNDOP || optype1 == RNDOP)
1079 abort ();
1080
1081 /* If one operand is decrementing and one is incrementing
1082 decrement the former register explicitly
1083 and change that operand into ordinary indexing. */
1084
1085 if (optype0 == PUSHOP && optype1 == POPOP)
1086 {
1087 /* ??? Can this ever happen on i386? */
1088 operands[0] = XEXP (XEXP (operands[0], 0), 0);
1089 asm_add (-size, operands[0]);
1090 if (GET_MODE (operands[1]) == XFmode)
1091 operands[0] = gen_rtx_MEM (XFmode, operands[0]);
1092 else if (GET_MODE (operands[0]) == DFmode)
1093 operands[0] = gen_rtx_MEM (DFmode, operands[0]);
1094 else
1095 operands[0] = gen_rtx_MEM (DImode, operands[0]);
1096 optype0 = OFFSOP;
1097 }
1098
1099 if (optype0 == POPOP && optype1 == PUSHOP)
1100 {
1101 /* ??? Can this ever happen on i386? */
1102 operands[1] = XEXP (XEXP (operands[1], 0), 0);
1103 asm_add (-size, operands[1]);
1104 if (GET_MODE (operands[1]) == XFmode)
1105 operands[1] = gen_rtx_MEM (XFmode, operands[1]);
1106 else if (GET_MODE (operands[1]) == DFmode)
1107 operands[1] = gen_rtx_MEM (DFmode, operands[1]);
1108 else
1109 operands[1] = gen_rtx_MEM (DImode, operands[1]);
1110 optype1 = OFFSOP;
1111 }
1112
1113 /* If an operand is an unoffsettable memory ref, find a register
1114 we can increment temporarily to make it refer to the second word. */
1115
1116 if (optype0 == MEMOP)
1117 addreg0 = find_addr_reg (XEXP (operands[0], 0));
1118
1119 if (optype1 == MEMOP)
1120 addreg1 = find_addr_reg (XEXP (operands[1], 0));
1121
1122 /* Ok, we can do one word at a time.
1123 Normally we do the low-numbered word first,
1124 but if either operand is autodecrementing then we
1125 do the high-numbered word first.
1126
1127 In either case, set up in LATEHALF the operands to use
1128 for the high-numbered word and in some cases alter the
1129 operands in OPERANDS to be suitable for the low-numbered word. */
1130
1131 if (size == 12)
1132 {
1133 if (optype0 == REGOP)
1134 {
1135 middlehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
1136 latehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 2);
1137 }
1138 else if (optype0 == OFFSOP)
1139 {
1140 middlehalf[0] = adj_offsettable_operand (operands[0], 4);
1141 latehalf[0] = adj_offsettable_operand (operands[0], 8);
1142 }
1143 else
1144 {
1145 middlehalf[0] = operands[0];
1146 latehalf[0] = operands[0];
1147 }
1148
1149 if (optype1 == REGOP)
1150 {
1151 middlehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
1152 latehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 2);
1153 }
1154 else if (optype1 == OFFSOP)
1155 {
1156 middlehalf[1] = adj_offsettable_operand (operands[1], 4);
1157 latehalf[1] = adj_offsettable_operand (operands[1], 8);
1158 }
1159 else if (optype1 == CNSTOP)
1160 {
1161 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1162 {
1163 REAL_VALUE_TYPE r; long l[3];
1164
1165 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1166 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
1167 operands[1] = GEN_INT (l[0]);
1168 middlehalf[1] = GEN_INT (l[1]);
1169 latehalf[1] = GEN_INT (l[2]);
1170 }
1171 else if (CONSTANT_P (operands[1]))
1172 /* No non-CONST_DOUBLE constant should ever appear here. */
1173 abort ();
1174 }
1175 else
1176 {
1177 middlehalf[1] = operands[1];
1178 latehalf[1] = operands[1];
1179 }
1180 }
1181
1182 else
1183 {
1184 /* Size is not 12. */
1185
1186 if (optype0 == REGOP)
1187 latehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
1188 else if (optype0 == OFFSOP)
1189 latehalf[0] = adj_offsettable_operand (operands[0], 4);
1190 else
1191 latehalf[0] = operands[0];
1192
1193 if (optype1 == REGOP)
1194 latehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
1195 else if (optype1 == OFFSOP)
1196 latehalf[1] = adj_offsettable_operand (operands[1], 4);
1197 else if (optype1 == CNSTOP)
1198 split_double (operands[1], &operands[1], &latehalf[1]);
1199 else
1200 latehalf[1] = operands[1];
1201 }
1202
1203 /* If insn is effectively movd N (sp),-(sp) then we will do the
1204 high word first. We should use the adjusted operand 1
1205 (which is N+4 (sp) or N+8 (sp))
1206 for the low word and middle word as well,
1207 to compensate for the first decrement of sp. */
1208 if (optype0 == PUSHOP
1209 && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
1210 && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
1211 middlehalf[1] = operands[1] = latehalf[1];
1212
1213 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1214 if the upper part of reg N does not appear in the MEM, arrange to
1215 emit the move late-half first. Otherwise, compute the MEM address
1216 into the upper part of N and use that as a pointer to the memory
1217 operand. */
1218 if (optype0 == REGOP
1219 && (optype1 == OFFSOP || optype1 == MEMOP))
1220 {
1221 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1222 && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1223 {
1224 /* If both halves of dest are used in the src memory address,
1225 compute the address into latehalf of dest. */
1226 compadr:
1227 xops[0] = latehalf[0];
1228 xops[1] = XEXP (operands[1], 0);
1229 output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
1230 if (GET_MODE (operands[1]) == XFmode)
1231 {
1232 operands[1] = gen_rtx_MEM (XFmode, latehalf[0]);
1233 middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
1234 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1235 }
1236 else
1237 {
1238 operands[1] = gen_rtx_MEM (DImode, latehalf[0]);
1239 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1240 }
1241 }
1242
1243 else if (size == 12
1244 && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
1245 {
1246 /* Check for two regs used by both source and dest. */
1247 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1248 || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1249 goto compadr;
1250
1251 /* JRV says this can't happen: */
1252 if (addreg0 || addreg1)
1253 abort ();
1254
1255 /* Only the middle reg conflicts; simply put it last. */
1256 output_asm_insn (singlemove_string (operands), operands);
1257 output_asm_insn (singlemove_string (latehalf), latehalf);
1258 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1259 return "";
1260 }
1261
1262 else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
1263 /* If the low half of dest is mentioned in the source memory
1264 address, the arrange to emit the move late half first. */
1265 dest_overlapped_low = 1;
1266 }
1267
1268 /* If one or both operands autodecrementing,
1269 do the two words, high-numbered first. */
1270
1271 /* Likewise, the first move would clobber the source of the second one,
1272 do them in the other order. This happens only for registers;
1273 such overlap can't happen in memory unless the user explicitly
1274 sets it up, and that is an undefined circumstance. */
1275
1276 #if 0
1277 if (optype0 == PUSHOP || optype1 == PUSHOP
1278 || (optype0 == REGOP && optype1 == REGOP
1279 && REGNO (operands[0]) == REGNO (latehalf[1]))
1280 || dest_overlapped_low)
1281 #endif
1282
1283 if (optype0 == PUSHOP || optype1 == PUSHOP
1284 || (optype0 == REGOP && optype1 == REGOP
1285 && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
1286 || REGNO (operands[0]) == REGNO (latehalf[1])))
1287 || dest_overlapped_low)
1288 {
1289 /* Make any unoffsettable addresses point at high-numbered word. */
1290 if (addreg0)
1291 asm_add (size-4, addreg0);
1292 if (addreg1)
1293 asm_add (size-4, addreg1);
1294
1295 /* Do that word. */
1296 output_asm_insn (singlemove_string (latehalf), latehalf);
1297
1298 /* Undo the adds we just did. */
1299 if (addreg0)
1300 asm_add (-4, addreg0);
1301 if (addreg1)
1302 asm_add (-4, addreg1);
1303
1304 if (size == 12)
1305 {
1306 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1307 if (addreg0)
1308 asm_add (-4, addreg0);
1309 if (addreg1)
1310 asm_add (-4, addreg1);
1311 }
1312
1313 /* Do low-numbered word. */
1314 return singlemove_string (operands);
1315 }
1316
1317 /* Normal case: do the two words, low-numbered first. */
1318
1319 output_asm_insn (singlemove_string (operands), operands);
1320
1321 /* Do the middle one of the three words for long double */
1322 if (size == 12)
1323 {
1324 if (addreg0)
1325 asm_add (4, addreg0);
1326 if (addreg1)
1327 asm_add (4, addreg1);
1328
1329 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1330 }
1331
1332 /* Make any unoffsettable addresses point at high-numbered word. */
1333 if (addreg0)
1334 asm_add (4, addreg0);
1335 if (addreg1)
1336 asm_add (4, addreg1);
1337
1338 /* Do that word. */
1339 output_asm_insn (singlemove_string (latehalf), latehalf);
1340
1341 /* Undo the adds we just did. */
1342 if (addreg0)
1343 asm_add (4-size, addreg0);
1344 if (addreg1)
1345 asm_add (4-size, addreg1);
1346
1347 return "";
1348 }
1349 \f
1350 #define MAX_TMPS 2 /* max temporary registers used */
1351
1352 /* Output the appropriate code to move push memory on the stack */
1353
char *
output_move_pushmem (operands, insn, length, tmp_start, n_operands)
     rtx operands[];
     rtx insn;
     int length;
     int tmp_start;
     int n_operands;
{
  /* Emit asm that pushes LENGTH bytes (a multiple of 4) of the
     offsettable memory source operands[1] onto the stack, highest
     word first.  Registers among operands[TMP_START..N_OPERANDS-1]
     that do not overlap the source are used as scratch registers:
     words are then loaded into the scratch regs first and pushed from
     there.  INSN is used only for error reporting.  Returns "" since
     all output is done directly with output_asm_insn.  */

  struct
  {
    char *load;			/* asm template to load one word into a temp */
    char *push;			/* asm template to push that temp */
    rtx xops[2];		/* [0] = memory word, [1] = temp register */
  } tmp_info[MAX_TMPS];

  rtx src = operands[1];
  int max_tmps = 0;		/* number of usable scratch registers */
  int offset = 0;		/* byte offset of the word being pushed */
  int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
  int stack_offset = 0;		/* correction when SRC is sp-relative */
  int i, num_tmps;
  rtx xops[1];

  if (! offsettable_memref_p (src))
    fatal_insn ("Source is not offsettable", insn);

  if ((length & 3) != 0)
    fatal_insn ("Pushing non-word aligned size", insn);

  /* Figure out which temporary registers we have available */
  for (i = tmp_start; i < n_operands; i++)
    {
      if (GET_CODE (operands[i]) == REG)
	{
	  /* A register mentioned in the source would be clobbered if
	     we used it as a temporary.  */
	  if (reg_overlap_mentioned_p (operands[i], src))
	    continue;

	  tmp_info[ max_tmps++ ].xops[1] = operands[i];
	  if (max_tmps == MAX_TMPS)
	    break;
	}
    }

  if (max_tmps == 0)
    /* No scratch registers: push each word straight from memory.  */
    for (offset = length - 4; offset >= 0; offset -= 4)
      {
	xops[0] = adj_offsettable_operand (src, offset + stack_offset);
	output_asm_insn (AS1(push%L0,%0), xops);

	/* Each push moves sp down by 4, so an sp-relative source
	   address must be adjusted by 4 for the next word.  */
	if (stack_p)
	  stack_offset += 4;
      }

  else
    /* Fill the temporaries with loads, then emit the matching pushes;
       repeat until all words have been pushed.  */
    for (offset = length - 4; offset >= 0; )
      {
	for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
	  {
	    tmp_info[num_tmps].load = AS2(mov%L0,%0,%1);
	    tmp_info[num_tmps].push = AS1(push%L0,%1);
	    tmp_info[num_tmps].xops[0]
	      = adj_offsettable_operand (src, offset + stack_offset);
	    offset -= 4;
	  }

	for (i = 0; i < num_tmps; i++)
	  output_asm_insn (tmp_info[i].load, tmp_info[i].xops);

	for (i = 0; i < num_tmps; i++)
	  output_asm_insn (tmp_info[i].push, tmp_info[i].xops);

	/* Compensate an sp-relative source for the pushes just done.  */
	if (stack_p)
	  stack_offset += 4*num_tmps;
      }

  return "";
}
1430 \f
1431 /* Output the appropriate code to move data between two memory locations */
1432
char *
output_move_memory (operands, insn, length, tmp_start, n_operands)
     rtx operands[];
     rtx insn;
     int length;
     int tmp_start;
     int n_operands;
{
  /* Emit asm that copies LENGTH bytes from offsettable memory
     operands[1] to offsettable memory operands[0], using scratch
     registers found among operands[TMP_START..N_OPERANDS-1].  Words
     (then a halfword, then a byte, as LENGTH requires) are copied
     lowest address first; an odd LENGTH additionally needs a
     byte-addressable (QI_REG_P) scratch register.  INSN is used only
     for error reporting.  Returns "" since all output is done with
     output_asm_insn.  */

  struct
  {
    char *load;			/* asm template to load a piece into a temp */
    char *store;		/* asm template to store that temp */
    rtx xops[3];		/* [0] = dest, [1] = src, [2] = temp reg */
  } tmp_info[MAX_TMPS];

  rtx dest = operands[0];
  rtx src = operands[1];
  rtx qi_tmp = NULL_RTX;	/* byte-capable temp for a trailing byte */
  int max_tmps = 0;
  int offset = 0;
  int i, num_tmps;
  rtx xops[3];

  /* A store through an auto-incremented stack pointer is really a
     push; hand it off to the push routine.
     NOTE(review): i386 pushes normally appear in rtl as
     (mem (pre_dec sp)); verify that PRE_INC is really the code that
     reaches here, otherwise this delegation is dead.  */
  if (GET_CODE (dest) == MEM
      && GET_CODE (XEXP (dest, 0)) == PRE_INC
      && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
    return output_move_pushmem (operands, insn, length, tmp_start, n_operands);

  if (! offsettable_memref_p (src))
    fatal_insn ("Source is not offsettable", insn);

  if (! offsettable_memref_p (dest))
    fatal_insn ("Destination is not offsettable", insn);

  /* Figure out which temporary registers we have available */
  for (i = tmp_start; i < n_operands; i++)
    {
      if (GET_CODE (operands[i]) == REG)
	{
	  /* Remember one byte-addressable register in case a single
	     trailing byte must be copied.  */
	  if ((length & 1) != 0 && qi_tmp == 0 && QI_REG_P (operands[i]))
	    qi_tmp = operands[i];

	  if (reg_overlap_mentioned_p (operands[i], dest))
	    fatal_insn ("Temporary register overlaps the destination", insn);

	  if (reg_overlap_mentioned_p (operands[i], src))
	    fatal_insn ("Temporary register overlaps the source", insn);

	  tmp_info[max_tmps++].xops[2] = operands[i];
	  if (max_tmps == MAX_TMPS)
	    break;
	}
    }

  if (max_tmps == 0)
    fatal_insn ("No scratch registers were found to do memory->memory moves",
		insn);

  if ((length & 1) != 0)
    {
      if (qi_tmp == 0)
	fatal_insn ("No byte register found when moving odd # of bytes.",
		    insn);
    }

  /* Copy in batches: schedule up to MAX_TMPS loads, then emit the
     matching stores, until fewer than 2 bytes remain.  */
  while (length > 1)
    {
      for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
	{
	  if (length >= 4)
	    {
	      tmp_info[num_tmps].load = AS2(mov%L0,%1,%2);
	      tmp_info[num_tmps].store = AS2(mov%L0,%2,%0);
	      tmp_info[num_tmps].xops[0]
		= adj_offsettable_operand (dest, offset);
	      tmp_info[num_tmps].xops[1]
		= adj_offsettable_operand (src, offset);

	      offset += 4;
	      length -= 4;
	    }

	  else if (length >= 2)
	    {
	      tmp_info[num_tmps].load = AS2(mov%W0,%1,%2);
	      tmp_info[num_tmps].store = AS2(mov%W0,%2,%0);
	      tmp_info[num_tmps].xops[0]
		= adj_offsettable_operand (dest, offset);
	      tmp_info[num_tmps].xops[1]
		= adj_offsettable_operand (src, offset);

	      offset += 2;
	      length -= 2;
	    }
	  else
	    break;
	}

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].load, tmp_info[i].xops);

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
    }

  /* Copy the final odd byte, if any, through the byte register.  */
  if (length == 1)
    {
      xops[0] = adj_offsettable_operand (dest, offset);
      xops[1] = adj_offsettable_operand (src, offset);
      xops[2] = qi_tmp;
      output_asm_insn (AS2(mov%B0,%1,%2), xops);
      output_asm_insn (AS2(mov%B0,%2,%0), xops);
    }

  return "";
}
1549 \f
int
standard_80387_constant_p (x)
     rtx x;
{
  /* Classify the floating constant X as one the 80387 can materialize
     with a single instruction: return 1 for +0.0 (fldz), 2 for 1.0
     (fld1), 0 for anything else.  */
#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  REAL_VALUE_TYPE d;
  jmp_buf handler;
  int is0, is1;

  /* Install a float handler so that if the REAL_VALUE conversion
     below raises a floating-point trap, we simply return 0 instead
     of dying.  */
  if (setjmp (handler))
    return 0;

  set_float_handler (handler);
  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
  /* -0.0 must be excluded: fldz loads +0.0 only.  */
  is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
  is1 = REAL_VALUES_EQUAL (d, dconst1);
  set_float_handler (NULL_PTR);

  if (is0)
    return 1;

  if (is1)
    return 2;

  /* Note that on the 80387, other constants, such as pi,
     are much slower to load as standard constants
     than to load from doubles in memory!  */
#endif

  return 0;
}
1581
1582 char *
1583 output_move_const_single (operands)
1584 rtx *operands;
1585 {
1586 if (FP_REG_P (operands[0]))
1587 {
1588 int conval = standard_80387_constant_p (operands[1]);
1589
1590 if (conval == 1)
1591 return "fldz";
1592
1593 if (conval == 2)
1594 return "fld1";
1595 }
1596
1597 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1598 {
1599 REAL_VALUE_TYPE r; long l;
1600
1601 if (GET_MODE (operands[1]) == XFmode)
1602 abort ();
1603
1604 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1605 REAL_VALUE_TO_TARGET_SINGLE (r, l);
1606 operands[1] = GEN_INT (l);
1607 }
1608
1609 return singlemove_string (operands);
1610 }
1611 \f
1612 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1613 reference and a constant. */
1614
1615 int
1616 symbolic_operand (op, mode)
1617 register rtx op;
1618 enum machine_mode mode ATTRIBUTE_UNUSED;
1619 {
1620 switch (GET_CODE (op))
1621 {
1622 case SYMBOL_REF:
1623 case LABEL_REF:
1624 return 1;
1625
1626 case CONST:
1627 op = XEXP (op, 0);
1628 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1629 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1630 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1631
1632 default:
1633 return 0;
1634 }
1635 }
1636
1637 /* Test for a valid operand for a call instruction.
1638 Don't allow the arg pointer register or virtual regs
1639 since they may change into reg + const, which the patterns
1640 can't handle yet. */
1641
1642 int
1643 call_insn_operand (op, mode)
1644 rtx op;
1645 enum machine_mode mode ATTRIBUTE_UNUSED;
1646 {
1647 if (GET_CODE (op) == MEM
1648 && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1649 /* This makes a difference for PIC. */
1650 && general_operand (XEXP (op, 0), Pmode))
1651 || (GET_CODE (XEXP (op, 0)) == REG
1652 && XEXP (op, 0) != arg_pointer_rtx
1653 && ! (REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1654 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1655 return 1;
1656
1657 return 0;
1658 }
1659
1660 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1661 even if pic. */
1662
1663 int
1664 expander_call_insn_operand (op, mode)
1665 rtx op;
1666 enum machine_mode mode ATTRIBUTE_UNUSED;
1667 {
1668 if (GET_CODE (op) == MEM
1669 && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1670 || (GET_CODE (XEXP (op, 0)) == REG
1671 && XEXP (op, 0) != arg_pointer_rtx
1672 && ! (REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1673 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1674 return 1;
1675
1676 return 0;
1677 }
1678
1679 /* Return 1 if OP is a comparison operator that can use the condition code
1680 generated by an arithmetic operation. */
1681
1682 int
1683 arithmetic_comparison_operator (op, mode)
1684 register rtx op;
1685 enum machine_mode mode;
1686 {
1687 enum rtx_code code;
1688
1689 if (mode != VOIDmode && mode != GET_MODE (op))
1690 return 0;
1691
1692 code = GET_CODE (op);
1693 if (GET_RTX_CLASS (code) != '<')
1694 return 0;
1695
1696 return (code != GT && code != LE);
1697 }
1698
1699 int
1700 ix86_logical_operator (op, mode)
1701 register rtx op;
1702 enum machine_mode mode ATTRIBUTE_UNUSED;
1703 {
1704 return GET_CODE (op) == AND || GET_CODE (op) == IOR || GET_CODE (op) == XOR;
1705 }
1706
1707 \f
1708 /* Returns 1 if OP contains a symbol reference */
1709
1710 int
1711 symbolic_reference_mentioned_p (op)
1712 rtx op;
1713 {
1714 register char *fmt;
1715 register int i;
1716
1717 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1718 return 1;
1719
1720 fmt = GET_RTX_FORMAT (GET_CODE (op));
1721 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1722 {
1723 if (fmt[i] == 'E')
1724 {
1725 register int j;
1726
1727 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1728 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1729 return 1;
1730 }
1731
1732 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1733 return 1;
1734 }
1735
1736 return 0;
1737 }
1738 \f
1739 /* Attempt to expand a binary operator. Make the expansion closer to the
1740 actual machine, then just general_operand, which will allow 3 separate
1741 memory references (one output, two input) in a single insn. Return
1742 whether the insn fails, or succeeds. */
1743
int
ix86_expand_binary_operator (code, mode, operands)
     enum rtx_code code;
     enum machine_mode mode;
     rtx operands[];
{
  /* Fix up OPERANDS for binary operator CODE in MODE so that the insn
     satisfies ix86_binary_operator_ok: at most one memory input, and
     no immediate first input for a non-commutative operator.  May
     emit moves into fresh pseudo registers.  Returns TRUE if a valid
     insn can be generated, FALSE if the caller must give up.  */
  int modified;

  /* Recognize <var1> = <value> <op> <var1> for commutative operators.
     Swapping also moves an immediate into operands[2], where the
     patterns can accept it.  */
  if (GET_RTX_CLASS (code) == 'c'
      && (rtx_equal_p (operands[0], operands[2])
	  || immediate_operand (operands[1], mode)))
    {
      rtx temp = operands[1];
      operands[1] = operands[2];
      operands[2] = temp;
    }

  /* If optimizing, copy to regs to improve CSE */
  if (TARGET_PSEUDO && optimize
      && ((reload_in_progress | reload_completed) == 0))
    {
      if (GET_CODE (operands[1]) == MEM
	  && ! rtx_equal_p (operands[0], operands[1]))
	operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);

      if (GET_CODE (operands[2]) == MEM)
	operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);

      /* MINUS is not commutative, so an immediate first operand was
	 not swapped above; load it into a register instead.  */
      if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
	{
	  rtx temp = gen_reg_rtx (GET_MODE (operands[0]));

	  emit_move_insn (temp, operands[1]);
	  operands[1] = temp;
	  return TRUE;
	}
    }

  if (!ix86_binary_operator_ok (code, mode, operands))
    {
      /* If not optimizing, try to make a valid insn (optimize code
	 previously did this above to improve chances of CSE) */

      if ((! TARGET_PSEUDO || !optimize)
	  && ((reload_in_progress | reload_completed) == 0)
	  && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
	{
	  modified = FALSE;
	  if (GET_CODE (operands[1]) == MEM
	      && ! rtx_equal_p (operands[0], operands[1]))
	    {
	      operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
	      modified = TRUE;
	    }

	  if (GET_CODE (operands[2]) == MEM)
	    {
	      operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
	      modified = TRUE;
	    }

	  /* Same MINUS-with-immediate fixup as in the optimizing
	     path above.  */
	  if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (operands[0]));

	      emit_move_insn (temp, operands[1]);
	      operands[1] = temp;
	      return TRUE;
	    }

	  /* Re-check: if we changed something and the operands are
	     still not valid, fail.  */
	  if (modified && ! ix86_binary_operator_ok (code, mode, operands))
	    return FALSE;
	}
      else
	return FALSE;
    }

  return TRUE;
}
1824 \f
1825 /* Return TRUE or FALSE depending on whether the binary operator meets the
1826 appropriate constraints. */
1827
1828 int
1829 ix86_binary_operator_ok (code, mode, operands)
1830 enum rtx_code code;
1831 enum machine_mode mode ATTRIBUTE_UNUSED;
1832 rtx operands[3];
1833 {
1834 return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1835 && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1836 }
1837 \f
1838 /* Attempt to expand a unary operator. Make the expansion closer to the
1839 actual machine, then just general_operand, which will allow 2 separate
1840 memory references (one output, one input) in a single insn. Return
1841 whether the insn fails, or succeeds. */
1842
1843 int
1844 ix86_expand_unary_operator (code, mode, operands)
1845 enum rtx_code code;
1846 enum machine_mode mode;
1847 rtx operands[];
1848 {
1849 /* If optimizing, copy to regs to improve CSE */
1850 if (TARGET_PSEUDO
1851 && optimize
1852 && ((reload_in_progress | reload_completed) == 0)
1853 && GET_CODE (operands[1]) == MEM)
1854 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1855
1856 if (! ix86_unary_operator_ok (code, mode, operands))
1857 {
1858 if ((! TARGET_PSEUDO || optimize == 0)
1859 && ((reload_in_progress | reload_completed) == 0)
1860 && GET_CODE (operands[1]) == MEM)
1861 {
1862 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1863 if (! ix86_unary_operator_ok (code, mode, operands))
1864 return FALSE;
1865 }
1866 else
1867 return FALSE;
1868 }
1869
1870 return TRUE;
1871 }
1872 \f
1873 /* Return TRUE or FALSE depending on whether the unary operator meets the
1874 appropriate constraints. */
1875
int
ix86_unary_operator_ok (code, mode, operands)
     enum rtx_code code ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx operands[2] ATTRIBUTE_UNUSED;
{
  /* Every operand combination is currently acceptable for the unary
     operators; this always succeeds and exists as the counterpart of
     ix86_binary_operator_ok.  */
  return TRUE;
}
1884 \f
/* State shared between asm_output_function_prefix, function_prologue
   and load_pic_register for the -fpic prologue label: the label rtx,
   its assembler name, and a counter used to generate a unique name
   for each such label.  */
static rtx pic_label_rtx;
static char pic_label_name [256];
static int pic_label_no = 0;
1888
1889 /* This function generates code for -fpic that loads %ebx with
1890 the return address of the caller and then returns. */
1891
void
asm_output_function_prefix (file, name)
     FILE *file;
     char *name ATTRIBUTE_UNUSED;
{
  /* For -fpic with TARGET_DEEP_BRANCH_PREDICTION, emit before the
     function a tiny helper routine that loads the caller's return
     address (the word at the top of the stack) into the PIC register
     and returns.  The prologue calls this helper, so every call gets
     a matching ret, which keeps return-address branch prediction
     happy.  */
  rtx xops[2];
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  xops[0] = pic_offset_table_rtx;
  xops[1] = stack_pointer_rtx;

  /* Deep branch prediction favors having a return for every call. */
  if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
    {
      tree prologue_node;

      /* Create the label and its name lazily; load_pic_register uses
	 the same label to emit the call to this helper.  */
      if (pic_label_rtx == 0)
	{
	  pic_label_rtx = gen_label_rtx ();
	  ASM_GENERATE_INTERNAL_LABEL (pic_label_name, "LPR", pic_label_no++);
	  LABEL_NAME (pic_label_rtx) = pic_label_name;
	}

      /* Declare the helper as a function so the assembler and
	 debugger treat it as one.  */
      prologue_node = make_node (FUNCTION_DECL);
      DECL_RESULT (prologue_node) = 0;
#ifdef ASM_DECLARE_FUNCTION_NAME
      ASM_DECLARE_FUNCTION_NAME (file, pic_label_name, prologue_node);
#endif
      /* %0 = PIC register, %1 = stack pointer: fetch the return
	 address pushed by the call into the PIC register.  */
      output_asm_insn ("movl (%1),%0", xops);
      output_asm_insn ("ret", xops);
    }
}
1924
1925 /* Generate the assembly code for function entry.
1926 FILE is an stdio stream to output the code to.
1927 SIZE is an int: how many units of temporary storage to allocate. */
1928
1929 void
1930 function_prologue (file, size)
1931 FILE *file ATTRIBUTE_UNUSED;
1932 int size ATTRIBUTE_UNUSED;
1933 {
1934 if (TARGET_SCHEDULE_PROLOGUE)
1935 {
1936 pic_label_rtx = 0;
1937 return;
1938 }
1939
1940 ix86_prologue (0);
1941 }
1942
1943 /* Expand the prologue into a bunch of separate insns. */
1944
1945 void
1946 ix86_expand_prologue ()
1947 {
1948 if (! TARGET_SCHEDULE_PROLOGUE)
1949 return;
1950
1951 ix86_prologue (1);
1952 }
1953
void
load_pic_register (do_rtl)
     int do_rtl;
{
  /* Emit code (rtl insns if DO_RTL, asm text otherwise) that loads
     the PIC register with the address of the global offset table.
     With TARGET_DEEP_BRANCH_PREDICTION this calls the helper emitted
     by asm_output_function_prefix, so every call has a matching ret;
     otherwise it uses the classic call-next-instruction / pop
     sequence to obtain the pc.  */
  rtx xops[4];

  if (TARGET_DEEP_BRANCH_PREDICTION)
    {
      xops[0] = pic_offset_table_rtx;
      /* Create the helper's label lazily (asm_output_function_prefix
	 may already have created it).  */
      if (pic_label_rtx == 0)
	{
	  pic_label_rtx = gen_label_rtx ();
	  ASM_GENERATE_INTERNAL_LABEL (pic_label_name, "LPR", pic_label_no++);
	  LABEL_NAME (pic_label_rtx) = pic_label_name;
	}

      xops[1] = gen_rtx_MEM (QImode,
			     gen_rtx (SYMBOL_REF, Pmode,
				      LABEL_NAME (pic_label_rtx)));

      if (do_rtl)
	{
	  emit_insn (gen_prologue_get_pc (xops[0], xops[1]));
	  emit_insn (gen_prologue_set_got (xops[0],
					   gen_rtx (SYMBOL_REF, Pmode,
						    "$_GLOBAL_OFFSET_TABLE_"),
					   xops[1]));
	}
      else
	{
	  /* Call the helper (which leaves the pc in the PIC register),
	     then add the GOT displacement.  */
	  output_asm_insn (AS1 (call,%X1), xops);
	  output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
	  pic_label_rtx = 0;
	}
    }

  else
    {
      xops[0] = pic_offset_table_rtx;
      xops[1] = gen_label_rtx ();

      if (do_rtl)
	{
	  /* We can't put a raw CODE_LABEL into the RTL, and we can't emit
	     a new CODE_LABEL after reload, so we need a single pattern to
	     emit the 3 necessary instructions.  */
	  emit_insn (gen_prologue_get_pc_and_set_got (xops[0]));
	}
      else
	{
	  /* Call the next instruction, pop the return address into the
	     PIC register, then add the pc-relative GOT displacement.  */
	  output_asm_insn (AS1 (call,%P1), xops);
	  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
				     CODE_LABEL_NUMBER (xops[1]));
	  output_asm_insn (AS1 (pop%L0,%0), xops);
	  output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
	}
    }

  /* When -fpic, we must emit a scheduling barrier, so that the instruction
     that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
     moved before any instruction which implicitly uses the got.  */

  if (do_rtl)
    emit_insn (gen_blockage ());
}
2019
/* Emit the function prologue, either as rtl insns (DO_RTL nonzero) or
   as assembly text.  Saves and sets up the frame pointer when needed,
   allocates the frame (via `_alloca' when stack probing is enabled and
   the frame is large), pushes the live call-saved registers, loads the
   PIC register if used, and in the text case records dwarf2 call-frame
   information as it goes.  */
static void
ix86_prologue (do_rtl)
     int do_rtl;
{
  register int regno;
  int limit;
  rtx xops[4];
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  long tsize = get_frame_size ();
  rtx insn;
  /* dwarf2 CFI bookkeeping: cfa_offset tracks the CFA's distance from
     the stack pointer; cfa_store_offset tracks how far sp has moved
     from its incoming value as words are pushed.  */
  int cfa_offset = INCOMING_FRAME_SP_OFFSET, cfa_store_offset = cfa_offset;

  xops[0] = stack_pointer_rtx;
  xops[1] = frame_pointer_rtx;
  xops[2] = GEN_INT (tsize);

  /* Push the old frame pointer and copy the stack pointer into it.  */
  if (frame_pointer_needed)
    {
      if (do_rtl)
	{
	  insn = emit_insn (gen_rtx (SET, VOIDmode,
				     gen_rtx_MEM (SImode,
						  gen_rtx (PRE_DEC, SImode,
							   stack_pointer_rtx)),
				     frame_pointer_rtx));

	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_move_insn (xops[1], xops[0]);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      else
	{
	  output_asm_insn ("push%L1 %1", xops);
#ifdef INCOMING_RETURN_ADDR_RTX
	  if (dwarf2out_do_frame ())
	    {
	      char *l = dwarf2out_cfi_label ();

	      /* The push moved sp down a word; the CFA follows sp
		 until the frame pointer takes over below.  */
	      cfa_store_offset += 4;
	      cfa_offset = cfa_store_offset;
	      dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, cfa_offset);
	      dwarf2out_reg_save (l, FRAME_POINTER_REGNUM, - cfa_store_offset);
	    }
#endif

	  output_asm_insn (AS2 (mov%L0,%0,%1), xops);
#ifdef INCOMING_RETURN_ADDR_RTX
	  if (dwarf2out_do_frame ())
	    /* From here on the CFA is computed from the frame pointer,
	       which no longer moves.  */
	    dwarf2out_def_cfa ("", FRAME_POINTER_REGNUM, cfa_offset);
#endif
	}
    }

  /* Allocate TSIZE bytes of frame: nothing for an empty frame, a
     simple subtract for a small frame (or when not stack probing),
     otherwise a call to `_alloca' with the size in register 0.  */
  if (tsize == 0)
    ;
  else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
    {
      if (do_rtl)
	{
	  insn = emit_insn (gen_prologue_set_stack_ptr (xops[2]));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else
	{
	  output_asm_insn (AS2 (sub%L0,%2,%0), xops);
#ifdef INCOMING_RETURN_ADDR_RTX
	  if (dwarf2out_do_frame ())
	    {
	      cfa_store_offset += tsize;
	      if (! frame_pointer_needed)
		{
		  cfa_offset = cfa_store_offset;
		  dwarf2out_def_cfa ("", STACK_POINTER_REGNUM, cfa_offset);
		}
	    }
#endif
	}
    }
  else
    {
      /* Register 0 (eax) passes the allocation size to `_alloca'.  */
      xops[3] = gen_rtx_REG (SImode, 0);
      if (do_rtl)
	emit_move_insn (xops[3], xops[2]);
      else
	output_asm_insn (AS2 (mov%L0,%2,%3), xops);

      xops[3] = gen_rtx_MEM (FUNCTION_MODE,
			     gen_rtx (SYMBOL_REF, Pmode, "_alloca"));

      if (do_rtl)
	emit_call_insn (gen_rtx (CALL, VOIDmode, xops[3], const0_rtx));
      else
	output_asm_insn (AS1 (call,%P3), xops);
    }

  /* Note If use enter it is NOT reversed args.
     This one is not reversed from intel!!
     I think enter is slower.  Also sdb doesn't like it.
     But if you want it the code is:
     {
	xops[3] = const0_rtx;
	output_asm_insn ("enter %2,%3", xops);
     }
     */

  /* Push each call-saved register that is live, plus the PIC register
     when it is used.  */
  limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
  for (regno = limit - 1; regno >= 0; regno--)
    if ((regs_ever_live[regno] && ! call_used_regs[regno])
	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
      {
	xops[0] = gen_rtx_REG (SImode, regno);
	if (do_rtl)
	  {
	    insn = emit_insn (gen_rtx (SET, VOIDmode,
				       gen_rtx_MEM (SImode,
						    gen_rtx (PRE_DEC, SImode,
							     stack_pointer_rtx)),
				       xops[0]));

	    RTX_FRAME_RELATED_P (insn) = 1;
	  }
	else
	  {
	    output_asm_insn ("push%L0 %0", xops);
#ifdef INCOMING_RETURN_ADDR_RTX
	    if (dwarf2out_do_frame ())
	      {
		char *l = dwarf2out_cfi_label ();

		cfa_store_offset += 4;
		if (! frame_pointer_needed)
		  {
		    cfa_offset = cfa_store_offset;
		    dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, cfa_offset);
		  }

		dwarf2out_reg_save (l, regno, - cfa_store_offset);
	      }
#endif
	  }
      }

  if (pic_reg_used)
    load_pic_register (do_rtl);

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  However, if -fpic, the above call will have
     done that.  */
  if ((profile_flag || profile_block_flag)
      && ! pic_reg_used && do_rtl)
    emit_insn (gen_blockage ());
}
2174
2175 /* Return 1 if it is appropriate to emit `ret' instructions in the
2176 body of a function. Do this only if the epilogue is simple, needing a
2177 couple of insns. Prior to reloading, we can't tell how many registers
2178 must be saved, so return 0 then. Return 0 if there is no frame
2179 marker to de-allocate.
2180
2181 If NON_SAVING_SETJMP is defined and true, then it is not possible
2182 for the epilogue to be simple, so return 0. This is a special case
2183 since NON_SAVING_SETJMP will not cause regs_ever_live to change
2184 until final, but jump_optimize may need to know sooner if a
2185 `return' is OK. */
2186
2187 int
2188 ix86_can_use_return_insn_p ()
2189 {
2190 int regno;
2191 int nregs = 0;
2192 int reglimit = (frame_pointer_needed
2193 ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2194 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2195 || current_function_uses_const_pool);
2196
2197 #ifdef NON_SAVING_SETJMP
2198 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
2199 return 0;
2200 #endif
2201
2202 if (! reload_completed)
2203 return 0;
2204
2205 for (regno = reglimit - 1; regno >= 0; regno--)
2206 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2207 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2208 nregs++;
2209
2210 return nregs == 0 || ! frame_pointer_needed;
2211 }
2212
2213 /* This function generates the assembly code for function exit.
2214 FILE is an stdio stream to output the code to.
2215 SIZE is an int: how many units of temporary storage to deallocate. */
2216
2217 void
2218 function_epilogue (file, size)
2219 FILE *file ATTRIBUTE_UNUSED;
2220 int size ATTRIBUTE_UNUSED;
2221 {
2222 return;
2223 }
2224
2225 /* Restore function stack, frame, and registers. */
2226
void
ix86_expand_epilogue ()
{
  /* Emit the epilogue as rtl insns (argument 1 = do_rtl).  */
  ix86_epilogue (1);
}
2232
/* Emit the function epilogue.  When DO_RTL is nonzero, generate RTL
   insns; otherwise write assembly text directly.  Restores call-saved
   registers (and the PIC register when used), tears down the frame,
   and emits the return, popping caller-popped arguments if needed.  */
static void
ix86_epilogue (do_rtl)
     int do_rtl;
{
  register int regno;
  register int nregs, limit;
  int offset;
  rtx xops[3];
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  long tsize = get_frame_size ();

  /* Compute the number of registers to pop */

  limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);

  nregs = 0;

  for (regno = limit - 1; regno >= 0; regno--)
    if ((regs_ever_live[regno] && ! call_used_regs[regno])
	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
      nregs++;

  /* sp is often unreliable so we must go off the frame pointer.

     In reality, we may not care if sp is unreliable, because we can restore
     the register relative to the frame pointer.  In theory, since each move
     is the same speed as a pop, and we don't need the leal, this is faster.
     For now restore multiple registers the old way. */

  /* Offset from the frame pointer down to the block of saved registers.  */
  offset = - tsize - (nregs * UNITS_PER_WORD);

  xops[2] = stack_pointer_rtx;

  /* When -fpic, we must emit a scheduling barrier, so that the instruction
     that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
     moved before any instruction which implicitly uses the got.  This
     includes any instruction which uses a SYMBOL_REF or a LABEL_REF.

     Alternatively, this could be fixed by making the dependence on the
     PIC_OFFSET_TABLE_REGNUM explicit in the RTL.  */

  if (flag_pic || profile_flag || profile_block_flag)
    emit_insn (gen_blockage ());

  if (nregs > 1 || ! frame_pointer_needed)
    {
      /* Point %esp at the saved-register block, then pop each one.  */
      if (frame_pointer_needed)
	{
	  xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
	  if (do_rtl)
	    emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
	  else
	    output_asm_insn (AS2 (lea%L2,%0,%2), xops);
	}

      for (regno = 0; regno < limit; regno++)
	if ((regs_ever_live[regno] && ! call_used_regs[regno])
	    || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
	  {
	    xops[0] = gen_rtx_REG (SImode, regno);

	    if (do_rtl)
	      emit_insn (gen_pop (xops[0]));
	    else
	      output_asm_insn ("pop%L0 %0", xops);
	  }
    }

  else
    /* At most one register with a frame pointer: restore it with a
       frame-relative move instead of adjusting %esp and popping.  */
    for (regno = 0; regno < limit; regno++)
      if ((regs_ever_live[regno] && ! call_used_regs[regno])
	  || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
	{
	  xops[0] = gen_rtx_REG (SImode, regno);
	  xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);

	  if (do_rtl)
	    emit_move_insn (xops[0], xops[1]);
	  else
	    output_asm_insn (AS2 (mov%L0,%1,%0), xops);

	  offset += 4;
	}

  if (frame_pointer_needed)
    {
      /* If not an i386, mov & pop is faster than "leave". */

      if (TARGET_USE_LEAVE)
	{
	  if (do_rtl)
	    emit_insn (gen_leave());
	  else
	    output_asm_insn ("leave", xops);
	}
      else
	{
	  /* Equivalent of leave: movl %ebp,%esp; popl %ebp.  */
	  xops[0] = frame_pointer_rtx;
	  xops[1] = stack_pointer_rtx;

	  if (do_rtl)
	    {
	      emit_insn (gen_epilogue_set_stack_ptr());
	      emit_insn (gen_pop (xops[0]));
	    }
	  else
	    {
	      output_asm_insn (AS2 (mov%L2,%0,%2), xops);
	      output_asm_insn ("pop%L0 %0", xops);
	    }
	}
    }

  else if (tsize)
    {
      /* If there is no frame pointer, we must still release the frame. */
      xops[0] = GEN_INT (tsize);

      if (do_rtl)
	emit_insn (gen_rtx (SET, VOIDmode, xops[2],
			    gen_rtx (PLUS, SImode, xops[2], xops[0])));
      else
	output_asm_insn (AS2 (add%L2,%0,%2), xops);
    }

#ifdef FUNCTION_BLOCK_PROFILER_EXIT
  if (profile_block_flag == 2)
    {
      /* NOTE(review): `file' is not declared in this function, so this
	 only compiles if the macro ignores its argument (or the macro
	 is never defined for this target) -- verify.  */
      FUNCTION_BLOCK_PROFILER_EXIT(file);
    }
#endif

  if (current_function_pops_args && current_function_args_size)
    {
      xops[1] = GEN_INT (current_function_pops_args);

      /* i386 can only pop 32K bytes (maybe 64K?  Is it signed?).  If
	 asked to pop more, pop return address, do explicit add, and jump
	 indirectly to the caller. */

      if (current_function_pops_args >= 32768)
	{
	  /* ??? Which register to use here? */
	  xops[0] = gen_rtx_REG (SImode, 2);

	  if (do_rtl)
	    {
	      emit_insn (gen_pop (xops[0]));
	      emit_insn (gen_rtx (SET, VOIDmode, xops[2],
				  gen_rtx (PLUS, SImode, xops[1], xops[2])));
	      emit_jump_insn (xops[0]);
	    }
	  else
	    {
	      output_asm_insn ("pop%L0 %0", xops);
	      output_asm_insn (AS2 (add%L2,%1,%2), xops);
	      output_asm_insn ("jmp %*%0", xops);
	    }
	}
      else
	{
	  /* Small argument block: `ret $n' pops it in one insn.  */
	  if (do_rtl)
	    emit_jump_insn (gen_return_pop_internal (xops[1]));
	  else
	    output_asm_insn ("ret %1", xops);
	}
    }
  else
    {
      if (do_rtl)
	emit_jump_insn (gen_return_internal ());
      else
	output_asm_insn ("ret", xops);
    }
}
2409 \f
2410 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2411 that is a valid memory address for an instruction.
2412 The MODE argument is the machine mode for the MEM expression
2413 that wants to use this address.
2414
2415 On x86, legitimate addresses are:
2416 base movl (base),reg
2417 displacement movl disp,reg
2418 base + displacement movl disp(base),reg
2419 index + base movl (base,index),reg
2420 (index + base) + displacement movl disp(base,index),reg
2421 index*scale movl (,index,scale),reg
2422 index*scale + disp movl disp(,index,scale),reg
2423 index*scale + base movl (base,index,scale),reg
2424 (index*scale + base) + disp movl disp(base,index,scale),reg
2425
2426 In each case, scale can be 1, 2, 4, 8. */
2427
2428 /* This is exactly the same as print_operand_addr, except that
2429 it recognizes addresses instead of printing them.
2430
2431 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2432 convert common non-canonical forms to canonical form so that they will
2433 be recognized. */
2434
/* Report INSN as an invalid address of kind MSG when -mdebug-addr is
   in effect; otherwise do nothing.  MSG is written verbatim with
   fputs rather than being passed to fprintf as a format string, so a
   stray `%' in a message can never corrupt the output.  */
#define ADDR_INVALID(msg,insn)						\
do {									\
  if (TARGET_DEBUG_ADDR)						\
    {									\
      fputs (msg, stderr);						\
      debug_rtx (insn);							\
    }									\
} while (0)
2443
/* Return TRUE if ADDR is a valid memory address for MODE, decomposing
   it into base, index, scale and displacement and validating each part.
   STRICT nonzero means hard-register checks (after reload); zero allows
   pseudos.  Recognizes only canonical forms; LEGITIMIZE_ADDRESS is
   expected to canonicalize first.  */
int
legitimate_address_p (mode, addr, strict)
     enum machine_mode mode;
     register rtx addr;
     int strict;
{
  rtx base = NULL_RTX;
  rtx indx = NULL_RTX;
  rtx scale = NULL_RTX;
  rtx disp = NULL_RTX;

  if (TARGET_DEBUG_ADDR)
    {
      fprintf (stderr,
	       "\n======\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
	       GET_MODE_NAME (mode), strict);

      debug_rtx (addr);
    }

  /* Decompose ADDR into the four address components.  */
  if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
    base = addr;

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == SUBREG)
	{
	  if (code1 == REG || code1 == SUBREG)
	    {
	      indx = op0;		/* index + base */
	      base = op1;
	    }

	  else
	    {
	      base = op0;		/* base + displacement */
	      disp = op1;
	    }
	}

      else if (code0 == MULT)
	{
	  indx  = XEXP (op0, 0);
	  scale = XEXP (op0, 1);

	  if (code1 == REG || code1 == SUBREG)
	    base = op1;			/* index*scale + base */

	  else
	    disp = op1;			/* index*scale + disp */
	}

      else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
	{
	  indx  = XEXP (XEXP (op0, 0), 0);	/* index*scale + base + disp */
	  scale = XEXP (XEXP (op0, 0), 1);
	  base  = XEXP (op0, 1);
	  disp  = op1;
	}

      else if (code0 == PLUS)
	{
	  indx = XEXP (op0, 0);		/* index + base + disp */
	  base = XEXP (op0, 1);
	  disp = op1;
	}

      else
	{
	  ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
	  return FALSE;
	}
    }

  else if (GET_CODE (addr) == MULT)
    {
      indx  = XEXP (addr, 0);		/* index*scale */
      scale = XEXP (addr, 1);
    }

  else
    disp = addr;			/* displacement */

  /* Allow arg pointer and stack pointer as index if there is not scaling */
  /* (neither can be an x86 index register, so swap them into base).  */
  if (base && indx && !scale
      && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
    {
      rtx tmp = base;
      base = indx;
      indx = tmp;
    }

  /* Validate base register:

     Don't allow SUBREG's here, it can lead to spill failures when the base
     is one word out of a two word structure, which is represented internally
     as a DImode int.  */

  if (base)
    {
      if (GET_CODE (base) != REG)
	{
	  ADDR_INVALID ("Base is not a register.\n", base);
	  return FALSE;
	}

      if ((strict && ! REG_OK_FOR_BASE_STRICT_P (base))
	  || (! strict && ! REG_OK_FOR_BASE_NONSTRICT_P (base)))
	{
	  ADDR_INVALID ("Base is not valid.\n", base);
	  return FALSE;
	}
    }

  /* Validate index register:

     Don't allow SUBREG's here, it can lead to spill failures when the index
     is one word out of a two word structure, which is represented internally
     as a DImode int.  */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
	{
	  ADDR_INVALID ("Index is not a register.\n", indx);
	  return FALSE;
	}

      if ((strict && ! REG_OK_FOR_INDEX_STRICT_P (indx))
	  || (! strict && ! REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
	{
	  ADDR_INVALID ("Index is not valid.\n", indx);
	  return FALSE;
	}
    }
  else if (scale)
    abort ();			/* scale w/o index invalid */

  /* Validate scale factor: */
  if (scale)
    {
      HOST_WIDE_INT value;

      if (GET_CODE (scale) != CONST_INT)
	{
	  ADDR_INVALID ("Scale is not valid.\n", scale);
	  return FALSE;
	}

      /* Hardware supports only scale factors 1, 2, 4 and 8.  */
      value = INTVAL (scale);
      if (value != 1 && value != 2 && value != 4 && value != 8)
	{
	  ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
	  return FALSE;
	}
    }

  /* Validate displacement
     Constant pool addresses must be handled special.  They are
     considered legitimate addresses, but only if not used with regs.
     When printed, the output routines know to print the reference with the
     PIC reg, even though the PIC reg doesn't appear in the RTL.  */
  if (disp)
    {
      if (GET_CODE (disp) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (disp)
	  && base == 0
	  && indx == 0)
	;

      else if (!CONSTANT_ADDRESS_P (disp))
	{
	  ADDR_INVALID ("Displacement is not valid.\n", disp);
	  return FALSE;
	}

      else if (GET_CODE (disp) == CONST_DOUBLE)
	{
	  ADDR_INVALID ("Displacement is a const_double.\n", disp);
	  return FALSE;
	}

      /* Under -fpic a symbolic displacement is only valid when the PIC
	 register takes part in the address: as the base, or as an
	 unscaled index.  */
      else if (flag_pic && SYMBOLIC_CONST (disp)
	       && base != pic_offset_table_rtx
	       && (indx != pic_offset_table_rtx || scale != NULL_RTX))
	{
	  ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp);
	  return FALSE;
	}

      else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp)
	       && (base != NULL_RTX || indx != NULL_RTX))
	{
	  ADDR_INVALID ("Displacement is an invalid half-pic reference.\n",
			disp);
	  return FALSE;
	}
    }

  if (TARGET_DEBUG_ADDR)
    fprintf (stderr, "Address is valid.\n");

  /* Everything looks valid, return true */
  return TRUE;
}
2653 \f
2654 /* Return a legitimate reference for ORIG (an address) using the
2655 register REG. If REG is 0, a new pseudo is generated.
2656
2657 There are three types of references that must be handled:
2658
2659 1. Global data references must load the address from the GOT, via
2660 the PIC reg. An insn is emitted to do this load, and the reg is
2661 returned.
2662
2663 2. Static data references must compute the address as an offset
2664 from the GOT, whose base is in the PIC reg. An insn is emitted to
2665 compute the address into a reg, and the reg is returned. Static
2666 data objects have SYMBOL_REF_FLAG set to differentiate them from
2667 global data objects.
2668
2669 3. Constant pool addresses must be handled special. They are
2670 considered legitimate addresses, but only if not used with regs.
2671 When printed, the output routines know to print the reference with the
2672 PIC reg, even though the PIC reg doesn't appear in the RTL.
2673
2674 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2675 reg also appears in the address (except for constant pool references,
2676 noted above).
2677
2678 "switch" statements also require special handling when generating
2679 PIC code. See comments by the `casesi' insn in i386.md for details. */
2680
/* Return a PIC-legitimate reference for address ORIG, using register
   REG as scratch/result (a fresh pseudo is allocated when REG is 0).
   Symbol/label references are rewritten as GOT loads or GOT-relative
   additions; CONST/PLUS expressions are handled by recursing on their
   operands.  See the block comment above for the three reference
   kinds.  */
rtx
legitimize_pic_address (orig, reg)
     rtx orig;
     rtx reg;
{
  rtx addr = orig;
  rtx new = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      /* Constant pool references are legitimate as-is; the printer
	 adds the PIC register itself.  */
      if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
	reg = new = orig;
      else
	{
	  if (reg == 0)
	    reg = gen_reg_rtx (Pmode);

	  /* Static data and labels: GOT-relative add.  Global data:
	     load the address from the GOT.  */
	  if ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FLAG (addr))
	      || GET_CODE (addr) == LABEL_REF)
	    new = gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig);
	  else
	    new = gen_rtx_MEM (Pmode,
			       gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig));

	  emit_move_insn (reg, new);
	}
      current_function_uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  if (GET_CODE (addr) != PLUS)
	    abort ();
	}

      /* Already expressed relative to the PIC register: nothing to do.  */
      if (XEXP (addr, 0) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	reg = gen_reg_rtx (Pmode);

      /* Legitimize both operands; give the second call a scratch only
	 if the first did not consume REG.  */
      base = legitimize_pic_address (XEXP (addr, 0), reg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg);

      if (GET_CODE (addr) == CONST_INT)
	return plus_constant (base, INTVAL (addr));

      /* Re-associate so any trailing constant stays outermost.  */
      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx (PLUS, Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}

      return gen_rtx (PLUS, Pmode, base, addr);
    }
  /* Anything else (e.g. a plain constant) is returned unchanged.  */
  return new;
}
2745 \f
2746 /* Emit insns to move operands[1] into operands[0]. */
2747
2748 void
2749 emit_pic_move (operands, mode)
2750 rtx *operands;
2751 enum machine_mode mode ATTRIBUTE_UNUSED;
2752 {
2753 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
2754
2755 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2756 operands[1] = force_reg (SImode, operands[1]);
2757 else
2758 operands[1] = legitimize_pic_address (operands[1], temp);
2759 }
2760 \f
2761 /* Try machine-dependent ways of modifying an illegitimate address
2762 to be legitimate. If we find one, return the new, valid address.
2763 This macro is used in only one place: `memory_address' in explow.c.
2764
2765 OLDX is the address as it was before break_out_memory_refs was called.
2766 In some cases it is useful to look at this to decide what needs to be done.
2767
2768 MODE and WIN are passed so that this macro can use
2769 GO_IF_LEGITIMATE_ADDRESS.
2770
2771 It is always safe for this macro to do nothing. It exists to recognize
2772 opportunities to optimize the output.
2773
2774 For the 80386, we handle X+REG by loading X into a register R and
2775 using R+REG. R will go in a general reg and indexing will be used.
2776 However, if REG is a broken-out memory address or multiplication,
2777 nothing needs to be done because REG can certainly go in a general reg.
2778
2779 When -fpic is used, special handling is needed for symbolic references.
2780 See comments by legitimize_pic_address in i386.c for details. */
2781
/* Try to turn X into a legitimate address for MODE; see the block
   comment above for the overall strategy.  Returns X (possibly
   rewritten, possibly with insns emitted to load parts into
   registers); returning X unchanged is always safe.  OLDX is unused
   on this target.  */
rtx
legitimize_address (x, oldx, mode)
     register rtx x;
     register rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  int changed = 0;
  unsigned log;

  if (TARGET_DEBUG_ADDR)
    {
      fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n",
	       GET_MODE_NAME (mode));
      debug_rtx (x);
    }

  if (flag_pic && SYMBOLIC_CONST (x))
    return legitimize_pic_address (x, 0);

  /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
  if (GET_CODE (x) == ASHIFT
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
    {
      changed = 1;
      x = gen_rtx (MULT, Pmode, force_reg (Pmode, XEXP (x, 0)),
		   GEN_INT (1 << log));
    }

  if (GET_CODE (x) == PLUS)
    {
      /* Canonicalize shifts by 0, 1, 2, 3 into multiply.  */

      if (GET_CODE (XEXP (x, 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
	{
	  changed = 1;
	  XEXP (x, 0) = gen_rtx (MULT, Pmode,
				 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
				 GEN_INT (1 << log));
	}

      if (GET_CODE (XEXP (x, 1)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
	  && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
	{
	  changed = 1;
	  XEXP (x, 1) = gen_rtx (MULT, Pmode,
				 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
				 GEN_INT (1 << log));
	}

      /* Put multiply first if it isn't already. */
      if (GET_CODE (XEXP (x, 1)) == MULT)
	{
	  rtx tmp = XEXP (x, 0);
	  XEXP (x, 0) = XEXP (x, 1);
	  XEXP (x, 1) = tmp;
	  changed = 1;
	}

      /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
	 into (plus (plus (mult (reg) (const)) (reg)) (const)).  This can be
	 created by virtual register instantiation, register elimination, and
	 similar optimizations.  */
      if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
	{
	  changed = 1;
	  x = gen_rtx (PLUS, Pmode,
		       gen_rtx (PLUS, Pmode, XEXP (x, 0),
				XEXP (XEXP (x, 1), 0)),
		       XEXP (XEXP (x, 1), 1));
	}

      /* Canonicalize
	 (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
	 into (plus (plus (mult (reg) (const)) (reg)) (const)).  */
      /* NOTE(review): the GET_CODE (x) == PLUS re-test below is
	 redundant -- we are already inside that condition and x has
	 not been reassigned on this path.  Harmless.  */
      else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
	       && CONSTANT_P (XEXP (x, 1)))
	{
	  rtx constant;
	  rtx other = NULL_RTX;

	  /* Find which side holds the CONST_INT to be folded.  */
	  if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	    {
	      constant = XEXP (x, 1);
	      other = XEXP (XEXP (XEXP (x, 0), 1), 1);
	    }
	  else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
	    {
	      constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
	      other = XEXP (x, 1);
	    }
	  else
	    constant = 0;

	  if (constant)
	    {
	      changed = 1;
	      x = gen_rtx (PLUS, Pmode,
			   gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
				    XEXP (XEXP (XEXP (x, 0), 1), 0)),
			   plus_constant (other, INTVAL (constant)));
	    }
	}

      if (changed && legitimate_address_p (mode, x, FALSE))
	return x;

      /* Still not valid: force any multiplies into registers.  */
      if (GET_CODE (XEXP (x, 0)) == MULT)
	{
	  changed = 1;
	  XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
	}

      if (GET_CODE (XEXP (x, 1)) == MULT)
	{
	  changed = 1;
	  XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
	}

      if (changed
	  && GET_CODE (XEXP (x, 1)) == REG
	  && GET_CODE (XEXP (x, 0)) == REG)
	return x;

      if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
	{
	  changed = 1;
	  x = legitimize_pic_address (x, 0);
	}

      if (changed && legitimate_address_p (mode, x, FALSE))
	return x;

      /* Last resort: load the non-register side into a register so the
	 result is reg + reg.  */
      if (GET_CODE (XEXP (x, 0)) == REG)
	{
	  register rtx temp = gen_reg_rtx (Pmode);
	  register rtx val = force_operand (XEXP (x, 1), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  XEXP (x, 1) = temp;
	  return x;
	}

      else if (GET_CODE (XEXP (x, 1)) == REG)
	{
	  register rtx temp = gen_reg_rtx (Pmode);
	  register rtx val = force_operand (XEXP (x, 0), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  XEXP (x, 0) = temp;
	  return x;
	}
    }

  return x;
}
2945 \f
2946 /* Print an integer constant expression in assembler syntax. Addition
2947 and subtraction are the only arithmetic that may appear in these
2948 expressions. FILE is the stdio stream to write to, X is the rtx, and
2949 CODE is the operand print code from the output string. */
2950
/* Print the integer constant expression X (only addition and
   subtraction may appear) to FILE in assembler syntax, adding the
   @GOT/@GOTOFF/@PLT suffixes PIC references require.  CODE is the
   operand print code ('X' suppresses any suffix, 'P' forces @PLT).  */
static void
output_pic_addr_const (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  char buf[256];

  switch (GET_CODE (x))
    {
    case PC:
      if (flag_pic)
	putc ('.', file);
      else
	abort ();
      break;

    case SYMBOL_REF:
    case LABEL_REF:
      if (GET_CODE (x) == SYMBOL_REF)
	assemble_name (file, XSTR (x, 0));
      else
	{
	  ASM_GENERATE_INTERNAL_LABEL (buf, "L",
				       CODE_LABEL_NUMBER (XEXP (x, 0)));
	  assemble_name (asm_out_file, buf);
	}

      /* Pick the PIC suffix: constant pool entries and static data
	 (SYMBOL_REF_FLAG set) are GOT-relative; other symbols go
	 through the GOT.  */
      if (code == 'X')
	; /* No suffix, dammit. */
      else if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
	fprintf (file, "@GOTOFF(%%ebx)");
      else if (code == 'P')
	fprintf (file, "@PLT");
      else if (GET_CODE (x) == LABEL_REF)
	fprintf (file, "@GOTOFF");
      else if (! SYMBOL_REF_FLAG (x))
	fprintf (file, "@GOT");
      else
	fprintf (file, "@GOTOFF");

      break;

    case CODE_LABEL:
      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
      assemble_name (asm_out_file, buf);
      break;

    case CONST_INT:
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
      break;

    case CONST:
      /* This used to output parentheses around the expression,
	 but that does not work on the 386 (either ATT or BSD assembler).  */
      output_pic_addr_const (file, XEXP (x, 0), code);
      break;

    case CONST_DOUBLE:
      if (GET_MODE (x) == VOIDmode)
	{
	  /* We can use %d if the number is <32 bits and positive.  */
	  if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
	    fprintf (file, "0x%lx%08lx",
		     (unsigned long) CONST_DOUBLE_HIGH (x),
		     (unsigned long) CONST_DOUBLE_LOW (x));
	  else
	    fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
	}
      else
	/* We can't handle floating point constants;
	   PRINT_OPERAND must handle them.  */
	output_operand_lossage ("floating constant misused");
      break;

    case PLUS:
      /* Some assemblers need integer constants to appear first.  */
      /* NOTE(review): both branches apply INTVAL to the non-constant
	 operand's sibling; this assumes at least one operand of the
	 PLUS is a CONST_INT -- confirm against callers.  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
	{
	  output_pic_addr_const (file, XEXP (x, 0), code);
	  if (INTVAL (XEXP (x, 1)) >= 0)
	    fprintf (file, "+");
	  output_pic_addr_const (file, XEXP (x, 1), code);
	}
      else
	{
	  output_pic_addr_const (file, XEXP (x, 1), code);
	  if (INTVAL (XEXP (x, 0)) >= 0)
	    fprintf (file, "+");
	  output_pic_addr_const (file, XEXP (x, 0), code);
	}
      break;

    case MINUS:
      output_pic_addr_const (file, XEXP (x, 0), code);
      fprintf (file, "-");
      output_pic_addr_const (file, XEXP (x, 1), code);
      break;

    default:
      output_operand_lossage ("invalid expression as operand");
    }
}
3054 \f
3055 /* Append the correct conditional move suffix which corresponds to CODE. */
3056
/* Write to FILE the set/cmov/fcmov suffix corresponding to comparison
   CODE, for operands of class MODE (MODE_INT or MODE_FLOAT).  When
   REVERSE_CC is nonzero the reversed condition is printed; for IEEE
   387 comparisons (flags in the FPU, no fcomi) the reversal is folded
   into the table entries instead of reverse_condition.  */
static void
put_condition_code (code, reverse_cc, mode, file)
     enum rtx_code code;
     int reverse_cc;
     enum mode_class mode;
     FILE * file;
{
  /* True when the previous comparison left its result in the 387
     status word (and not via fcomi), so the suffixes below must read
     the flags as fnstsw/sahf would set them.  */
  int ieee = (TARGET_IEEE_FP && (cc_prev_status.flags & CC_IN_80387)
	      && ! (cc_prev_status.flags & CC_FCOMI));
  if (reverse_cc && ! ieee)
    code = reverse_condition (code);

  if (mode == MODE_INT)
    switch (code)
      {
      case NE:
	/* CC_Z_IN_NOT_C: zero flag is in the complemented carry.  */
	if (cc_prev_status.flags & CC_Z_IN_NOT_C)
	  fputs ("b", file);
	else
	  fputs ("ne", file);
	return;

      case EQ:
	if (cc_prev_status.flags & CC_Z_IN_NOT_C)
	  fputs ("ae", file);
	else
	  fputs ("e", file);
	return;

      case GE:
	/* With no overflow, sign alone decides signed >=.  */
	if (cc_prev_status.flags & CC_NO_OVERFLOW)
	  fputs ("ns", file);
	else
	  fputs ("ge", file);
	return;

      case GT:
	fputs ("g", file);
	return;

      case LE:
	fputs ("le", file);
	return;

      case LT:
	if (cc_prev_status.flags & CC_NO_OVERFLOW)
	  fputs ("s", file);
	else
	  fputs ("l", file);
	return;

      case GEU:
	fputs ("ae", file);
	return;

      case GTU:
	fputs ("a", file);
	return;

      case LEU:
	fputs ("be", file);
	return;

      case LTU:
	fputs ("b", file);
	return;

      default:
	output_operand_lossage ("Invalid %%C operand");
      }

  else if (mode == MODE_FLOAT)
    /* 387 compares set only C/Z-style flags, so unsigned-style
       suffixes are used; under IEEE the e/ne pair (with reversal
       folded in) keeps unordered operands from satisfying either
       branch.  */
    switch (code)
      {
      case NE:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "ne", file);
	return;
      case EQ:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "e", file);
	return;
      case GE:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file);
	return;
      case GT:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file);
	return;
      case LE:
	fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file);
	return;
      case LT:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file);
	return;
      case GEU:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file);
	return;
      case GTU:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file);
	return;
      case LEU:
	fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file);
	return;
      case LTU:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file);
	return;
      default:
	output_operand_lossage ("Invalid %%C operand");
      }
  /* NOTE(review): a mode that is neither MODE_INT nor MODE_FLOAT
     falls through and prints nothing -- verify callers never pass
     another class.  */
}
3165
3166 /* Meaning of CODE:
3167 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
3168 C -- print opcode suffix for set/cmov insn.
3169 c -- like C, but print reversed condition
3170 F -- print opcode suffix for fcmov insn.
   f -- like F, but print reversed condition
3172 R -- print the prefix for register names.
3173 z -- print the opcode suffix for the size of the current operand.
3174 * -- print a star (in certain assembler syntax)
3175 w -- print the operand as if it's a "word" (HImode) even if it isn't.
3176 c -- don't print special prefixes before constant operands.
3177 J -- print the appropriate jump operand.
3178 s -- print a shift double count, followed by the assemblers argument
3179 delimiter.
3180 b -- print the QImode name of the register for the indicated operand.
3181 %b0 would print %al if operands[0] is reg 0.
3182 w -- likewise, print the HImode name of the register.
3183 k -- likewise, print the SImode name of the register.
3184 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
3185 y -- print "st(0)" instead of "st" as a register.
3186 P -- print as a PIC constant */
3187
/* Print operand X to FILE under print code CODE (see the legend in
   the comment above).  A nonzero CODE selects a special formatting
   action; otherwise X is printed normally as a register, memory
   reference, float constant, or immediate/offset constant.  */
void
print_operand (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  if (code)
    {
      switch (code)
	{
	case '*':
	  if (USE_STAR)
	    putc ('*', file);
	  return;

	case 'L':
	  PUT_OP_SIZE (code, 'l', file);
	  return;

	case 'W':
	  PUT_OP_SIZE (code, 'w', file);
	  return;

	case 'B':
	  PUT_OP_SIZE (code, 'b', file);
	  return;

	case 'Q':
	  PUT_OP_SIZE (code, 'l', file);
	  return;

	case 'S':
	  PUT_OP_SIZE (code, 's', file);
	  return;

	case 'T':
	  PUT_OP_SIZE (code, 't', file);
	  return;

	case 'z':
	  /* 387 opcodes don't get size suffixes if the operands are
	     registers. */

	  if (STACK_REG_P (x))
	    return;

	  /* this is the size of op from size of operand */
	  switch (GET_MODE_SIZE (GET_MODE (x)))
	    {
	    case 1:
	      PUT_OP_SIZE ('B', 'b', file);
	      return;

	    case 2:
	      PUT_OP_SIZE ('W', 'w', file);
	      return;

	    case 4:
	      if (GET_MODE (x) == SFmode)
		{
		  PUT_OP_SIZE ('S', 's', file);
		  return;
		}
	      else
		PUT_OP_SIZE ('L', 'l', file);
	      return;

	    case 12:
	      PUT_OP_SIZE ('T', 't', file);
	      return;

	    case 8:
	      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
		{
#ifdef GAS_MNEMONICS
		  PUT_OP_SIZE ('Q', 'q', file);
		  return;
#else
		  PUT_OP_SIZE ('Q', 'l', file); /* Fall through */
#endif
		}

	      PUT_OP_SIZE ('Q', 'l', file);
	      return;
	    }
	  /* NOTE(review): an unlisted operand size falls through into
	     the 'b'..'X' group below and prints the operand with the
	     'z' code -- confirm this is intended rather than a missing
	     abort.  */

	case 'b':
	case 'w':
	case 'k':
	case 'h':
	case 'y':
	case 'P':
	case 'X':
	  /* Handled by PRINT_REG / the constant cases below.  */
	  break;

	case 'J':
	  switch (GET_CODE (x))
	    {
	      /* These conditions are appropriate for testing the result
		 of an arithmetic operation, not for a compare operation.
	         Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
		 CC_Z_IN_NOT_C false and not floating point. */
	      /* NOTE(review): after arithmetic, unsigned GEU is always
		 true (jmp) and LTU never (branch never); GTU/LEU reduce
		 to nonzero/zero -- hence the odd-looking entries.  */
	    case NE:  fputs ("jne", file); return;
	    case EQ:  fputs ("je",  file); return;
	    case GE:  fputs ("jns", file); return;
	    case LT:  fputs ("js",  file); return;
	    case GEU: fputs ("jmp", file); return;
	    case GTU: fputs ("jne",  file); return;
	    case LEU: fputs ("je", file); return;
	    case LTU: fputs ("#branch never",  file); return;

	    /* no matching branches for GT nor LE */

	    default:
	      abort ();
	    }

	case 's':
	  /* Shift-double count plus argument delimiter, unless the
	     assembler omits register counts.  */
	  if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
	    {
	      PRINT_OPERAND (file, x, 0);
	      fputs (AS2C (,) + 1, file);
	    }

	  return;

	  /* This is used by the conditional move instructions.  */
	case 'C':
	  put_condition_code (GET_CODE (x), 0, MODE_INT, file);
	  return;

	  /* Like above, but reverse condition */
	case 'c':
	  put_condition_code (GET_CODE (x), 1, MODE_INT, file); return;

	case 'F':
	  put_condition_code (GET_CODE (x), 0, MODE_FLOAT, file);
	  return;

	  /* Like above, but reverse condition */
	case 'f':
	  put_condition_code (GET_CODE (x), 1, MODE_FLOAT, file);
	  return;

	default:
	  {
	    char str[50];

	    sprintf (str, "invalid operand code `%c'", code);
	    output_operand_lossage (str);
	  }
	}
    }

  if (GET_CODE (x) == REG)
    {
      PRINT_REG (x, code, file);
    }

  else if (GET_CODE (x) == MEM)
    {
      PRINT_PTR (x, file);
      if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
	{
	  if (flag_pic)
	    output_pic_addr_const (file, XEXP (x, 0), code);
	  else
	    output_addr_const (file, XEXP (x, 0));
	}
      else
	output_address (XEXP (x, 0));
    }

  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* Single-float immediate: print its target bit pattern in hex.  */
      REAL_VALUE_TYPE r;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_TARGET_SINGLE (r, l);
      PRINT_IMMED_PREFIX (file);
      fprintf (file, "0x%lx", l);
    }

  /* These float cases don't actually occur as immediate operands.  */
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE r;
      char dstr[30];

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
      fprintf (file, "%s", dstr);
    }

  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
    {
      REAL_VALUE_TYPE r;
      char dstr[30];

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
      fprintf (file, "%s", dstr);
    }
  else
    {
      /* Immediate or offset constant; 'P' (PLT reference) suppresses
	 the prefix.  */
      if (code != 'P')
	{
	  if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	    PRINT_IMMED_PREFIX (file);
	  else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
		   || GET_CODE (x) == LABEL_REF)
	    PRINT_OFFSET_PREFIX (file);
	}
      if (flag_pic)
	output_pic_addr_const (file, x, code);
      else
	output_addr_const (file, x);
    }
}
3408 \f
/* Print a memory operand whose address is ADDR.

   Decomposes ADDR into at most a base register, an index register
   with a scale factor, and a constant displacement, then emits them
   through the assembler-dialect macros (ADDR_BEG, PRINT_B_I_S, ...).  */

void
print_operand_address (file, addr)
     FILE *file;
     register rtx addr;
{
  register rtx reg1, reg2, breg, ireg;
  rtx offset;

  switch (GET_CODE (addr))
    {
    case REG:
      /* A bare register: print it as a 32-bit register name.  */
      ADDR_BEG (file);
      fprintf (file, "%se", RP);
      fputs (hi_reg_name[REGNO (addr)], file);
      ADDR_END (file);
      break;

    case PLUS:
      reg1 = 0;
      reg2 = 0;
      ireg = 0;
      breg = 0;
      offset = 0;
      /* Peel a constant displacement off either side of the PLUS.  */
      if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
	{
	  offset = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
	{
	  offset = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}

      /* If a PLUS remains, pull out its first REG or MULT (scaled
	 index) term into reg1, leaving the other term in ADDR.  */
      if (GET_CODE (addr) != PLUS)
	;
      else if (GET_CODE (XEXP (addr, 0)) == MULT)
	reg1 = XEXP (addr, 0), addr = XEXP (addr, 1);
      else if (GET_CODE (XEXP (addr, 1)) == MULT)
	reg1 = XEXP (addr, 1), addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 0)) == REG)
	reg1 = XEXP (addr, 0), addr = XEXP (addr, 1);
      else if (GET_CODE (XEXP (addr, 1)) == REG)
	reg1 = XEXP (addr, 1), addr = XEXP (addr, 0);

      /* Whatever is left may itself be the second register term.  */
      if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
	{
	  if (reg1 == 0)
	    reg1 = addr;
	  else
	    reg2 = addr;

	  addr = 0;
	}

      /* ADDR now holds any residual (symbolic) term; it must not
	 coexist with the constant displacement peeled off above.  */
      if (offset != 0)
	{
	  if (addr != 0)
	    abort ();
	  addr = offset;
	}

      /* Assign base (breg) and index (ireg) roles: a MULT can only
	 be an index, and a base register must satisfy
	 REGNO_OK_FOR_BASE_P.  */
      if ((reg1 && GET_CODE (reg1) == MULT)
	  || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
	{
	  breg = reg2;
	  ireg = reg1;
	}
      else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
	{
	  breg = reg1;
	  ireg = reg2;
	}

      if (ireg != 0 || breg != 0)
	{
	  int scale = 1;

	  /* Print the displacement first, then the (base,index,scale)
	     suffix.  */
	  if (addr != 0)
	    {
	      if (flag_pic)
		output_pic_addr_const (file, addr, 0);
	      else if (GET_CODE (addr) == LABEL_REF)
		output_asm_label (addr);
	      else
		output_addr_const (file, addr);
	    }

	  if (ireg != 0 && GET_CODE (ireg) == MULT)
	    {
	      scale = INTVAL (XEXP (ireg, 1));
	      ireg = XEXP (ireg, 0);
	    }

	  /* The stack pointer can only appear as a base register,
	     never an index register, so exchange the regs if it is wrong.  */

	  if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
	    {
	      rtx tmp;

	      tmp = breg;
	      breg = ireg;
	      ireg = tmp;
	    }

	  /* output breg+ireg*scale */
	  PRINT_B_I_S (breg, ireg, scale, file);
	  break;
	}
      /* NOTE(review): no break here — when neither a base nor an
	 index register was recognized, control falls through into
	 the MULT case below.  That only makes sense if ADDR is then
	 a bare MULT; confirm no other operand shape can reach this
	 point.  */

    case MULT:
      {
	int scale;

	/* scale*index with no base: the constant may be either
	   operand of the MULT.  */
	if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
	  {
	    scale = INTVAL (XEXP (addr, 0));
	    ireg = XEXP (addr, 1);
	  }
	else
	  {
	    scale = INTVAL (XEXP (addr, 1));
	    ireg = XEXP (addr, 0);
	  }

	/* Emit an explicit zero displacement before the index part.  */
	output_addr_const (file, const0_rtx);
	PRINT_B_I_S (NULL_RTX, ireg, scale, file);
      }
      break;

    default:
      /* A constant address.  Small integers are printed directly;
	 anything else goes through the generic constant printers.  */
      if (GET_CODE (addr) == CONST_INT
	  && INTVAL (addr) < 0x8000
	  && INTVAL (addr) >= -0x8000)
	fprintf (file, "%d", (int) INTVAL (addr));
      else
	{
	  if (flag_pic)
	    output_pic_addr_const (file, addr, 0);
	  else
	    output_addr_const (file, addr);
	}
    }
}
3556 \f
/* Set the cc_status for the results of an insn whose pattern is EXP.
   On the 80386, we assume that only test and compare insns, as well
   as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, BSF, ASHIFT,
   ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
   Also, we assume that jumps, moves and sCOND don't affect the condition
   codes.  All else clobbers the condition codes, by assumption.

   We assume that ALL integer add, minus, etc. instructions affect the
   condition codes.  This MUST be consistent with i386.md.

   We don't record any float test or compare - the redundant test &
   compare check in final.c does not handle stack-like regs correctly.  */

void
notice_update_cc (exp)
     rtx exp;
{
  if (GET_CODE (exp) == SET)
    {
      /* Jumps do not alter the cc's.  */
      if (SET_DEST (exp) == pc_rtx)
	return;

      /* Moving register or memory into a register:
	 it doesn't alter the cc's, but it might invalidate
	 the RTX's which we remember the cc's came from.
	 (Note that moving a constant 0 or 1 MAY set the cc's).  */
      if (REG_P (SET_DEST (exp))
	  && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
	      || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'
	      || (GET_CODE (SET_SRC (exp)) == IF_THEN_ELSE
		  && GET_MODE_CLASS (GET_MODE (SET_DEST (exp))) == MODE_INT)))
	{
	  /* Forget any remembered cc value that mentions the
	     overwritten register.  */
	  if (cc_status.value1
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
	    cc_status.value1 = 0;

	  if (cc_status.value2
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
	    cc_status.value2 = 0;

	  return;
	}

      /* Moving register into memory doesn't alter the cc's.
	 It may invalidate the RTX's which we remember the cc's came from.  */
      if (GET_CODE (SET_DEST (exp)) == MEM
	  && (REG_P (SET_SRC (exp))
	      || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
	{
	  if (cc_status.value1
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
	    cc_status.value1 = 0;
	  if (cc_status.value2
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
	    cc_status.value2 = 0;

	  return;
	}

      /* Function calls clobber the cc's.  */
      else if (GET_CODE (SET_SRC (exp)) == CALL)
	{
	  CC_STATUS_INIT;
	  return;
	}

      /* Tests and compares set the cc's in predictable ways.  */
      else if (SET_DEST (exp) == cc0_rtx)
	{
	  CC_STATUS_INIT;
	  cc_status.value1 = SET_SRC (exp);
	  return;
	}

      /* Certain instructions affect the condition codes.  */
      else if (GET_MODE (SET_SRC (exp)) == SImode
	       || GET_MODE (SET_SRC (exp)) == HImode
	       || GET_MODE (SET_SRC (exp)) == QImode)
	switch (GET_CODE (SET_SRC (exp)))
	  {
	  case ASHIFTRT: case LSHIFTRT: case ASHIFT:
	    /* Shifts on the 386 don't set the condition codes if the
	       shift count is zero.  */
	    if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
	      {
		CC_STATUS_INIT;
		break;
	      }

	    /* We assume that the CONST_INT is non-zero (this rtx would
	       have been deleted if it were zero).  */
	    /* FALLTHRU */

	  case PLUS: case MINUS: case NEG:
	  case AND: case IOR: case XOR:
	    /* These set all flags except that overflow semantics differ
	       from what branches expect; remember both the source
	       expression and the destination as possible cc origins.  */
	    cc_status.flags = CC_NO_OVERFLOW;
	    cc_status.value1 = SET_SRC (exp);
	    cc_status.value2 = SET_DEST (exp);
	    break;

	    /* This is the bsf pattern used by ffs.  */
	  case UNSPEC:
	    if (XINT (SET_SRC (exp), 1) == 5)
	      {
		/* Only the Z flag is defined after bsf.  */
		cc_status.flags
		  = CC_NOT_POSITIVE | CC_NOT_NEGATIVE | CC_NO_OVERFLOW;
		cc_status.value1 = XVECEXP (SET_SRC (exp), 0, 0);
		cc_status.value2 = 0;
		break;
	      }
	    /* FALLTHRU */

	  default:
	    CC_STATUS_INIT;
	  }
      else
	{
	  /* Any other SET clobbers the cc's.  */
	  CC_STATUS_INIT;
	}
    }
  else if (GET_CODE (exp) == PARALLEL
	   && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
    {
      /* A PARALLEL whose first element is a SET: look at that SET
	 (the rest are clobbers/uses).  */
      if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
	return;
      if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)

	{
	  CC_STATUS_INIT;
	  if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
	    {
	      /* A float compare: the flags live in the 80387 status
		 word until output_fp_cc0_set transfers them.  */
	      cc_status.flags |= CC_IN_80387;
	      /* The FCOMI detection below is deliberately disabled
		 (the `0 &&').  */
	      if (0 && TARGET_CMOVE && stack_regs_mentioned_p
		  (XEXP (SET_SRC (XVECEXP (exp, 0, 0)), 1)))
		cc_status.flags |= CC_FCOMI;
	    }
	  else
	    cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
	  return;
	}

      CC_STATUS_INIT;
    }
  else
    {
      /* Anything else (ASM, CLOBBER, ...) clobbers the cc's.  */
      CC_STATUS_INIT;
    }
}
3706 \f
3707 /* Split one or more DImode RTL references into pairs of SImode
3708 references. The RTL can be REG, offsettable MEM, integer constant, or
3709 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3710 split and "num" is its length. lo_half and hi_half are output arrays
3711 that parallel "operands". */
3712
3713 void
3714 split_di (operands, num, lo_half, hi_half)
3715 rtx operands[];
3716 int num;
3717 rtx lo_half[], hi_half[];
3718 {
3719 while (num--)
3720 {
3721 rtx op = operands[num];
3722 if (GET_CODE (op) == REG)
3723 {
3724 lo_half[num] = gen_rtx_REG (SImode, REGNO (op));
3725 hi_half[num] = gen_rtx_REG (SImode, REGNO (op) + 1);
3726 }
3727 else if (CONSTANT_P (op))
3728 split_double (op, &lo_half[num], &hi_half[num]);
3729 else if (offsettable_memref_p (op))
3730 {
3731 rtx lo_addr = XEXP (op, 0);
3732 rtx hi_addr = XEXP (adj_offsettable_operand (op, 4), 0);
3733 lo_half[num] = change_address (op, SImode, lo_addr);
3734 hi_half[num] = change_address (op, SImode, hi_addr);
3735 }
3736 else
3737 abort();
3738 }
3739 }
3740 \f
3741 /* Return 1 if this is a valid binary operation on a 387.
3742 OP is the expression matched, and MODE is its mode. */
3743
3744 int
3745 binary_387_op (op, mode)
3746 register rtx op;
3747 enum machine_mode mode;
3748 {
3749 if (mode != VOIDmode && mode != GET_MODE (op))
3750 return 0;
3751
3752 switch (GET_CODE (op))
3753 {
3754 case PLUS:
3755 case MINUS:
3756 case MULT:
3757 case DIV:
3758 return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
3759
3760 default:
3761 return 0;
3762 }
3763 }
3764 \f
3765 /* Return 1 if this is a valid shift or rotate operation on a 386.
3766 OP is the expression matched, and MODE is its mode. */
3767
3768 int
3769 shift_op (op, mode)
3770 register rtx op;
3771 enum machine_mode mode;
3772 {
3773 rtx operand = XEXP (op, 0);
3774
3775 if (mode != VOIDmode && mode != GET_MODE (op))
3776 return 0;
3777
3778 if (GET_MODE (operand) != GET_MODE (op)
3779 || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
3780 return 0;
3781
3782 return (GET_CODE (op) == ASHIFT
3783 || GET_CODE (op) == ASHIFTRT
3784 || GET_CODE (op) == LSHIFTRT
3785 || GET_CODE (op) == ROTATE
3786 || GET_CODE (op) == ROTATERT);
3787 }
3788
3789 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3790 MODE is not used. */
3791
3792 int
3793 VOIDmode_compare_op (op, mode)
3794 register rtx op;
3795 enum machine_mode mode ATTRIBUTE_UNUSED;
3796 {
3797 return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
3798 }
3799 \f
/* Output code to perform a 387 binary operation in INSN, one of PLUS,
   MINUS, MULT or DIV.  OPERANDS are the insn operands, where operands[3]
   is the expression of the binary operation.  The output may either be
   emitted here, or returned to the caller, like all output_* functions.

   There is no guarantee that the operands are the same mode, as they
   might be within FLOAT or FLOAT_EXTEND expressions.

   The template is built up in BUF: a base mnemonic (fadd/fiadd/...)
   followed by size/pop suffixes and the operand references.  */

char *
output_387_binary_op (insn, operands)
     rtx insn;
     rtx *operands;
{
  rtx temp;
  char *base_op;
  static char buf[100];

  /* Pick the base mnemonic: the "fi" forms take an integer memory
     operand directly.  */
  switch (GET_CODE (operands[3]))
    {
    case PLUS:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fiadd";
      else
	base_op = "fadd";
      break;

    case MINUS:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fisub";
      else
	base_op = "fsub";
      break;

    case MULT:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fimul";
      else
	base_op = "fmul";
      break;

    case DIV:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fidiv";
      else
	base_op = "fdiv";
      break;

    default:
      abort ();
    }

  strcpy (buf, base_op);

  switch (GET_CODE (operands[3]))
    {
    case MULT:
    case PLUS:
      /* These are commutative, so if the destination register equals
	 the second source, swap the sources to simplify the cases
	 below.  */
      if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
	{
	  temp = operands[2];
	  operands[2] = operands[1];
	  operands[1] = temp;
	}

      if (GET_CODE (operands[2]) == MEM)
	return strcat (buf, AS1 (%z2,%2));

      /* An operand living in a CPU (non-387) register must be spilled
	 through memory first; output_op_from_reg emits the whole
	 sequence, so return an empty template.  */
      if (NON_STACK_REG_P (operands[1]))
	{
	  output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
	  return "";
	}

      else if (NON_STACK_REG_P (operands[2]))
	{
	  output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
	  return "";
	}

      /* If the second source dies here, use the popping form to
	 shrink the 387 stack.  */
      if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
	{
	  if (STACK_TOP_P (operands[0]))
	    return strcat (buf, AS2 (p,%0,%2));
	  else
	    return strcat (buf, AS2 (p,%2,%0));
	}

      if (STACK_TOP_P (operands[0]))
	return strcat (buf, AS2C (%y2,%0));
      else
	return strcat (buf, AS2C (%2,%0));

    case MINUS:
    case DIV:
      /* Non-commutative: when the first source is the memory operand
	 the reversed ("r") form is needed.  */
      if (GET_CODE (operands[1]) == MEM)
	return strcat (buf, AS1 (r%z1,%1));

      if (GET_CODE (operands[2]) == MEM)
	return strcat (buf, AS1 (%z2,%2));

      if (NON_STACK_REG_P (operands[1]))
	{
	  output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
	  return "";
	}

      else if (NON_STACK_REG_P (operands[2]))
	{
	  output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
	  return "";
	}

      /* From here on both sources must be 387 stack registers.  */
      if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
	abort ();

      /* Prefer a popping form when either source dies, choosing the
	 plain or reversed variant so the surviving value ends up in
	 operands[0].  */
      if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
	{
	  if (STACK_TOP_P (operands[0]))
	    return strcat (buf, AS2 (p,%0,%2));
	  else
	    return strcat (buf, AS2 (rp,%2,%0));
	}

      if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
	{
	  if (STACK_TOP_P (operands[0]))
	    return strcat (buf, AS2 (rp,%0,%1));
	  else
	    return strcat (buf, AS2 (p,%1,%0));
	}

      if (STACK_TOP_P (operands[0]))
	{
	  if (STACK_TOP_P (operands[1]))
	    return strcat (buf, AS2C (%y2,%0));
	  else
	    return strcat (buf, AS2 (r,%y1,%0));
	}
      else if (STACK_TOP_P (operands[1]))
	return strcat (buf, AS2C (%1,%0));
      else
	return strcat (buf, AS2 (r,%2,%0));

    default:
      abort ();
    }
}
3951 \f
/* Output code for INSN to convert a float to a signed int.  OPERANDS
   are the insn operands.  The output may be SFmode or DFmode and the
   input operand may be SImode or DImode.  As a special case, make sure
   that the 387 stack top dies if the output mode is DImode, because the
   hardware requires this.

   operands[2] and operands[3] appear to be memory slots used to hold
   the saved and the modified FP control word, and operands[4] a
   scratch CPU register -- NOTE(review): confirm against the i386.md
   pattern that emits this.  The sequence saves the control word,
   forces round-toward-zero, does the fist(p), and the returned
   template restores the original control word.  */

char *
output_fix_trunc (insn, operands)
     rtx insn;
     rtx *operands;
{
  /* Non-zero when the 387 stack top is popped by this insn.  */
  int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
  rtx xops[2];

  if (! STACK_TOP_P (operands[1]))
    abort ();

  /* 12 == 0x0c: both rounding-control bits set, i.e. truncate
     (round toward zero), written into the high byte of the control
     word copy below.  */
  xops[0] = GEN_INT (12);
  xops[1] = operands[4];

  output_asm_insn (AS1 (fnstc%W2,%2), operands);	/* save control word */
  output_asm_insn (AS2 (mov%L2,%2,%4), operands);	/* copy it to a reg */
  output_asm_insn (AS2 (mov%B1,%0,%h1), xops);		/* set RC = truncate */
  output_asm_insn (AS2 (mov%L4,%4,%3), operands);	/* spill modified CW */
  output_asm_insn (AS1 (fldc%W3,%3), operands);		/* load modified CW */

  if (NON_STACK_REG_P (operands[0]))
    output_to_reg (operands[0], stack_top_dies, operands[3]);

  else if (GET_CODE (operands[0]) == MEM)
    {
      if (stack_top_dies)
	output_asm_insn (AS1 (fistp%z0,%0), operands);
      else if (GET_MODE (operands[0]) == DImode && ! stack_top_dies)
	{
	  /* There is no DImode version of this without a stack pop, so
	     we must emulate it.  It doesn't matter much what the second
	     instruction is, because the value being pushed on the FP stack
	     is not used except for the following stack popping store.
	     This case can only happen without optimization, so it doesn't
	     matter that it is inefficient.  */
	  output_asm_insn (AS1 (fistp%z0,%0), operands);
	  output_asm_insn (AS1 (fild%z0,%0), operands);
	}
      else
	output_asm_insn (AS1 (fist%z0,%0), operands);
    }
  else
    abort ();

  /* Restore the original control word.  */
  return AS1 (fldc%W2,%2);
}
4004 \f
/* Output code for INSN to compare OPERANDS.  The two operands might
   not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
   expression.  If the compare is in mode CCFPEQmode, use an opcode that
   will not fault if a qNaN is present.  */

char *
output_float_compare (insn, operands)
     rtx insn;
     rtx *operands;
{
  int stack_top_dies;
  rtx body = XVECEXP (PATTERN (insn), 0, 0);
  int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
  rtx tmp;

  /* FCOMI selection is deliberately disabled (the `0 &&'); the
     CC_FCOMI branches below are kept for when it is re-enabled.  */
  if (0 && TARGET_CMOVE && STACK_REG_P (operands[1]))
    {
      cc_status.flags |= CC_FCOMI;
      cc_prev_status.flags &= ~CC_TEST_AX;
    }

  /* The 387 can only compare against the stack top; if operand 0 is
     not there, swap the operands and remember that the condition is
     reversed.  */
  if (! STACK_TOP_P (operands[0]))
    {
      tmp = operands[0];
      operands[0] = operands[1];
      operands[1] = tmp;
      cc_status.flags |= CC_REVERSED;
    }

  if (! STACK_TOP_P (operands[0]))
    abort ();

  stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;

  if (STACK_REG_P (operands[1])
      && stack_top_dies
      && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
      && REGNO (operands[1]) != FIRST_STACK_REG)
    {
      /* If both the top of the 387 stack dies, and the other operand
	 is also a stack register that dies, then this must be a
	 `fcompp' float compare */

      if (unordered_compare)
	{
	  if (cc_status.flags & CC_FCOMI)
	    {
	      /* FCOMI only pops once, so pop the second operand
		 explicitly.  */
	      output_asm_insn (AS2 (fucomip,%y1,%0), operands);
	      output_asm_insn (AS1 (fstp, %y0), operands);
	      return "";
	    }
	  else
	    output_asm_insn ("fucompp", operands);
	}
      else
	{
	  if (cc_status.flags & CC_FCOMI)
	    {
	      output_asm_insn (AS2 (fcomip, %y1,%0), operands);
	      output_asm_insn (AS1 (fstp, %y0), operands);
	      return "";
	    }
	  else
	    output_asm_insn ("fcompp", operands);
	}
    }
  else
    {
      static char buf[100];

      /* Decide if this is the integer or float compare opcode, or the
	 unordered float compare.  */

      if (unordered_compare)
	strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fucomi" : "fucom");
      else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
	strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fcomi" : "fcom");
      else
	strcpy (buf, "ficom");

      /* Modify the opcode if the 387 stack is to be popped.  */

      if (stack_top_dies)
	strcat (buf, "p");

      if (NON_STACK_REG_P (operands[1]))
	output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
      else if (cc_status.flags & CC_FCOMI)
	{
	  /* FCOMI sets the CPU flags directly; nothing to retrieve.  */
	  output_asm_insn (strcat (buf, AS2 (%z1,%y1,%0)), operands);
	  return "";
	}
      else
	output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
    }

  /* Now retrieve the condition code. */

  return output_fp_cc0_set (insn);
}
4105 \f
/* Output opcodes to transfer the results of FP compare or test INSN
   from the FPU to the CPU flags.  If TARGET_IEEE_FP, ensure that if the
   result of the compare or test is unordered, no comparison operator
   succeeds except NE.  Return an output template, if any.

   The FPU condition bits live in the status word stored to AX by
   fnstsw: in AH, 0x01 is C0, 0x04 is C2, and 0x40 is C3 (hence the
   0x45/0x44/0x05 masks below).  For IEEE mode the mask/compare chosen
   depends on which comparison the NEXT cc0 user performs, so the
   expected conditional jump is noted next to each case.  */

char *
output_fp_cc0_set (insn)
     rtx insn;
{
  rtx xops[3];
  rtx next;
  enum rtx_code code;

  /* Store the FPU status word into AX (hard register 0).  */
  xops[0] = gen_rtx_REG (HImode, 0);
  output_asm_insn (AS1 (fnsts%W0,%0), xops);

  if (! TARGET_IEEE_FP)
    {
      if (!(cc_status.flags & CC_REVERSED))
	{
	  next = next_cc0_user (insn);

	  /* Find the comparison code the cc0 user performs.  */
	  if (GET_CODE (next) == JUMP_INSN
	      && GET_CODE (PATTERN (next)) == SET
	      && SET_DEST (PATTERN (next)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
	    code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
	  else if (GET_CODE (PATTERN (next)) == SET)
	    code = GET_CODE (SET_SRC (PATTERN (next)));
	  else
	    return "sahf";

	  if (code == GT || code == LT || code == EQ || code == NE
	      || code == LE || code == GE)
	    {
	      /* We will test eax directly. */
	      cc_status.flags |= CC_TEST_AX;
	      return "";
	    }
	}

      /* Copy AH into the CPU flags the conventional way.  */
      return "sahf";
    }

  /* IEEE mode: emit an explicit mask/compare of AH chosen by the
     comparison the next cc0 user performs, so that an unordered
     result fails every test except NE.  */
  next = next_cc0_user (insn);
  if (next == NULL_RTX)
    abort ();

  if (GET_CODE (next) == JUMP_INSN
      && GET_CODE (PATTERN (next)) == SET
      && SET_DEST (PATTERN (next)) == pc_rtx
      && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
    code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
  else if (GET_CODE (PATTERN (next)) == SET)
    {
      if (GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
	code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
      else
	code = GET_CODE (SET_SRC (PATTERN (next)));
    }

  else if (GET_CODE (PATTERN (next)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (next), 0, 0)) == SET)
    {
      if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0))) == IF_THEN_ELSE)
	code = GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next), 0, 0)), 0));
      else
	code = GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0)));
    }
  else
    abort ();

  /* Operate on AL/AH (QImode view of hard register 0).  */
  xops[0] = gen_rtx_REG (QImode, 0);

  switch (code)
    {
    case GT:
      xops[1] = GEN_INT (0x45);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      /* je label */
      break;

    case LT:
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x01);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* je label */
      break;

    case GE:
      xops[1] = GEN_INT (0x05);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      /* je label */
      break;

    case LE:
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS1 (dec%B0,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* jb label */
      break;

    case EQ:
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* je label */
      break;

    case NE:
      xops[1] = GEN_INT (0x44);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
      /* jne label */
      break;

    case GTU:
    case LTU:
    case GEU:
    case LEU:
    default:
      /* Unsigned comparisons never apply to FP compares.  */
      abort ();
    }

  return "";
}
4237 \f
/* Number of scratch stack slots cached per machine mode; see
   assign_386_stack_local.  */
#define MAX_386_STACK_LOCALS 2

/* Cache of stack slots for the current function, indexed by machine
   mode and slot number.  Cleared for each new function by
   clear_386_stack_locals.  */
static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];

/* Define the structure for the machine field in struct function.
   It snapshots the i386-specific per-function state above (plus the
   PIC label globals, defined elsewhere in this file) so that state
   can be saved and restored around nested functions.  */
struct machine_function
{
  rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
  rtx pic_label_rtx;
  char pic_label_name[256];
};
4249
4250 /* Functions to save and restore i386_stack_locals.
4251 These will be called, via pointer variables,
4252 from push_function_context and pop_function_context. */
4253
4254 void
4255 save_386_machine_status (p)
4256 struct function *p;
4257 {
4258 p->machine
4259 = (struct machine_function *) xmalloc (sizeof (struct machine_function));
4260 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
4261 sizeof i386_stack_locals);
4262 p->machine->pic_label_rtx = pic_label_rtx;
4263 bcopy (pic_label_name, p->machine->pic_label_name, 256);
4264 }
4265
4266 void
4267 restore_386_machine_status (p)
4268 struct function *p;
4269 {
4270 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
4271 sizeof i386_stack_locals);
4272 pic_label_rtx = p->machine->pic_label_rtx;
4273 bcopy (p->machine->pic_label_name, pic_label_name, 256);
4274 free (p->machine);
4275 p->machine = NULL;
4276 }
4277
4278 /* Clear stack slot assignments remembered from previous functions.
4279 This is called from INIT_EXPANDERS once before RTL is emitted for each
4280 function. */
4281
4282 void
4283 clear_386_stack_locals ()
4284 {
4285 enum machine_mode mode;
4286 int n;
4287
4288 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
4289 mode = (enum machine_mode) ((int) mode + 1))
4290 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
4291 i386_stack_locals[(int) mode][n] = NULL_RTX;
4292
4293 pic_label_rtx = NULL_RTX;
4294 bzero (pic_label_name, 256);
4295 /* Arrange to save and restore i386_stack_locals around nested functions. */
4296 save_machine_status = save_386_machine_status;
4297 restore_machine_status = restore_386_machine_status;
4298 }
4299
4300 /* Return a MEM corresponding to a stack slot with mode MODE.
4301 Allocate a new slot if necessary.
4302
4303 The RTL for a function can have several slots available: N is
4304 which slot to use. */
4305
4306 rtx
4307 assign_386_stack_local (mode, n)
4308 enum machine_mode mode;
4309 int n;
4310 {
4311 if (n < 0 || n >= MAX_386_STACK_LOCALS)
4312 abort ();
4313
4314 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
4315 i386_stack_locals[(int) mode][n]
4316 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
4317
4318 return i386_stack_locals[(int) mode][n];
4319 }
4320 \f
4321 int is_mul(op,mode)
4322 register rtx op;
4323 enum machine_mode mode ATTRIBUTE_UNUSED;
4324 {
4325 return (GET_CODE (op) == MULT);
4326 }
4327
4328 int is_div(op,mode)
4329 register rtx op;
4330 enum machine_mode mode ATTRIBUTE_UNUSED;
4331 {
4332 return (GET_CODE (op) == DIV);
4333 }
4334 \f
4335 #ifdef NOTYET
/* Create a new copy of an rtx.
   Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.
   Doesn't share CONST.

   Like copy_rtx, but also carries over the is_spill_rtx flag
   (a local "intel1" extension).  */

rtx
copy_all_rtx (orig)
     register rtx orig;
{
  register rtx copy;
  register int i, j;
  register RTX_CODE code;
  register char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values. */
      return orig;

#if 0
    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
	 a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
	return orig;
      break;
#endif
      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */
    }

  /* Deep-copy: allocate a new rtx of the same code and transfer the
     mode, the status flags, and each operand according to its format
     letter.  */
  copy = rtx_alloc (code);
  PUT_MODE (copy, GET_MODE (orig));
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;
  copy->integrated = orig->integrated;
  /* intel1 */
  copy->is_spill_rtx = orig->is_spill_rtx;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  /* An rtx operand: recurse (via copy_rtx) unless null.  */
	  XEXP (copy, i) = XEXP (orig, i);
	  if (XEXP (orig, i) != NULL)
	    XEXP (copy, i) = copy_rtx (XEXP (orig, i));
	  break;

	case '0':
	case 'u':
	  /* Unused slots and insn references are shared, not copied.  */
	  XEXP (copy, i) = XEXP (orig, i);
	  break;

	case 'E':
	case 'V':
	  /* A vector of rtx's: allocate a new vector and copy each
	     element.  */
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	case 'S':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	default:
	  abort ();
	}
    }
  return copy;
}
4438
4439 \f
4440 /* Try to rewrite a memory address to make it valid */
4441
4442 void
4443 rewrite_address (mem_rtx)
4444 rtx mem_rtx;
4445 {
4446 rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
4447 int scale = 1;
4448 int offset_adjust = 0;
4449 int was_only_offset = 0;
4450 rtx mem_addr = XEXP (mem_rtx, 0);
4451 char *storage = oballoc (0);
4452 int in_struct = 0;
4453 int is_spill_rtx = 0;
4454
4455 in_struct = MEM_IN_STRUCT_P (mem_rtx);
4456 is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);
4457
4458 if (GET_CODE (mem_addr) == PLUS
4459 && GET_CODE (XEXP (mem_addr, 1)) == PLUS
4460 && GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
4461 {
4462 /* This part is utilized by the combiner. */
4463 ret_rtx
4464 = gen_rtx (PLUS, GET_MODE (mem_addr),
4465 gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
4466 XEXP (mem_addr, 0), XEXP (XEXP (mem_addr, 1), 0)),
4467 XEXP (XEXP (mem_addr, 1), 1));
4468
4469 if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
4470 {
4471 XEXP (mem_rtx, 0) = ret_rtx;
4472 RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
4473 return;
4474 }
4475
4476 obfree (storage);
4477 }
4478
4479 /* This part is utilized by loop.c.
4480 If the address contains PLUS (reg,const) and this pattern is invalid
4481 in this case - try to rewrite the address to make it valid. */
4482 storage = oballoc (0);
4483 index_rtx = base_rtx = offset_rtx = NULL;
4484
4485 /* Find the base index and offset elements of the memory address. */
4486 if (GET_CODE (mem_addr) == PLUS)
4487 {
4488 if (GET_CODE (XEXP (mem_addr, 0)) == REG)
4489 {
4490 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4491 base_rtx = XEXP (mem_addr, 1), index_rtx = XEXP (mem_addr, 0);
4492 else
4493 base_rtx = XEXP (mem_addr, 0), offset_rtx = XEXP (mem_addr, 1);
4494 }
4495
4496 else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
4497 {
4498 index_rtx = XEXP (mem_addr, 0);
4499 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4500 base_rtx = XEXP (mem_addr, 1);
4501 else
4502 offset_rtx = XEXP (mem_addr, 1);
4503 }
4504
4505 else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
4506 {
4507 if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS
4508 && GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT
4509 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0))
4510 == REG)
4511 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1))
4512 == CONST_INT)
4513 && (GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1))
4514 == CONST_INT)
4515 && GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG
4516 && GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF)
4517 {
4518 index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
4519 offset_rtx = XEXP (mem_addr, 1);
4520 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4521 offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
4522 }
4523 else
4524 {
4525 offset_rtx = XEXP (mem_addr, 1);
4526 index_rtx = XEXP (XEXP (mem_addr, 0), 0);
4527 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4528 }
4529 }
4530
4531 else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT)
4532 {
4533 was_only_offset = 1;
4534 index_rtx = NULL;
4535 base_rtx = NULL;
4536 offset_rtx = XEXP (mem_addr, 1);
4537 offset_adjust = INTVAL (XEXP (mem_addr, 0));
4538 if (offset_adjust == 0)
4539 {
4540 XEXP (mem_rtx, 0) = offset_rtx;
4541 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4542 return;
4543 }
4544 }
4545 else
4546 {
4547 obfree (storage);
4548 return;
4549 }
4550 }
4551 else if (GET_CODE (mem_addr) == MULT)
4552 index_rtx = mem_addr;
4553 else
4554 {
4555 obfree (storage);
4556 return;
4557 }
4558
4559 if (index_rtx != 0 && GET_CODE (index_rtx) == MULT)
4560 {
4561 if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
4562 {
4563 obfree (storage);
4564 return;
4565 }
4566
4567 scale_rtx = XEXP (index_rtx, 1);
4568 scale = INTVAL (scale_rtx);
4569 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4570 }
4571
4572 /* Now find which of the elements are invalid and try to fix them. */
4573 if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
4574 {
4575 offset_adjust = INTVAL (index_rtx) * scale;
4576
4577 if (offset_rtx != 0 && CONSTANT_P (offset_rtx))
4578 offset_rtx = plus_constant (offset_rtx, offset_adjust);
4579 else if (offset_rtx == 0)
4580 offset_rtx = const0_rtx;
4581
4582 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4583 XEXP (mem_rtx, 0) = offset_rtx;
4584 return;
4585 }
4586
4587 if (base_rtx && GET_CODE (base_rtx) == PLUS
4588 && GET_CODE (XEXP (base_rtx, 0)) == REG
4589 && GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
4590 {
4591 offset_adjust += INTVAL (XEXP (base_rtx, 1));
4592 base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
4593 }
4594
4595 else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
4596 {
4597 offset_adjust += INTVAL (base_rtx);
4598 base_rtx = NULL;
4599 }
4600
4601 if (index_rtx && GET_CODE (index_rtx) == PLUS
4602 && GET_CODE (XEXP (index_rtx, 0)) == REG
4603 && GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
4604 {
4605 offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
4606 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4607 }
4608
4609 if (index_rtx)
4610 {
4611 if (! LEGITIMATE_INDEX_P (index_rtx)
4612 && ! (index_rtx == stack_pointer_rtx && scale == 1
4613 && base_rtx == NULL))
4614 {
4615 obfree (storage);
4616 return;
4617 }
4618 }
4619
4620 if (base_rtx)
4621 {
4622 if (! LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
4623 {
4624 obfree (storage);
4625 return;
4626 }
4627 }
4628
4629 if (offset_adjust != 0)
4630 {
4631 if (offset_rtx != 0 && CONSTANT_P (offset_rtx))
4632 offset_rtx = plus_constant (offset_rtx, offset_adjust);
4633 else
4634 offset_rtx = const0_rtx;
4635
4636 if (index_rtx)
4637 {
4638 if (base_rtx)
4639 {
4640 if (scale != 1)
4641 {
4642 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
4643 gen_rtx (MULT, GET_MODE (index_rtx),
4644 index_rtx, scale_rtx),
4645 base_rtx);
4646
4647 if (GET_CODE (offset_rtx) != CONST_INT
4648 || INTVAL (offset_rtx) != 0)
4649 ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
4650 ret_rtx, offset_rtx);
4651 }
4652 else
4653 {
4654 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx),
4655 index_rtx, base_rtx);
4656
4657 if (GET_CODE (offset_rtx) != CONST_INT
4658 || INTVAL (offset_rtx) != 0)
4659 ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
4660 ret_rtx, offset_rtx);
4661 }
4662 }
4663 else
4664 {
4665 if (scale != 1)
4666 {
4667 ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx),
4668 index_rtx, scale_rtx);
4669
4670 if (GET_CODE (offset_rtx) != CONST_INT
4671 || INTVAL (offset_rtx) != 0)
4672 ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
4673 ret_rtx, offset_rtx);
4674 }
4675 else
4676 {
4677 if (GET_CODE (offset_rtx) == CONST_INT
4678 && INTVAL (offset_rtx) == 0)
4679 ret_rtx = index_rtx;
4680 else
4681 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx),
4682 index_rtx, offset_rtx);
4683 }
4684 }
4685 }
4686 else
4687 {
4688 if (base_rtx)
4689 {
4690 if (GET_CODE (offset_rtx) == CONST_INT
4691 && INTVAL (offset_rtx) == 0)
4692 ret_rtx = base_rtx;
4693 else
4694 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx,
4695 offset_rtx);
4696 }
4697 else if (was_only_offset)
4698 ret_rtx = offset_rtx;
4699 else
4700 {
4701 obfree (storage);
4702 return;
4703 }
4704 }
4705
4706 XEXP (mem_rtx, 0) = ret_rtx;
4707 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4708 return;
4709 }
4710 else
4711 {
4712 obfree (storage);
4713 return;
4714 }
4715 }
4716 #endif /* NOTYET */
4717 \f
4718 /* Return 1 if the first insn to set cc before INSN also sets the register
4719 REG_RTX; otherwise return 0. */
4720 int
4721 last_to_set_cc (reg_rtx, insn)
4722 rtx reg_rtx, insn;
4723 {
4724 rtx prev_insn = PREV_INSN (insn);
4725
4726 while (prev_insn)
4727 {
4728 if (GET_CODE (prev_insn) == NOTE)
4729 ;
4730
4731 else if (GET_CODE (prev_insn) == INSN)
4732 {
4733 if (GET_CODE (PATTERN (prev_insn)) != SET)
4734 return (0);
4735
4736 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
4737 {
4738 if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
4739 return (1);
4740
4741 return (0);
4742 }
4743
4744 else if (! doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
4745 return (0);
4746 }
4747
4748 else
4749 return (0);
4750
4751 prev_insn = PREV_INSN (prev_insn);
4752 }
4753
4754 return (0);
4755 }
4756 \f
4757 int
4758 doesnt_set_condition_code (pat)
4759 rtx pat;
4760 {
4761 switch (GET_CODE (pat))
4762 {
4763 case MEM:
4764 case REG:
4765 return 1;
4766
4767 default:
4768 return 0;
4769
4770 }
4771 }
4772 \f
4773 int
4774 sets_condition_code (pat)
4775 rtx pat;
4776 {
4777 switch (GET_CODE (pat))
4778 {
4779 case PLUS:
4780 case MINUS:
4781 case AND:
4782 case IOR:
4783 case XOR:
4784 case NOT:
4785 case NEG:
4786 case MULT:
4787 case DIV:
4788 case MOD:
4789 case UDIV:
4790 case UMOD:
4791 return 1;
4792
4793 default:
4794 return (0);
4795 }
4796 }
4797 \f
4798 int
4799 str_immediate_operand (op, mode)
4800 register rtx op;
4801 enum machine_mode mode ATTRIBUTE_UNUSED;
4802 {
4803 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
4804 return 1;
4805
4806 return 0;
4807 }
4808 \f
4809 int
4810 is_fp_insn (insn)
4811 rtx insn;
4812 {
4813 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4814 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4815 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4816 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
4817 return 1;
4818
4819 return 0;
4820 }
4821
4822 /* Return 1 if the mode of the SET_DEST of insn is floating point
4823 and it is not an fld or a move from memory to memory.
4824 Otherwise return 0 */
4825
4826 int
4827 is_fp_dest (insn)
4828 rtx insn;
4829 {
4830 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4831 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4832 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4833 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4834 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
4835 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
4836 && GET_CODE (SET_SRC (insn)) != MEM)
4837 return 1;
4838
4839 return 0;
4840 }
4841
4842 /* Return 1 if the mode of the SET_DEST of INSN is floating point and is
4843 memory and the source is a register. */
4844
4845 int
4846 is_fp_store (insn)
4847 rtx insn;
4848 {
4849 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4850 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4851 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4852 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4853 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
4854 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
4855 return 1;
4856
4857 return 0;
4858 }
4859 \f
4860 /* Return 1 if DEP_INSN sets a register which INSN uses as a base
4861 or index to reference memory.
4862 otherwise return 0 */
4863
4864 int
4865 agi_dependent (insn, dep_insn)
4866 rtx insn, dep_insn;
4867 {
4868 if (GET_CODE (dep_insn) == INSN
4869 && GET_CODE (PATTERN (dep_insn)) == SET
4870 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
4871 return reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn);
4872
4873 if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
4874 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
4875 && push_operand (SET_DEST (PATTERN (dep_insn)),
4876 GET_MODE (SET_DEST (PATTERN (dep_insn)))))
4877 return reg_mentioned_in_mem (stack_pointer_rtx, insn);
4878
4879 return 0;
4880 }
4881 \f
4882 /* Return 1 if reg is used in rtl as a base or index for a memory ref
4883 otherwise return 0. */
4884
4885 int
4886 reg_mentioned_in_mem (reg, rtl)
4887 rtx reg, rtl;
4888 {
4889 register char *fmt;
4890 register int i, j;
4891 register enum rtx_code code;
4892
4893 if (rtl == NULL)
4894 return 0;
4895
4896 code = GET_CODE (rtl);
4897
4898 switch (code)
4899 {
4900 case HIGH:
4901 case CONST_INT:
4902 case CONST:
4903 case CONST_DOUBLE:
4904 case SYMBOL_REF:
4905 case LABEL_REF:
4906 case PC:
4907 case CC0:
4908 case SUBREG:
4909 return 0;
4910 default:
4911 break;
4912 }
4913
4914 if (code == MEM && reg_mentioned_p (reg, rtl))
4915 return 1;
4916
4917 fmt = GET_RTX_FORMAT (code);
4918 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4919 {
4920 if (fmt[i] == 'E')
4921 {
4922 for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
4923 if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
4924 return 1;
4925 }
4926
4927 else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
4928 return 1;
4929 }
4930
4931 return 0;
4932 }
4933 \f
4934 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4935
4936 operands[0] = result, initialized with the startaddress
4937 operands[1] = alignment of the address.
4938 operands[2] = scratch register, initialized with the startaddress when
4939 not aligned, otherwise undefined
4940
4941 This is just the body. It needs the initialisations mentioned above and
4942 some address computing at the end. These things are done in i386.md. */
4943
4944 char *
4945 output_strlen_unroll (operands)
4946 rtx operands[];
4947 {
4948 rtx xops[18];
4949
4950 xops[0] = operands[0]; /* Result */
4951 /* operands[1]; * Alignment */
4952 xops[1] = operands[2]; /* Scratch */
4953 xops[2] = GEN_INT (0);
4954 xops[3] = GEN_INT (2);
4955 xops[4] = GEN_INT (3);
4956 xops[5] = GEN_INT (4);
4957 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4958 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4959 xops[8] = gen_label_rtx (); /* label of main loop */
4960
4961 if (TARGET_USE_Q_REG && QI_REG_P (xops[1]))
4962 xops[9] = gen_label_rtx (); /* pentium optimisation */
4963
4964 xops[10] = gen_label_rtx (); /* end label 2 */
4965 xops[11] = gen_label_rtx (); /* end label 1 */
4966 xops[12] = gen_label_rtx (); /* end label */
4967 /* xops[13] * Temporary used */
4968 xops[14] = GEN_INT (0xff);
4969 xops[15] = GEN_INT (0xff00);
4970 xops[16] = GEN_INT (0xff0000);
4971 xops[17] = GEN_INT (0xff000000);
4972
4973 /* Loop to check 1..3 bytes for null to get an aligned pointer. */
4974
4975 /* Is there a known alignment and is it less than 4? */
4976 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
4977 {
4978 /* Is there a known alignment and is it not 2? */
4979 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4980 {
4981 xops[6] = gen_label_rtx (); /* Label when aligned to 3-byte */
4982 xops[7] = gen_label_rtx (); /* Label when aligned to 2-byte */
4983
4984 /* Leave just the 3 lower bits.
4985 If this is a q-register, then the high part is used later
4986 therefore use andl rather than andb. */
4987 output_asm_insn (AS2 (and%L1,%4,%1), xops);
4988
4989 /* Is aligned to 4-byte address when zero */
4990 output_asm_insn (AS1 (je,%l8), xops);
4991
4992 /* Side-effect even Parity when %eax == 3 */
4993 output_asm_insn (AS1 (jp,%6), xops);
4994
4995 /* Is it aligned to 2 bytes ? */
4996 if (QI_REG_P (xops[1]))
4997 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4998 else
4999 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
5000
5001 output_asm_insn (AS1 (je,%7), xops);
5002 }
5003 else
5004 {
5005 /* Since the alignment is 2, we have to check 2 or 0 bytes;
5006 check if is aligned to 4 - byte. */
5007 output_asm_insn (AS2 (and%L1,%3,%1), xops);
5008
5009 /* Is aligned to 4-byte address when zero */
5010 output_asm_insn (AS1 (je,%l8), xops);
5011 }
5012
5013 xops[13] = gen_rtx_MEM (QImode, xops[0]);
5014
5015 /* Now compare the bytes; compare with the high part of a q-reg
5016 gives shorter code. */
5017 if (QI_REG_P (xops[1]))
5018 {
5019 /* Compare the first n unaligned byte on a byte per byte basis. */
5020 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5021
5022 /* When zero we reached the end. */
5023 output_asm_insn (AS1 (je,%l12), xops);
5024
5025 /* Increment the address. */
5026 output_asm_insn (AS1 (inc%L0,%0), xops);
5027
5028 /* Not needed with an alignment of 2 */
5029 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
5030 {
5031 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5032 CODE_LABEL_NUMBER (xops[7]));
5033 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5034 output_asm_insn (AS1 (je,%l12), xops);
5035 output_asm_insn (AS1 (inc%L0,%0), xops);
5036
5037 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5038 CODE_LABEL_NUMBER (xops[6]));
5039 }
5040
5041 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5042 }
5043 else
5044 {
5045 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5046 output_asm_insn (AS1 (je,%l12), xops);
5047 output_asm_insn (AS1 (inc%L0,%0), xops);
5048
5049 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5050 CODE_LABEL_NUMBER (xops[7]));
5051 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5052 output_asm_insn (AS1 (je,%l12), xops);
5053 output_asm_insn (AS1 (inc%L0,%0), xops);
5054
5055 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5056 CODE_LABEL_NUMBER (xops[6]));
5057 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5058 }
5059
5060 output_asm_insn (AS1 (je,%l12), xops);
5061 output_asm_insn (AS1 (inc%L0,%0), xops);
5062 }
5063
5064 /* Generate loop to check 4 bytes at a time. It is not a good idea to
5065 align this loop. It gives only huge programs, but does not help to
5066 speed up. */
5067 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
5068
5069 xops[13] = gen_rtx_MEM (SImode, xops[0]);
5070 output_asm_insn (AS2 (mov%L1,%13,%1), xops);
5071
5072 if (QI_REG_P (xops[1]))
5073 {
5074 /* On i586 it is faster to combine the hi- and lo- part as
5075 a kind of lookahead. If anding both yields zero, then one
5076 of both *could* be zero, otherwise none of both is zero;
5077 this saves one instruction, on i486 this is slower
5078 tested with P-90, i486DX2-66, AMD486DX2-66 */
5079 if (TARGET_PENTIUM)
5080 {
5081 output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
5082 output_asm_insn (AS1 (jne,%l9), xops);
5083 }
5084
5085 /* Check first byte. */
5086 output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
5087 output_asm_insn (AS1 (je,%l12), xops);
5088
5089 /* Check second byte. */
5090 output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
5091 output_asm_insn (AS1 (je,%l11), xops);
5092
5093 if (TARGET_PENTIUM)
5094 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5095 CODE_LABEL_NUMBER (xops[9]));
5096 }
5097
5098 else
5099 {
5100 /* Check first byte. */
5101 output_asm_insn (AS2 (test%L1,%14,%1), xops);
5102 output_asm_insn (AS1 (je,%l12), xops);
5103
5104 /* Check second byte. */
5105 output_asm_insn (AS2 (test%L1,%15,%1), xops);
5106 output_asm_insn (AS1 (je,%l11), xops);
5107 }
5108
5109 /* Check third byte. */
5110 output_asm_insn (AS2 (test%L1,%16,%1), xops);
5111 output_asm_insn (AS1 (je,%l10), xops);
5112
5113 /* Check fourth byte and increment address. */
5114 output_asm_insn (AS2 (add%L0,%5,%0), xops);
5115 output_asm_insn (AS2 (test%L1,%17,%1), xops);
5116 output_asm_insn (AS1 (jne,%l8), xops);
5117
5118 /* Now generate fixups when the compare stops within a 4-byte word. */
5119 output_asm_insn (AS2 (sub%L0,%4,%0), xops);
5120
5121 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
5122 output_asm_insn (AS1 (inc%L0,%0), xops);
5123
5124 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
5125 output_asm_insn (AS1 (inc%L0,%0), xops);
5126
5127 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));
5128
5129 return "";
5130 }
5131
5132 char *
5133 output_fp_conditional_move (which_alternative, operands)
5134 int which_alternative;
5135 rtx operands[];
5136 {
5137 switch (which_alternative)
5138 {
5139 case 0:
5140 /* r <- cond ? arg : r */
5141 output_asm_insn (AS2 (fcmov%F1,%2,%0), operands);
5142 break;
5143
5144 case 1:
5145 /* r <- cond ? r : arg */
5146 output_asm_insn (AS2 (fcmov%f1,%3,%0), operands);
5147 break;
5148
5149 case 2:
5150 /* r <- cond ? r : arg */
5151 output_asm_insn (AS2 (fcmov%F1,%2,%0), operands);
5152 output_asm_insn (AS2 (fcmov%f1,%3,%0), operands);
5153 break;
5154
5155 default:
5156 abort ();
5157 }
5158
5159 return "";
5160 }
5161
/* Emit the cmov insn(s) for an integer conditional move.  Returns ""
   on success, or NULL_PTR when final must first re-emit the deleted
   compare insn (see the comment below).  WHICH_ALTERNATIVE selects
   the matched constraint alternative: 0 moves operand 2 in when the
   condition holds, 1 moves operand 3 in when it does not, 2 emits
   both transfers.  */

char *
output_int_conditional_move (which_alternative, operands)
     int which_alternative;
     rtx operands[];
{
  int code = GET_CODE (operands[1]);
  enum machine_mode mode;
  rtx xops[4];

  /* This is very tricky. We have to do it right. For a code segement
     like:

	int foo, bar;
	....
	foo = foo - x;
	if (foo >= 0)
	  bar = y;

     final_scan_insn () may delete the insn which sets CC. We have to
     tell final_scan_insn () if it should be reinserted. When CODE is
     GT or LE, we have to check the CC_NO_OVERFLOW bit and return
     NULL_PTR to tell final to reinsert the test insn because the
     conditional move cannot be handled properly without it. */
  if ((code == GT || code == LE)
      && (cc_prev_status.flags & CC_NO_OVERFLOW))
    return NULL_PTR;

  mode = GET_MODE (operands [0]);
  if (mode == DImode)
    {
      /* cmov handles SImode operands, so a DImode move is emitted as
	 two SImode cmovs: one on OPERANDS for word 0 and one on XOPS
	 for word 1 (the high half on little-endian x86).  */
      xops [0] = gen_rtx_SUBREG (SImode, operands [0], 1);
      xops [1] = operands [1];
      xops [2] = gen_rtx_SUBREG (SImode, operands [2], 1);
      xops [3] = gen_rtx_SUBREG (SImode, operands [3], 1);
    }

  switch (which_alternative)
    {
    case 0:
      /* r <- cond ? arg : r */
      output_asm_insn (AS2 (cmov%C1,%2,%0), operands);
      if (mode == DImode)
	output_asm_insn (AS2 (cmov%C1,%2,%0), xops);
      break;

    case 1:
      /* r <- cond ? r : arg */
      output_asm_insn (AS2 (cmov%c1,%3,%0), operands);
      if (mode == DImode)
	output_asm_insn (AS2 (cmov%c1,%3,%0), xops);
      break;

    case 2:
      /* rm <- cond ? arg1 : arg2 */
      output_asm_insn (AS2 (cmov%C1,%2,%0), operands);
      output_asm_insn (AS2 (cmov%c1,%3,%0), operands);
      if (mode == DImode)
	{
	  output_asm_insn (AS2 (cmov%C1,%2,%0), xops);
	  output_asm_insn (AS2 (cmov%c1,%3,%0), xops);
	}
      break;

    default:
      abort ();
    }

  return "";
}
This page took 0.270331 seconds and 5 git commands to generate.