]> gcc.gnu.org Git - gcc.git/blob - gcc/config/i386/i386.c
final.c (final): If a label is reached only from a single jump...
[gcc.git] / gcc / config / i386 / i386.c
1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 92, 94, 95, 96, 97, 1998 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include <setjmp.h>
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "regs.h"
26 #include "hard-reg-set.h"
27 #include "real.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-flags.h"
31 #include "output.h"
32 #include "insn-attr.h"
33 #include "tree.h"
34 #include "flags.h"
35 #include "except.h"
36 #include "function.h"
37 #include "recog.h"
38 #include "expr.h"
39 #include "toplev.h"
40
#ifdef EXTRA_CONSTRAINT
/* If EXTRA_CONSTRAINT is defined, then the 'S'
   constraint in REG_CLASS_FROM_LETTER will no longer work, and various
   asm statements that need 'S' for class SIREG will break.  */
error EXTRA_CONSTRAINT conflicts with S constraint letter
/* The previous line used to be #error, but some compilers barf
   even if the conditional was untrue.  (The bare `error' token still
   forces a compile failure whenever the #ifdef is taken.)  */
#endif
49
/* Fallback definition for targets whose headers do not provide a
   stack-probe limit.  */
#ifndef CHECK_STACK_LIMIT
#define CHECK_STACK_LIMIT -1
#endif
53
/* Type of an operand for ix86_{binary,unary}_operator_ok.  */
enum reg_mem
{
  reg_p,			/* operand is a register */
  mem_p,			/* operand is a memory reference */
  imm_p				/* operand is an immediate constant */
};
61
/* Processor costs (relative to an add).  One table per supported
   processor; ix86_cost below points at the table selected by -mcpu=
   in override_options.  */
struct processor_costs i386_cost = {	/* 386 specific costs */
  1,					/* cost of an add instruction */
  1,					/* cost of a lea instruction */
  3,					/* variable shift costs */
  2,					/* constant shift costs */
  6,					/* cost of starting a multiply */
  1,					/* cost of multiply per each bit set */
  23					/* cost of a divide/mod */
};

struct processor_costs i486_cost = {	/* 486 specific costs */
  1,					/* cost of an add instruction */
  1,					/* cost of a lea instruction */
  3,					/* variable shift costs */
  2,					/* constant shift costs */
  12,					/* cost of starting a multiply */
  1,					/* cost of multiply per each bit set */
  40					/* cost of a divide/mod */
};

struct processor_costs pentium_cost = {	/* Pentium specific costs */
  1,					/* cost of an add instruction */
  1,					/* cost of a lea instruction */
  4,					/* variable shift costs */
  1,					/* constant shift costs */
  11,					/* cost of starting a multiply */
  0,					/* cost of multiply per each bit set */
  25					/* cost of a divide/mod */
};

struct processor_costs pentiumpro_cost = { /* PentiumPro specific costs */
  1,					/* cost of an add instruction */
  1,					/* cost of a lea instruction */
  3,					/* variable shift costs */
  1,					/* constant shift costs */
  4,					/* cost of starting a multiply */
  0,					/* cost of multiply per each bit set */
  17					/* cost of a divide/mod */
};

struct processor_costs k6_cost = {	/* AMD K6 specific costs */
  1,					/* cost of an add instruction */
  1,					/* cost of a lea instruction */
  1,					/* variable shift costs */
  1,					/* constant shift costs */
  2,					/* cost of starting a multiply */
  0,					/* cost of multiply per each bit set */
  18					/* cost of a divide/mod */
};

/* Cost table in effect; defaults to Pentium and is replaced from the
   processor table in override_options.  */
struct processor_costs *ix86_cost = &pentium_cost;
114
/* Processor feature/optimization bitmasks.  Each m_* macro is the bit
   for one processor_type; each x86_* constant is the set of processors
   for which the named code generation strategy is enabled (tested
   elsewhere against the current ix86_cpu).  */
#define m_386 (1<<PROCESSOR_I386)
#define m_486 (1<<PROCESSOR_I486)
#define m_PENT (1<<PROCESSOR_PENTIUM)
#define m_PPRO (1<<PROCESSOR_PENTIUMPRO)
#define m_K6 (1<<PROCESSOR_K6)

const int x86_use_leave = m_386 | m_K6;
const int x86_push_memory = m_386 | m_K6;
const int x86_zero_extend_with_and = m_486 | m_PENT;
const int x86_movx = m_386 | m_PPRO | m_K6;
const int x86_double_with_add = ~m_386;		/* all but the 386 */
const int x86_use_bit_test = m_386;
const int x86_unroll_strlen = m_486 | m_PENT | m_PPRO;
const int x86_use_q_reg = m_PENT | m_PPRO | m_K6;
const int x86_use_any_reg = m_486;
const int x86_cmove = m_PPRO;			/* cmove available */
const int x86_deep_branch = m_PPRO| m_K6;
133
/* Build a MEM of the given MODE addressed by the frame pointer.  */
#define AT_BP(mode) (gen_rtx_MEM ((mode), frame_pointer_rtx))

extern FILE *asm_out_file;
extern char *strcat ();

static void ix86_epilogue PROTO((int));
static void ix86_prologue PROTO((int));

/* Old-style forward declarations for output helpers defined later in
   this file.  */
char *singlemove_string ();
char *output_move_const_single ();
char *output_fp_cc0_set ();

/* Register name tables, indexed by hard register number; the
   initializer macros come from i386.h.  */
char *hi_reg_name[] = HI_REGISTER_NAMES;
char *qi_reg_name[] = QI_REGISTER_NAMES;
char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
149
/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in i386.h.  The first two FP
   registers get their own classes (FP_TOP_REG, FP_SECOND_REG) so
   st(0)/st(1) can be constrained individually.  */

enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{
  /* ax, dx, cx, bx */
  AREG, DREG, CREG, BREG,
  /* si, di, bp, sp */
  SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
  /* FP registers */
  FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
  FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
  /* arg pointer */
  INDEX_REGS
};
165
/* Test and compare insns in i386.md store the information needed to
   generate branch and scc insns here.  */

struct rtx_def *i386_compare_op0 = NULL_RTX;	/* first operand of the compare */
struct rtx_def *i386_compare_op1 = NULL_RTX;	/* second operand of the compare */
struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();

/* Which cpu are we scheduling for; set from -mcpu= in override_options.  */
enum processor_type ix86_cpu;

/* Which instruction set architecture to use; set from -march=.  */
int ix86_arch;

/* Strings to hold which cpu and instruction set architecture to use.  */
char *ix86_cpu_string;		/* for -mcpu=<xxx> */
char *ix86_arch_string;		/* for -march=<xxx> */

/* Register allocation order; a string of register letters validated in
   override_options and consumed by order_regs_for_local_alloc.  */
char *i386_reg_alloc_order;
/* Which of the seven allocatable registers the string mentioned.  */
static char regs_allocated[FIRST_PSEUDO_REGISTER];

/* # of registers to use to pass arguments.  */
char *i386_regparm_string;

/* i386_regparm_string as a number */
int i386_regparm;

/* Alignment to use for loops and jumps:  */

/* Power of two alignment for loops.  */
char *i386_align_loops_string;

/* Power of two alignment for non-loop jumps.  */
char *i386_align_jumps_string;

/* Values 1-5: see jump.c */
int i386_branch_cost;
char *i386_branch_cost_string;

/* Power of two alignment for functions.  */
int i386_align_funcs;
char *i386_align_funcs_string;

/* Power of two alignment for loops.  */
int i386_align_loops;

/* Power of two alignment for non-loop jumps.  */
int i386_align_jumps;
214
215 /* Sometimes certain combinations of command options do not make
216 sense on a particular target machine. You can define a macro
217 `OVERRIDE_OPTIONS' to take account of this. This macro, if
218 defined, is executed once just after all the command options have
219 been parsed.
220
221 Don't use this macro to turn on various extra optimizations for
222 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
223
224 void
225 override_options ()
226 {
227 int ch, i, j;
228 int def_align;
229
230 static struct ptt
231 {
232 char *name; /* Canonical processor name. */
233 enum processor_type processor; /* Processor type enum value. */
234 struct processor_costs *cost; /* Processor costs */
235 int target_enable; /* Target flags to enable. */
236 int target_disable; /* Target flags to disable. */
237 } processor_target_table[]
238 = {{PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
239 {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
240 {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
241 {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
242 {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentiumpro_cost,
243 0, 0},
244 {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO,
245 &pentiumpro_cost, 0, 0},
246 {PROCESSOR_K6_STRING, PROCESSOR_K6, &k6_cost, 0, 0}};
247
248 int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
249
250 #ifdef SUBTARGET_OVERRIDE_OPTIONS
251 SUBTARGET_OVERRIDE_OPTIONS;
252 #endif
253
254 /* Validate registers in register allocation order. */
255 if (i386_reg_alloc_order)
256 {
257 for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
258 {
259 int regno = 0;
260
261 switch (ch)
262 {
263 case 'a': regno = 0; break;
264 case 'd': regno = 1; break;
265 case 'c': regno = 2; break;
266 case 'b': regno = 3; break;
267 case 'S': regno = 4; break;
268 case 'D': regno = 5; break;
269 case 'B': regno = 6; break;
270
271 default: fatal ("Register '%c' is unknown", ch);
272 }
273
274 if (regs_allocated[regno])
275 fatal ("Register '%c' already specified in allocation order", ch);
276
277 regs_allocated[regno] = 1;
278 }
279 }
280
281 if (ix86_arch_string == 0)
282 {
283 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
284 if (ix86_cpu_string == 0)
285 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
286 }
287
288 for (i = 0; i < ptt_size; i++)
289 if (! strcmp (ix86_arch_string, processor_target_table[i].name))
290 {
291 ix86_arch = processor_target_table[i].processor;
292 if (ix86_cpu_string == 0)
293 ix86_cpu_string = processor_target_table[i].name;
294 break;
295 }
296
297 if (i == ptt_size)
298 {
299 error ("bad value (%s) for -march= switch", ix86_arch_string);
300 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
301 ix86_arch = PROCESSOR_DEFAULT;
302 }
303
304 if (ix86_cpu_string == 0)
305 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
306
307 for (j = 0; j < ptt_size; j++)
308 if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
309 {
310 ix86_cpu = processor_target_table[j].processor;
311 ix86_cost = processor_target_table[j].cost;
312 if (i > j && (int) ix86_arch >= (int) PROCESSOR_K6)
313 error ("-mcpu=%s does not support -march=%s",
314 ix86_cpu_string, ix86_arch_string);
315
316 target_flags |= processor_target_table[j].target_enable;
317 target_flags &= ~processor_target_table[j].target_disable;
318 break;
319 }
320
321 if (j == ptt_size)
322 {
323 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
324 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
325 ix86_cpu = PROCESSOR_DEFAULT;
326 }
327
328 /* Validate -mregparm= value. */
329 if (i386_regparm_string)
330 {
331 i386_regparm = atoi (i386_regparm_string);
332 if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
333 fatal ("-mregparm=%d is not between 0 and %d",
334 i386_regparm, REGPARM_MAX);
335 }
336
337 /* The 486 suffers more from non-aligned cache line fills, and the
338 larger code size results in a larger cache foot-print and more misses.
339 The 486 has a 16 byte cache line, pentium and pentiumpro have a 32 byte
340 cache line. */
341 def_align = (TARGET_486) ? 4 : 2;
342
343 /* Validate -malign-loops= value, or provide default. */
344 if (i386_align_loops_string)
345 {
346 i386_align_loops = atoi (i386_align_loops_string);
347 if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
348 fatal ("-malign-loops=%d is not between 0 and %d",
349 i386_align_loops, MAX_CODE_ALIGN);
350 }
351 else
352 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
353 i386_align_loops = 4;
354 #else
355 i386_align_loops = 2;
356 #endif
357
358 /* Validate -malign-jumps= value, or provide default. */
359 if (i386_align_jumps_string)
360 {
361 i386_align_jumps = atoi (i386_align_jumps_string);
362 if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
363 fatal ("-malign-jumps=%d is not between 0 and %d",
364 i386_align_jumps, MAX_CODE_ALIGN);
365 }
366 else
367 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
368 i386_align_jumps = 4;
369 #else
370 i386_align_jumps = def_align;
371 #endif
372
373 /* Validate -malign-functions= value, or provide default. */
374 if (i386_align_funcs_string)
375 {
376 i386_align_funcs = atoi (i386_align_funcs_string);
377 if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
378 fatal ("-malign-functions=%d is not between 0 and %d",
379 i386_align_funcs, MAX_CODE_ALIGN);
380 }
381 else
382 i386_align_funcs = def_align;
383
384 /* Validate -mbranch-cost= value, or provide default. */
385 if (i386_branch_cost_string)
386 {
387 i386_branch_cost = atoi (i386_branch_cost_string);
388 if (i386_branch_cost < 0 || i386_branch_cost > 5)
389 fatal ("-mbranch-cost=%d is not between 0 and 5",
390 i386_branch_cost);
391 }
392 else
393 i386_branch_cost = 1;
394
395 /* Keep nonleaf frame pointers. */
396 if (TARGET_OMIT_LEAF_FRAME_POINTER)
397 flag_omit_frame_pointer = 1;
398 }
399 \f
/* A C statement (sans semicolon) to choose the order in which to
   allocate hard registers for pseudo-registers local to a basic
   block.

   Store the desired register order in the array `reg_alloc_order'.
   Element 0 should be the register to allocate first; element 1, the
   next register; and so on.

   The macro body should not assume anything about the contents of
   `reg_alloc_order' before execution of the macro.

   On most machines, it is not necessary to define this macro.  */

void
order_regs_for_local_alloc ()
{
  int i, ch, order;

  /* User specified the register allocation order.  */

  if (i386_reg_alloc_order)
    {
      /* Place the explicitly ordered registers first.  No default case
	 is needed in the switch: unknown letters were already rejected
	 with a fatal error by override_options.  */
      for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
	{
	  int regno = 0;

	  switch (ch)
	    {
	    case 'a':	regno = 0;	break;
	    case 'd':	regno = 1;	break;
	    case 'c':	regno = 2;	break;
	    case 'b':	regno = 3;	break;
	    case 'S':	regno = 4;	break;
	    case 'D':	regno = 5;	break;
	    case 'B':	regno = 6;	break;
	    }

	  reg_alloc_order[order++] = regno;
	}

      /* Append every register the user did not mention, in natural
	 order (regs_allocated was filled in by override_options).  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	{
	  if (! regs_allocated[i])
	    reg_alloc_order[order++] = i;
	}
    }

  /* If user did not specify a register allocation order, use natural order.  */
  else
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	reg_alloc_order[i] = i;
    }
}
454 \f
455 void
456 optimization_options (level, size)
457 int level;
458 int size ATTRIBUTE_UNUSED;
459 {
460 /* For -O2 and beyond, turn off -fschedule-insns by default. It tends to
461 make the problem with not enough registers even worse. */
462 #ifdef INSN_SCHEDULING
463 if (level > 1)
464 flag_schedule_insns = 0;
465 #endif
466 }
467 \f
468 /* Sign-extend a 16-bit constant */
469
470 struct rtx_def *
471 i386_sext16_if_const (op)
472 struct rtx_def *op;
473 {
474 if (GET_CODE (op) == CONST_INT)
475 {
476 HOST_WIDE_INT val = INTVAL (op);
477 HOST_WIDE_INT sext_val;
478 if (val & 0x8000)
479 sext_val = val | ~0xffff;
480 else
481 sext_val = val & 0xffff;
482 if (sext_val != val)
483 op = GEN_INT (sext_val);
484 }
485 return op;
486 }
487 \f
488 /* Return nonzero if the rtx is aligned */
489
490 static int
491 i386_aligned_reg_p (regno)
492 int regno;
493 {
494 return (regno == STACK_POINTER_REGNUM
495 || (! flag_omit_frame_pointer && regno == FRAME_POINTER_REGNUM));
496 }
497
/* Return nonzero if operand OP is known to refer to 4-byte aligned
   data.  Non-MEM operands trivially qualify; for a MEM we only
   recognize constant addresses that are multiples of 4, aligned base
   registers (see i386_aligned_reg_p), and reg+offset forms combining
   the two.  */

int
i386_aligned_p (op)
     rtx op;
{
  /* Registers and immediate operands are always "aligned".  */
  if (GET_CODE (op) != MEM)
    return 1;

  /* Don't even try to do any aligned optimizations with volatiles.  */
  if (MEM_VOLATILE_P (op))
    return 0;

  /* Get address of memory operand.  */
  op = XEXP (op, 0);

  switch (GET_CODE (op))
    {
    case CONST_INT:
      if (INTVAL (op) & 3)
	break;
      return 1;

      /* Match "reg + offset" */
    case PLUS:
      if (GET_CODE (XEXP (op, 1)) != CONST_INT)
	break;
      if (INTVAL (XEXP (op, 1)) & 3)
	break;

      op = XEXP (op, 0);
      if (GET_CODE (op) != REG)
	break;

      /* ... fall through ... */

    case REG:
      return i386_aligned_reg_p (REGNO (op));

    default:
      break;
    }

  return 0;
}
542 \f
543 /* Return nonzero if INSN looks like it won't compute useful cc bits
544 as a side effect. This information is only a hint. */
545
546 int
547 i386_cc_probably_useless_p (insn)
548 rtx insn;
549 {
550 return ! next_cc0_user (insn);
551 }
552 \f
553 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
554 attribute for DECL. The attributes in ATTRIBUTES have previously been
555 assigned to DECL. */
556
557 int
558 i386_valid_decl_attribute_p (decl, attributes, identifier, args)
559 tree decl ATTRIBUTE_UNUSED;
560 tree attributes ATTRIBUTE_UNUSED;
561 tree identifier ATTRIBUTE_UNUSED;
562 tree args ATTRIBUTE_UNUSED;
563 {
564 return 0;
565 }
566
/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
   attribute for TYPE.  The attributes in ATTRIBUTES have previously been
   assigned to TYPE.  The attributes recognized are `stdcall', `cdecl'
   (no arguments) and `regparm' (one integer argument).  */

int
i386_valid_type_attribute_p (type, attributes, identifier, args)
     tree type;
     tree attributes ATTRIBUTE_UNUSED;
     tree identifier;
     tree args;
{
  /* NOTE(review): FIELD_DECL and TYPE_DECL are decl codes rather than
     type codes; presumably this is meant to accept the attributes on
     those decls as well -- confirm against the callers.  */
  if (TREE_CODE (type) != FUNCTION_TYPE
      && TREE_CODE (type) != METHOD_TYPE
      && TREE_CODE (type) != FIELD_DECL
      && TREE_CODE (type) != TYPE_DECL)
    return 0;

  /* Stdcall attribute says callee is responsible for popping arguments
     if they are not variable.  */
  if (is_attribute_p ("stdcall", identifier))
    return (args == NULL_TREE);

  /* Cdecl attribute says the callee is a normal C declaration.  */
  if (is_attribute_p ("cdecl", identifier))
    return (args == NULL_TREE);

  /* Regparm attribute specifies how many integer arguments are to be
     passed in registers.  */
  if (is_attribute_p ("regparm", identifier))
    {
      tree cst;

      /* The argument list must be a single-element TREE_LIST.  */
      if (! args || TREE_CODE (args) != TREE_LIST
	  || TREE_CHAIN (args) != NULL_TREE
	  || TREE_VALUE (args) == NULL_TREE)
	return 0;

      cst = TREE_VALUE (args);
      if (TREE_CODE (cst) != INTEGER_CST)
	return 0;

      /* The count must fit in [0, REGPARM_MAX].  */
      if (TREE_INT_CST_HIGH (cst) != 0
	  || TREE_INT_CST_LOW (cst) < 0
	  || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
	return 0;

      return 1;
    }

  return 0;
}
618
619 /* Return 0 if the attributes for two types are incompatible, 1 if they
620 are compatible, and 2 if they are nearly compatible (which causes a
621 warning to be generated). */
622
623 int
624 i386_comp_type_attributes (type1, type2)
625 tree type1 ATTRIBUTE_UNUSED;
626 tree type2 ATTRIBUTE_UNUSED;
627 {
628 return 1;
629 }
630
631 \f
/* Value is the number of bytes of arguments automatically
   popped when returning from a subroutine call.
   FUNDECL is the declaration node of the function (as a tree),
   FUNTYPE is the data type of the function (as a tree),
   or for a library call it is an identifier node for the subroutine name.
   SIZE is the number of bytes of arguments passed on the stack.

   On the 80386, the RTD insn may be used to pop them if the number
   of args is fixed, but if the number is variable then the caller
   must pop them all.  RTD can't be used for library calls now
   because the library is compiled with the Unix compiler.
   Use of RTD is a selectable option, since it is incompatible with
   standard Unix calling sequences.  If the option is not selected,
   the caller must always pop the args.

   The attribute stdcall is equivalent to RTD on a per module basis.  */

int
i386_return_pops_args (fundecl, funtype, size)
     tree fundecl;
     tree funtype;
     int size;
{
  /* -mrtd applies to real function decls only; library calls are
     represented by IDENTIFIER_NODEs and are excluded.  */
  int rtd = TARGET_RTD && (!fundecl || TREE_CODE (fundecl) != IDENTIFIER_NODE);

  /* Cdecl functions override -mrtd, and never pop the stack.  */
  if (! lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {

    /* Stdcall functions will pop the stack if not variable args.  */
    if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
      rtd = 1;

    /* Pop SIZE bytes only when the argument list is fixed: either
       empty, or terminated by void_type_node (i.e. no ellipsis).  */
    if (rtd
	&& (TYPE_ARG_TYPES (funtype) == NULL_TREE
	    || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype)))
		== void_type_node)))
      return size;
  }

  /* Lose any fake structure return argument.  */
  if (aggregate_value_p (TREE_TYPE (funtype)))
    return GET_MODE_SIZE (Pmode);

  return 0;
}
677
678 \f
/* Argument support functions.  */

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.  */

void
init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;	/* Argument info to initialize */
     tree fntype;		/* tree ptr for function decl */
     rtx libname;		/* SYMBOL_REF of library name or 0 */
{
  /* All-zero template; assigning it clears every field of *CUM.  */
  static CUMULATIVE_ARGS zero_cum;
  tree param, next_param;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\ninit_cumulative_args (");
      if (fntype)
	fprintf (stderr, "fntype code = %s, ret code = %s",
		 tree_code_name[(int) TREE_CODE (fntype)],
		 tree_code_name[(int) TREE_CODE (TREE_TYPE (fntype))]);
      else
	fprintf (stderr, "no fntype");

      if (libname)
	fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  *cum = zero_cum;

  /* Set up the number of registers to use for passing arguments: the
     global -mregparm value, possibly overridden per-function by a
     regparm attribute on the function type.  */
  cum->nregs = i386_regparm;
  if (fntype)
    {
      tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));

      if (attr)
	cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
    }

  /* Determine if this function has variable arguments.  This is
     indicated by the last argument being 'void_type_node' if there
     are no variable arguments.  If there are variable arguments, then
     we won't pass anything in registers */

  if (cum->nregs)
    {
      for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
	   param != 0; param = next_param)
	{
	  next_param = TREE_CHAIN (param);
	  if (next_param == 0 && TREE_VALUE (param) != void_type_node)
	    cum->nregs = 0;
	}
    }

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, ", nregs=%d )\n", cum->nregs);

  return;
}
741
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.) */

void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int named;			/* whether or not the argument was named */
{
  /* Size of the argument in bytes, then rounded up to whole words.  */
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (TARGET_DEBUG_ARG)
    /* NOTE(review): the `sz=' field actually prints WORDS, not the
       byte count -- confirm whether that is intended.  */
    fprintf (stderr,
	     "function_adv (sz=%d, wds=%2d, nregs=%d, mode=%s, named=%d)\n\n",
	     words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);

  cum->words += words;
  cum->nregs -= words;
  cum->regno += words;

  /* Once the registers run out, everything further goes on the stack;
     reset so nregs/regno never hold stale negative values.  */
  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = 0;
    }

  return;
}
774
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int named;			/* != 0 for normal args, == 0 for ... args */
{
  rtx ret = NULL_RTX;
  /* Size of the argument in bytes, then rounded up to whole words.  */
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  switch (mode)
    {
      /* For now, pass fp/complex values on the stack.  */
    default:
      break;

    case BLKmode:
    case DImode:
    case SImode:
    case HImode:
    case QImode:
      /* Use registers only if the whole argument still fits in the
	 words that remain (cum->nregs counts remaining words).  */
      if (words <= cum->nregs)
	ret = gen_rtx_REG (mode, cum->regno);
      break;
    }

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr,
	       "function_arg (size=%d, wds=%2d, nregs=%d, mode=%4s, named=%d",
	       words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);

      if (ret)
	fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
      else
	fprintf (stderr, ", stack");

      fprintf (stderr, " )\n");
    }

  return ret;
}
832
833 /* For an arg passed partly in registers and partly in memory,
834 this is the number of registers used.
835 For args passed entirely in registers or entirely in memory, zero. */
836
837 int
838 function_arg_partial_nregs (cum, mode, type, named)
839 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED; /* current arg information */
840 enum machine_mode mode ATTRIBUTE_UNUSED; /* current arg mode */
841 tree type ATTRIBUTE_UNUSED; /* type of the argument or 0 if lib support */
842 int named ATTRIBUTE_UNUSED; /* != 0 for normal args, == 0 for ... args */
843 {
844 return 0;
845 }
846 \f
/* Output an insn whose source is a 386 integer register.  SRC is the
   rtx for the register, and TEMPLATE is the op-code template.  SRC may
   be either SImode or DImode.

   The template will be output with operands[0] as SRC, and operands[1]
   as a pointer to the top of the 386 stack.  So a call from floatsidf2
   would look like this:

      output_op_from_reg (operands[1], AS1 (fild%z0,%1));

   where %z0 corresponds to the caller's operands[1], and is used to
   emit the proper size suffix.

   ??? Extend this to handle HImode - a 387 can load and store HImode
   values directly.  */

void
output_op_from_reg (src, template)
     rtx src;
     char *template;
{
  rtx xops[4];
  int size = GET_MODE_SIZE (GET_MODE (src));

  xops[0] = src;
  xops[1] = AT_SP (Pmode);	/* memory operand at the stack top */
  xops[2] = GEN_INT (size);	/* bytes pushed, for the final add */
  xops[3] = stack_pointer_rtx;

  /* Push the value word by word, highest-numbered register first, so
     the words land in memory at increasing addresses.  */
  if (size > UNITS_PER_WORD)
    {
      rtx high;

      if (size > 2 * UNITS_PER_WORD)
	{
	  high = gen_rtx_REG (SImode, REGNO (src) + 2);
	  output_asm_insn (AS1 (push%L0,%0), &high);
	}

      high = gen_rtx_REG (SImode, REGNO (src) + 1);
      output_asm_insn (AS1 (push%L0,%0), &high);
    }

  output_asm_insn (AS1 (push%L0,%0), &src);
  /* Let the caller's template consume the value at (%esp)...  */
  output_asm_insn (template, xops);
  /* ...then release the temporary stack space again.  */
  output_asm_insn (AS2 (add%L3,%2,%3), xops);
}
894 \f
895 /* Output an insn to pop an value from the 387 top-of-stack to 386
896 register DEST. The 387 register stack is popped if DIES is true. If
897 the mode of DEST is an integer mode, a `fist' integer store is done,
898 otherwise a `fst' float store is done. */
899
900 void
901 output_to_reg (dest, dies, scratch_mem)
902 rtx dest;
903 int dies;
904 rtx scratch_mem;
905 {
906 rtx xops[4];
907 int size = GET_MODE_SIZE (GET_MODE (dest));
908
909 if (! scratch_mem)
910 xops[0] = AT_SP (Pmode);
911 else
912 xops[0] = scratch_mem;
913
914 xops[1] = stack_pointer_rtx;
915 xops[2] = GEN_INT (size);
916 xops[3] = dest;
917
918 if (! scratch_mem)
919 output_asm_insn (AS2 (sub%L1,%2,%1), xops);
920
921 if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
922 {
923 if (dies)
924 output_asm_insn (AS1 (fistp%z3,%y0), xops);
925 else if (GET_MODE (xops[3]) == DImode && ! dies)
926 {
927 /* There is no DImode version of this without a stack pop, so
928 we must emulate it. It doesn't matter much what the second
929 instruction is, because the value being pushed on the FP stack
930 is not used except for the following stack popping store.
931 This case can only happen without optimization, so it doesn't
932 matter that it is inefficient. */
933 output_asm_insn (AS1 (fistp%z3,%0), xops);
934 output_asm_insn (AS1 (fild%z3,%0), xops);
935 }
936 else
937 output_asm_insn (AS1 (fist%z3,%y0), xops);
938 }
939
940 else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
941 {
942 if (dies)
943 output_asm_insn (AS1 (fstp%z3,%y0), xops);
944 else
945 {
946 if (GET_MODE (dest) == XFmode)
947 {
948 output_asm_insn (AS1 (fstp%z3,%y0), xops);
949 output_asm_insn (AS1 (fld%z3,%y0), xops);
950 }
951 else
952 output_asm_insn (AS1 (fst%z3,%y0), xops);
953 }
954 }
955
956 else
957 abort ();
958
959 if (! scratch_mem)
960 output_asm_insn (AS1 (pop%L0,%0), &dest);
961 else
962 output_asm_insn (AS2 (mov%L0,%0,%3), xops);
963
964
965 if (size > UNITS_PER_WORD)
966 {
967 dest = gen_rtx_REG (SImode, REGNO (dest) + 1);
968 if (! scratch_mem)
969 output_asm_insn (AS1 (pop%L0,%0), &dest);
970 else
971 {
972 xops[0] = adj_offsettable_operand (xops[0], 4);
973 xops[3] = dest;
974 output_asm_insn (AS2 (mov%L0,%0,%3), xops);
975 }
976
977 if (size > 2 * UNITS_PER_WORD)
978 {
979 dest = gen_rtx_REG (SImode, REGNO (dest) + 1);
980 if (! scratch_mem)
981 output_asm_insn (AS1 (pop%L0,%0), &dest);
982 else
983 {
984 xops[0] = adj_offsettable_operand (xops[0], 4);
985 output_asm_insn (AS2 (mov%L0,%0,%3), xops);
986 }
987 }
988 }
989 }
990 \f
/* Return the assembler template for a one-word move described by
   OPERANDS (0 = destination, 1 = source).  May itself emit a push
   first when neither operand is a register and the source is not
   constant, since mov cannot take two memory operands.  */

char *
singlemove_string (operands)
     rtx *operands;
{
  rtx x;

  /* A store through a pre-decremented stack pointer is just a push.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
    {
      if (XEXP (x, 0) != stack_pointer_rtx)
	abort ();
      return "push%L1 %1";
    }
  else if (GET_CODE (operands[1]) == CONST_DOUBLE)
    return output_move_const_single (operands);
  else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
    return AS2 (mov%L0,%1,%0);
  else if (CONSTANT_P (operands[1]))
    return AS2 (mov%L0,%1,%0);
  else
    {
      /* Memory-to-memory: bounce the value through the stack.  */
      output_asm_insn ("push%L1 %1", operands);
      return "pop%L0 %0";
    }
}
1015 \f
/* Return a REG that occurs in ADDR with coefficient 1.
   ADDR can be effectively incremented by incrementing REG.  Aborts
   when ADDR contains no such plain register term.  */

static rtx
find_addr_reg (addr)
     rtx addr;
{
  /* Walk down PLUS chains, discarding constant terms and following
     the side that can hold a register.  */
  while (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == REG)
	addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 1)) == REG)
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 0)))
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 1)))
	addr = XEXP (addr, 0);
      else
	abort ();
    }

  if (GET_CODE (addr) == REG)
    return addr;
  abort ();
}
1041 \f
/* Output an insn to add the constant N to the register X.  Emits
   nothing when N is zero; uses inc/dec for +/-1; and for negative N
   or N == 128 emits `sub' of the negated value (so the immediate
   -128..127 can be encoded as a sign-extended byte, which +128
   cannot).  */

static void
asm_add (n, x)
     int n;
     rtx x;
{
  rtx xops[2];
  xops[0] = x;

  if (n == -1)
    output_asm_insn (AS1 (dec%L0,%0), xops);
  else if (n == 1)
    output_asm_insn (AS1 (inc%L0,%0), xops);
  else if (n < 0 || n == 128)
    {
      xops[1] = GEN_INT (-n);
      output_asm_insn (AS2 (sub%L0,%1,%0), xops);
    }
  else if (n > 0)
    {
      xops[1] = GEN_INT (n);
      output_asm_insn (AS2 (add%L0,%1,%0), xops);
    }
}
1067 \f
/* Output assembler code to perform a doubleword move insn
   with operands OPERANDS.

   SIZE is 8 (DImode/DFmode) or 12 (XFmode); the move is emitted as
   two — or, for XFmode, three — word-sized moves via
   singlemove_string.  Unoffsettable memory operands are stepped
   through by temporarily bumping a register found in their address.
   Returns the template for the final move, or "" when everything has
   already been emitted with output_asm_insn.  */

char *
output_move_double (operands)
     rtx *operands;
{
  /* How each operand will be addressed.  RNDOP = unrecognized.  */
  enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
  rtx latehalf[2];		/* high-numbered word of each operand */
  rtx middlehalf[2];		/* middle word; used only when size == 12 */
  rtx xops[2];
  rtx addreg0 = 0, addreg1 = 0;	/* addr regs to bump for MEMOP operands */
  int dest_overlapped_low = 0;
  int size = GET_MODE_SIZE (GET_MODE (operands[0]));

  middlehalf[0] = 0;
  middlehalf[1] = 0;

  /* First classify both operands.  */

  if (REG_P (operands[0]))
    optype0 = REGOP;
  else if (offsettable_memref_p (operands[0]))
    optype0 = OFFSOP;
  else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
    optype0 = POPOP;
  else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
    optype0 = PUSHOP;
  else if (GET_CODE (operands[0]) == MEM)
    optype0 = MEMOP;
  else
    optype0 = RNDOP;

  if (REG_P (operands[1]))
    optype1 = REGOP;
  else if (CONSTANT_P (operands[1]))
    optype1 = CNSTOP;
  else if (offsettable_memref_p (operands[1]))
    optype1 = OFFSOP;
  else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
    optype1 = POPOP;
  else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
    optype1 = PUSHOP;
  else if (GET_CODE (operands[1]) == MEM)
    optype1 = MEMOP;
  else
    optype1 = RNDOP;

  /* Check for the cases that the operand constraints are not
     supposed to allow to happen.  Abort if we get one,
     because generating code for these cases is painful.  */

  if (optype0 == RNDOP || optype1 == RNDOP)
    abort ();

  /* If one operand is decrementing and one is incrementing
     decrement the former register explicitly
     and change that operand into ordinary indexing.  */

  if (optype0 == PUSHOP && optype1 == POPOP)
    {
      /* ??? Can this ever happen on i386?  */
      operands[0] = XEXP (XEXP (operands[0], 0), 0);
      asm_add (-size, operands[0]);
      /* NOTE(review): the XFmode test reads operands[1] but the DFmode
	 test reads operands[0], which was just replaced by its address
	 register above, so the DFmode branch looks unreachable —
	 presumably both were meant to test operands[1]; confirm.  */
      if (GET_MODE (operands[1]) == XFmode)
	operands[0] = gen_rtx_MEM (XFmode, operands[0]);
      else if (GET_MODE (operands[0]) == DFmode)
	operands[0] = gen_rtx_MEM (DFmode, operands[0]);
      else
	operands[0] = gen_rtx_MEM (DImode, operands[0]);
      optype0 = OFFSOP;
    }

  if (optype0 == POPOP && optype1 == PUSHOP)
    {
      /* ??? Can this ever happen on i386?  */
      operands[1] = XEXP (XEXP (operands[1], 0), 0);
      asm_add (-size, operands[1]);
      if (GET_MODE (operands[1]) == XFmode)
	operands[1] = gen_rtx_MEM (XFmode, operands[1]);
      else if (GET_MODE (operands[1]) == DFmode)
	operands[1] = gen_rtx_MEM (DFmode, operands[1]);
      else
	operands[1] = gen_rtx_MEM (DImode, operands[1]);
      optype1 = OFFSOP;
    }

  /* If an operand is an unoffsettable memory ref, find a register
     we can increment temporarily to make it refer to the second word.  */

  if (optype0 == MEMOP)
    addreg0 = find_addr_reg (XEXP (operands[0], 0));

  if (optype1 == MEMOP)
    addreg1 = find_addr_reg (XEXP (operands[1], 0));

  /* Ok, we can do one word at a time.
     Normally we do the low-numbered word first,
     but if either operand is autodecrementing then we
     do the high-numbered word first.

     In either case, set up in LATEHALF the operands to use
     for the high-numbered word and in some cases alter the
     operands in OPERANDS to be suitable for the low-numbered word.  */

  if (size == 12)
    {
      /* XFmode: three words.  Registers holding an XFmode value span
	 three consecutive hard regs; memory operands are adjusted by
	 4 and 8 bytes.  */
      if (optype0 == REGOP)
	{
	  middlehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
	  latehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 2);
	}
      else if (optype0 == OFFSOP)
	{
	  middlehalf[0] = adj_offsettable_operand (operands[0], 4);
	  latehalf[0] = adj_offsettable_operand (operands[0], 8);
	}
      else
	{
	  middlehalf[0] = operands[0];
	  latehalf[0] = operands[0];
	}

      if (optype1 == REGOP)
	{
	  middlehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
	  latehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 2);
	}
      else if (optype1 == OFFSOP)
	{
	  middlehalf[1] = adj_offsettable_operand (operands[1], 4);
	  latehalf[1] = adj_offsettable_operand (operands[1], 8);
	}
      else if (optype1 == CNSTOP)
	{
	  if (GET_CODE (operands[1]) == CONST_DOUBLE)
	    {
	      /* Split the long-double constant into its three
		 target-format words.  */
	      REAL_VALUE_TYPE r; long l[3];

	      REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
	      REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
	      operands[1] = GEN_INT (l[0]);
	      middlehalf[1] = GEN_INT (l[1]);
	      latehalf[1] = GEN_INT (l[2]);
	    }
	  else if (CONSTANT_P (operands[1]))
	    /* No non-CONST_DOUBLE constant should ever appear here.  */
	    abort ();
	}
      else
	{
	  middlehalf[1] = operands[1];
	  latehalf[1] = operands[1];
	}
    }

  else
    {
      /* Size is not 12: two words only.  */

      if (optype0 == REGOP)
	latehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
      else if (optype0 == OFFSOP)
	latehalf[0] = adj_offsettable_operand (operands[0], 4);
      else
	latehalf[0] = operands[0];

      if (optype1 == REGOP)
	latehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
      else if (optype1 == OFFSOP)
	latehalf[1] = adj_offsettable_operand (operands[1], 4);
      else if (optype1 == CNSTOP)
	split_double (operands[1], &operands[1], &latehalf[1]);
      else
	latehalf[1] = operands[1];
    }

  /* If insn is effectively movd N (sp),-(sp) then we will do the
     high word first.  We should use the adjusted operand 1
     (which is N+4 (sp) or N+8 (sp))
     for the low word and middle word as well,
     to compensate for the first decrement of sp.  */
  if (optype0 == PUSHOP
      && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
      && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
    middlehalf[1] = operands[1] = latehalf[1];

  /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
     if the upper part of reg N does not appear in the MEM, arrange to
     emit the move late-half first.  Otherwise, compute the MEM address
     into the upper part of N and use that as a pointer to the memory
     operand.  */
  if (optype0 == REGOP
      && (optype1 == OFFSOP || optype1 == MEMOP))
    {
      if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
	  && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
	{
	  /* If both halves of dest are used in the src memory address,
	     compute the address into latehalf of dest.  */
	compadr:
	  xops[0] = latehalf[0];
	  xops[1] = XEXP (operands[1], 0);
	  output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
	  if (GET_MODE (operands[1]) == XFmode)
	    {
	      /* Rebuild the source as a plain dereference of the
		 computed address, plus its word offsets.  */
	      operands[1] = gen_rtx_MEM (XFmode, latehalf[0]);
	      middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
	      latehalf[1] = adj_offsettable_operand (operands[1], size-4);
	    }
	  else
	    {
	      operands[1] = gen_rtx_MEM (DImode, latehalf[0]);
	      latehalf[1] = adj_offsettable_operand (operands[1], size-4);
	    }
	}

      else if (size == 12
	       && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
	{
	  /* Check for two regs used by both source and dest.  */
	  if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
	      || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
	    goto compadr;

	  /* JRV says this can't happen: */
	  if (addreg0 || addreg1)
	    abort ();

	  /* Only the middle reg conflicts; simply put it last.  */
	  output_asm_insn (singlemove_string (operands), operands);
	  output_asm_insn (singlemove_string (latehalf), latehalf);
	  output_asm_insn (singlemove_string (middlehalf), middlehalf);
	  return "";
	}

      else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
	/* If the low half of dest is mentioned in the source memory
	   address, then arrange to emit the late half first.  */
	dest_overlapped_low = 1;
    }

  /* If one or both operands autodecrementing,
     do the two words, high-numbered first.  */

  /* Likewise, the first move would clobber the source of the second one,
     do them in the other order.  This happens only for registers;
     such overlap can't happen in memory unless the user explicitly
     sets it up, and that is an undefined circumstance.  */

#if 0
  if (optype0 == PUSHOP || optype1 == PUSHOP
      || (optype0 == REGOP && optype1 == REGOP
	  && REGNO (operands[0]) == REGNO (latehalf[1]))
      || dest_overlapped_low)
#endif

  if (optype0 == PUSHOP || optype1 == PUSHOP
      || (optype0 == REGOP && optype1 == REGOP
	  && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
	      || REGNO (operands[0]) == REGNO (latehalf[1])))
      || dest_overlapped_low)
    {
      /* Make any unoffsettable addresses point at high-numbered word.  */
      if (addreg0)
	asm_add (size-4, addreg0);
      if (addreg1)
	asm_add (size-4, addreg1);

      /* Do that word.  */
      output_asm_insn (singlemove_string (latehalf), latehalf);

      /* Undo the adds we just did.  */
      if (addreg0)
	asm_add (-4, addreg0);
      if (addreg1)
	asm_add (-4, addreg1);

      if (size == 12)
	{
	  /* Middle word, then step addr regs back down to the low word.  */
	  output_asm_insn (singlemove_string (middlehalf), middlehalf);
	  if (addreg0)
	    asm_add (-4, addreg0);
	  if (addreg1)
	    asm_add (-4, addreg1);
	}

      /* Do low-numbered word.  */
      return singlemove_string (operands);
    }

  /* Normal case: do the two words, low-numbered first.  */

  output_asm_insn (singlemove_string (operands), operands);

  /* Do the middle one of the three words for long double */
  if (size == 12)
    {
      if (addreg0)
	asm_add (4, addreg0);
      if (addreg1)
	asm_add (4, addreg1);

      output_asm_insn (singlemove_string (middlehalf), middlehalf);
    }

  /* Make any unoffsettable addresses point at high-numbered word.  */
  if (addreg0)
    asm_add (4, addreg0);
  if (addreg1)
    asm_add (4, addreg1);

  /* Do that word.  */
  output_asm_insn (singlemove_string (latehalf), latehalf);

  /* Undo the adds we just did.  */
  if (addreg0)
    asm_add (4-size, addreg0);
  if (addreg1)
    asm_add (4-size, addreg1);

  return "";
}
1391 \f
1392 #define MAX_TMPS 2 /* max temporary registers used */
1393
/* Output the appropriate code to push LENGTH bytes of memory onto the
   stack, for a push insn described by OPERANDS (source in operands[1]).

   Scratch registers are taken from operands[TMP_START..N_OPERANDS-1]
   (at most MAX_TMPS of them); with no usable scratch the words are
   pushed straight from memory, high address first.  When the source
   address involves the stack pointer, STACK_OFFSET compensates for
   the pushes already performed.  INSN is used only for error
   reporting.  Always returns "".  */

char *
output_move_pushmem (operands, insn, length, tmp_start, n_operands)
     rtx operands[];
     rtx insn;
     int length;
     int tmp_start;
     int n_operands;
{
  struct
    {
      char *load;	/* template to load a word into the scratch reg */
      char *push;	/* template to push the scratch reg */
      rtx xops[2];	/* %0 = source word, %1 = scratch register */
    } tmp_info[MAX_TMPS];

  rtx src = operands[1];
  int max_tmps = 0;
  int offset = 0;
  /* Nonzero when pushing moves the source address out from under us.  */
  int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
  int stack_offset = 0;
  int i, num_tmps;
  rtx xops[1];

  if (! offsettable_memref_p (src))
    fatal_insn ("Source is not offsettable", insn);

  if ((length & 3) != 0)
    fatal_insn ("Pushing non-word aligned size", insn);

  /* Figure out which temporary registers we have available;
     regs overlapping the source cannot be used as scratch.  */
  for (i = tmp_start; i < n_operands; i++)
    {
      if (GET_CODE (operands[i]) == REG)
	{
	  if (reg_overlap_mentioned_p (operands[i], src))
	    continue;

	  tmp_info[ max_tmps++ ].xops[1] = operands[i];
	  if (max_tmps == MAX_TMPS)
	    break;
	}
    }

  if (max_tmps == 0)
    /* No scratch registers: push each word directly from memory,
       highest offset first (stack grows downward).  */
    for (offset = length - 4; offset >= 0; offset -= 4)
      {
	xops[0] = adj_offsettable_operand (src, offset + stack_offset);
	output_asm_insn (AS1(push%L0,%0), xops);
	if (stack_p)
	  stack_offset += 4;
      }

  else
    /* Batch up to MAX_TMPS loads, then the matching pushes, per
       iteration — presumably to overlap the loads; confirm intent.  */
    for (offset = length - 4; offset >= 0; )
      {
	for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
	  {
	    tmp_info[num_tmps].load = AS2(mov%L0,%0,%1);
	    tmp_info[num_tmps].push = AS1(push%L0,%1);
	    tmp_info[num_tmps].xops[0]
	      = adj_offsettable_operand (src, offset + stack_offset);
	    offset -= 4;
	  }

	for (i = 0; i < num_tmps; i++)
	  output_asm_insn (tmp_info[i].load, tmp_info[i].xops);

	for (i = 0; i < num_tmps; i++)
	  output_asm_insn (tmp_info[i].push, tmp_info[i].xops);

	if (stack_p)
	  stack_offset += 4*num_tmps;
      }

  return "";
}
1472 \f
/* Output the appropriate code to move LENGTH bytes of data between two
   memory locations (operands[0] = dest, operands[1] = src), bouncing
   each piece through a scratch register from
   operands[TMP_START..N_OPERANDS-1].

   A push to the stack is delegated to output_move_pushmem.  Moves are
   done in 4-byte then 2-byte chunks; an odd trailing byte requires a
   byte-addressable scratch (QI_REG_P).  INSN is used only for error
   reporting.  Always returns "".  */

char *
output_move_memory (operands, insn, length, tmp_start, n_operands)
     rtx operands[];
     rtx insn;
     int length;
     int tmp_start;
     int n_operands;
{
  struct
    {
      char *load;	/* template to load src word into the scratch */
      char *store;	/* template to store the scratch to dest */
      rtx xops[3];	/* %0 = dest, %1 = src, %2 = scratch register */
    } tmp_info[MAX_TMPS];

  rtx dest = operands[0];
  rtx src = operands[1];
  rtx qi_tmp = NULL_RTX;	/* byte-capable scratch for the odd byte */
  int max_tmps = 0;
  int offset = 0;
  int i, num_tmps;
  rtx xops[3];

  /* A move whose destination is a push onto the stack is handled by
     the specialized routine.  */
  if (GET_CODE (dest) == MEM
      && GET_CODE (XEXP (dest, 0)) == PRE_INC
      && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
    return output_move_pushmem (operands, insn, length, tmp_start, n_operands);

  if (! offsettable_memref_p (src))
    fatal_insn ("Source is not offsettable", insn);

  if (! offsettable_memref_p (dest))
    fatal_insn ("Destination is not offsettable", insn);

  /* Figure out which temporary registers we have available.
     Scratch regs must not overlap either memory operand's address.  */
  for (i = tmp_start; i < n_operands; i++)
    {
      if (GET_CODE (operands[i]) == REG)
	{
	  if ((length & 1) != 0 && qi_tmp == 0 && QI_REG_P (operands[i]))
	    qi_tmp = operands[i];

	  if (reg_overlap_mentioned_p (operands[i], dest))
	    fatal_insn ("Temporary register overlaps the destination", insn);

	  if (reg_overlap_mentioned_p (operands[i], src))
	    fatal_insn ("Temporary register overlaps the source", insn);

	  tmp_info[max_tmps++].xops[2] = operands[i];
	  if (max_tmps == MAX_TMPS)
	    break;
	}
    }

  if (max_tmps == 0)
    fatal_insn ("No scratch registers were found to do memory->memory moves",
		insn);

  if ((length & 1) != 0)
    {
      if (qi_tmp == 0)
	fatal_insn ("No byte register found when moving odd # of bytes.",
		    insn);
    }

  /* Move in word/halfword chunks, batching up to MAX_TMPS loads and
     then the matching stores per iteration.  */
  while (length > 1)
    {
      for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
	{
	  if (length >= 4)
	    {
	      tmp_info[num_tmps].load = AS2(mov%L0,%1,%2);
	      tmp_info[num_tmps].store = AS2(mov%L0,%2,%0);
	      tmp_info[num_tmps].xops[0]
		= adj_offsettable_operand (dest, offset);
	      tmp_info[num_tmps].xops[1]
		= adj_offsettable_operand (src, offset);

	      offset += 4;
	      length -= 4;
	    }

	  else if (length >= 2)
	    {
	      tmp_info[num_tmps].load = AS2(mov%W0,%1,%2);
	      tmp_info[num_tmps].store = AS2(mov%W0,%2,%0);
	      tmp_info[num_tmps].xops[0]
		= adj_offsettable_operand (dest, offset);
	      tmp_info[num_tmps].xops[1]
		= adj_offsettable_operand (src, offset);

	      offset += 2;
	      length -= 2;
	    }
	  else
	    break;
	}

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].load, tmp_info[i].xops);

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
    }

  /* Copy any remaining odd byte through the byte-capable scratch.  */
  if (length == 1)
    {
      xops[0] = adj_offsettable_operand (dest, offset);
      xops[1] = adj_offsettable_operand (src, offset);
      xops[2] = qi_tmp;
      output_asm_insn (AS2(mov%B0,%1,%2), xops);
      output_asm_insn (AS2(mov%B0,%2,%0), xops);
    }

  return "";
}
1591 \f
/* Return 1 if X is the FP constant +0.0, 2 if it is 1.0, and 0 for
   any other value (meaning: load it from memory rather than as an
   80387 "standard" constant).  Callers map 1 -> fldz, 2 -> fld1.  */

int
standard_80387_constant_p (x)
     rtx x;
{
#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  REAL_VALUE_TYPE d;
  jmp_buf handler;
  int is0, is1;

  /* If the host FP arithmetic below traps, the float handler longjmps
     back here and we conservatively answer "not standard".  */
  if (setjmp (handler))
    return 0;

  set_float_handler (handler);
  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
  /* -0.0 is excluded — presumably because fldz loads +0.0; confirm.  */
  is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
  is1 = REAL_VALUES_EQUAL (d, dconst1);
  set_float_handler (NULL_PTR);

  if (is0)
    return 1;

  if (is1)
    return 2;

  /* Note that on the 80387, other constants, such as pi,
     are much slower to load as standard constants
     than to load from doubles in memory!  */
  /* ??? Not true on K6: all constants are equal cost.  */
#endif

  return 0;
}
1624
1625 char *
1626 output_move_const_single (operands)
1627 rtx *operands;
1628 {
1629 if (FP_REG_P (operands[0]))
1630 {
1631 int conval = standard_80387_constant_p (operands[1]);
1632
1633 if (conval == 1)
1634 return "fldz";
1635
1636 if (conval == 2)
1637 return "fld1";
1638 }
1639
1640 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1641 {
1642 REAL_VALUE_TYPE r; long l;
1643
1644 if (GET_MODE (operands[1]) == XFmode)
1645 abort ();
1646
1647 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1648 REAL_VALUE_TO_TARGET_SINGLE (r, l);
1649 operands[1] = GEN_INT (l);
1650 }
1651
1652 return singlemove_string (operands);
1653 }
1654 \f
1655 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1656 reference and a constant. */
1657
1658 int
1659 symbolic_operand (op, mode)
1660 register rtx op;
1661 enum machine_mode mode ATTRIBUTE_UNUSED;
1662 {
1663 switch (GET_CODE (op))
1664 {
1665 case SYMBOL_REF:
1666 case LABEL_REF:
1667 return 1;
1668
1669 case CONST:
1670 op = XEXP (op, 0);
1671 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1672 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1673 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1674
1675 default:
1676 return 0;
1677 }
1678 }
1679
1680 /* Test for a valid operand for a call instruction.
1681 Don't allow the arg pointer register or virtual regs
1682 since they may change into reg + const, which the patterns
1683 can't handle yet. */
1684
1685 int
1686 call_insn_operand (op, mode)
1687 rtx op;
1688 enum machine_mode mode ATTRIBUTE_UNUSED;
1689 {
1690 if (GET_CODE (op) == MEM
1691 && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1692 /* This makes a difference for PIC. */
1693 && general_operand (XEXP (op, 0), Pmode))
1694 || (GET_CODE (XEXP (op, 0)) == REG
1695 && XEXP (op, 0) != arg_pointer_rtx
1696 && ! (REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1697 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1698 return 1;
1699
1700 return 0;
1701 }
1702
1703 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1704 even if pic. */
1705
1706 int
1707 expander_call_insn_operand (op, mode)
1708 rtx op;
1709 enum machine_mode mode ATTRIBUTE_UNUSED;
1710 {
1711 if (GET_CODE (op) == MEM
1712 && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1713 || (GET_CODE (XEXP (op, 0)) == REG
1714 && XEXP (op, 0) != arg_pointer_rtx
1715 && ! (REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1716 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1717 return 1;
1718
1719 return 0;
1720 }
1721
1722 /* Return 1 if OP is a comparison operator that can use the condition code
1723 generated by an arithmetic operation. */
1724
1725 int
1726 arithmetic_comparison_operator (op, mode)
1727 register rtx op;
1728 enum machine_mode mode;
1729 {
1730 enum rtx_code code;
1731
1732 if (mode != VOIDmode && mode != GET_MODE (op))
1733 return 0;
1734
1735 code = GET_CODE (op);
1736 if (GET_RTX_CLASS (code) != '<')
1737 return 0;
1738
1739 return (code != GT && code != LE);
1740 }
1741
1742 int
1743 ix86_logical_operator (op, mode)
1744 register rtx op;
1745 enum machine_mode mode ATTRIBUTE_UNUSED;
1746 {
1747 return GET_CODE (op) == AND || GET_CODE (op) == IOR || GET_CODE (op) == XOR;
1748 }
1749
1750 \f
1751 /* Returns 1 if OP contains a symbol reference */
1752
1753 int
1754 symbolic_reference_mentioned_p (op)
1755 rtx op;
1756 {
1757 register char *fmt;
1758 register int i;
1759
1760 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1761 return 1;
1762
1763 fmt = GET_RTX_FORMAT (GET_CODE (op));
1764 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1765 {
1766 if (fmt[i] == 'E')
1767 {
1768 register int j;
1769
1770 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1771 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1772 return 1;
1773 }
1774
1775 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1776 return 1;
1777 }
1778
1779 return 0;
1780 }
1781 \f
/* Attempt to expand a binary operator.  Make the expansion closer to the
   actual machine, then just general_operand, which will allow 3 separate
   memory references (one output, two input) in a single insn.  Return
   whether the insn fails (FALSE), or succeeds (TRUE); the caller emits
   the insn with the possibly-rewritten OPERANDS.  */

int
ix86_expand_binary_operator (code, mode, operands)
     enum rtx_code code;
     enum machine_mode mode;
     rtx operands[];
{
  int modified;

  /* Recognize <var1> = <value> <op> <var1> for commutative operators:
     swap the two source operands so the shared/immediate operand ends
     up in operands[2].  */
  if (GET_RTX_CLASS (code) == 'c'
      && (rtx_equal_p (operands[0], operands[2])
	  || immediate_operand (operands[1], mode)))
    {
      rtx temp = operands[1];
      operands[1] = operands[2];
      operands[2] = temp;
    }

  /* If optimizing, copy to regs to improve CSE */
  if (TARGET_PSEUDO && optimize
      && ((reload_in_progress | reload_completed) == 0))
    {
      /* A memory source equal to the destination is left in place.  */
      if (GET_CODE (operands[1]) == MEM
	  && ! rtx_equal_p (operands[0], operands[1]))
	operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);

      if (GET_CODE (operands[2]) == MEM)
	operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);

      /* A constant first operand of MINUS cannot stay in place (only
	 commutative ops allow it — see ix86_binary_operator_ok); load
	 it into a fresh register.  */
      if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
	{
	  rtx temp = gen_reg_rtx (GET_MODE (operands[0]));

	  emit_move_insn (temp, operands[1]);
	  operands[1] = temp;
	  return TRUE;
	}
    }

  if (!ix86_binary_operator_ok (code, mode, operands))
    {
      /* If not optimizing, try to make a valid insn (optimize code
	 previously did this above to improve chances of CSE) */

      if ((! TARGET_PSEUDO || !optimize)
	  && ((reload_in_progress | reload_completed) == 0)
	  && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
	{
	  modified = FALSE;
	  if (GET_CODE (operands[1]) == MEM
	      && ! rtx_equal_p (operands[0], operands[1]))
	    {
	      operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
	      modified = TRUE;
	    }

	  if (GET_CODE (operands[2]) == MEM)
	    {
	      operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
	      modified = TRUE;
	    }

	  if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (operands[0]));

	      emit_move_insn (temp, operands[1]);
	      operands[1] = temp;
	      return TRUE;
	    }

	  /* Re-check only if we changed something; an unmodified
	     operand set already failed above.  */
	  if (modified && ! ix86_binary_operator_ok (code, mode, operands))
	    return FALSE;
	}
      else
	return FALSE;
    }

  return TRUE;
}
1867 \f
1868 /* Return TRUE or FALSE depending on whether the binary operator meets the
1869 appropriate constraints. */
1870
1871 int
1872 ix86_binary_operator_ok (code, mode, operands)
1873 enum rtx_code code;
1874 enum machine_mode mode ATTRIBUTE_UNUSED;
1875 rtx operands[3];
1876 {
1877 return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1878 && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1879 }
1880 \f
1881 /* Attempt to expand a unary operator. Make the expansion closer to the
1882 actual machine, then just general_operand, which will allow 2 separate
1883 memory references (one output, one input) in a single insn. Return
1884 whether the insn fails, or succeeds. */
1885
1886 int
1887 ix86_expand_unary_operator (code, mode, operands)
1888 enum rtx_code code;
1889 enum machine_mode mode;
1890 rtx operands[];
1891 {
1892 /* If optimizing, copy to regs to improve CSE */
1893 if (TARGET_PSEUDO
1894 && optimize
1895 && ((reload_in_progress | reload_completed) == 0)
1896 && GET_CODE (operands[1]) == MEM)
1897 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1898
1899 if (! ix86_unary_operator_ok (code, mode, operands))
1900 {
1901 if ((! TARGET_PSEUDO || optimize == 0)
1902 && ((reload_in_progress | reload_completed) == 0)
1903 && GET_CODE (operands[1]) == MEM)
1904 {
1905 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1906 if (! ix86_unary_operator_ok (code, mode, operands))
1907 return FALSE;
1908 }
1909 else
1910 return FALSE;
1911 }
1912
1913 return TRUE;
1914 }
1915 \f
1916 /* Return TRUE or FALSE depending on whether the unary operator meets the
1917 appropriate constraints. */
1918
1919 int
1920 ix86_unary_operator_ok (code, mode, operands)
1921 enum rtx_code code ATTRIBUTE_UNUSED;
1922 enum machine_mode mode ATTRIBUTE_UNUSED;
1923 rtx operands[2] ATTRIBUTE_UNUSED;
1924 {
1925 return TRUE;
1926 }
1927 \f
1928 static rtx pic_label_rtx;
1929 static char pic_label_name [256];
1930 static int pic_label_no = 0;
1931
/* This function generates code for -fpic that loads %ebx with
   the return address of the caller and then returns.

   When deep branch prediction is enabled, the PIC register setup is
   done via a call to a tiny local routine; this emits that routine's
   label plus its body (load the word at the stack pointer — the
   return address — into the PIC register, then return).  NAME is
   unused; FILE is the assembly output stream.  */

void
asm_output_function_prefix (file, name)
     FILE *file;
     char *name ATTRIBUTE_UNUSED;
{
  rtx xops[2];
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  xops[0] = pic_offset_table_rtx;
  xops[1] = stack_pointer_rtx;

  /* Deep branch prediction favors having a return for every call.  */
  if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
    {
      tree prologue_node;

      /* Lazily create the shared label for the PIC thunk; the same
	 label (and file-scope name buffer) is reused by
	 load_pic_register.  */
      if (pic_label_rtx == 0)
	{
	  pic_label_rtx = gen_label_rtx ();
	  ASM_GENERATE_INTERNAL_LABEL (pic_label_name, "LPR", pic_label_no++);
	  LABEL_NAME (pic_label_rtx) = pic_label_name;
	}

      prologue_node = make_node (FUNCTION_DECL);
      DECL_RESULT (prologue_node) = 0;

      /* This used to call ASM_DECLARE_FUNCTION_NAME() but since it's an
	 internal (non-global) label that's being emitted, it didn't make
	 sense to have .type information for local labels.  This caused
	 the SCO OpenServer 5.0.4 ELF assembler grief (why are you giving
	 me debug info for a label that you're declaring non-global?) this
	 was changed to call ASM_OUTPUT_LABEL() instead.  */


      ASM_OUTPUT_LABEL (file, pic_label_name);
      output_asm_insn ("movl (%1),%0", xops);
      output_asm_insn ("ret", xops);
    }
}
1974
1975 /* Generate the assembly code for function entry.
1976 FILE is an stdio stream to output the code to.
1977 SIZE is an int: how many units of temporary storage to allocate. */
1978
1979 void
1980 function_prologue (file, size)
1981 FILE *file ATTRIBUTE_UNUSED;
1982 int size ATTRIBUTE_UNUSED;
1983 {
1984 if (TARGET_SCHEDULE_PROLOGUE)
1985 {
1986 pic_label_rtx = 0;
1987 return;
1988 }
1989
1990 ix86_prologue (0);
1991 }
1992
1993 /* Expand the prologue into a bunch of separate insns. */
1994
1995 void
1996 ix86_expand_prologue ()
1997 {
1998 if (! TARGET_SCHEDULE_PROLOGUE)
1999 return;
2000
2001 ix86_prologue (1);
2002 }
2003
/* Load the PIC register (%ebx, PIC_OFFSET_TABLE_REGNUM) with the
   address of the global offset table.  When DO_RTL is nonzero the
   sequence is emitted as RTL insns; otherwise the assembly text is
   output directly.

   Two strategies: with deep branch prediction, call the tiny local
   routine emitted by asm_output_function_prefix (keeping the CPU's
   call/return stack balanced); otherwise use the classic
   call-next-instruction / pop trick.  */

void
load_pic_register (do_rtl)
     int do_rtl;
{
  rtx xops[4];

  if (TARGET_DEEP_BRANCH_PREDICTION)
    {
      xops[0] = pic_offset_table_rtx;
      /* Lazily create the shared thunk label (also created by
	 asm_output_function_prefix).  */
      if (pic_label_rtx == 0)
	{
	  pic_label_rtx = gen_label_rtx ();
	  ASM_GENERATE_INTERNAL_LABEL (pic_label_name, "LPR", pic_label_no++);
	  LABEL_NAME (pic_label_rtx) = pic_label_name;
	}

      xops[1] = gen_rtx_MEM (QImode,
			     gen_rtx (SYMBOL_REF, Pmode,
				      LABEL_NAME (pic_label_rtx)));

      if (do_rtl)
	{
	  emit_insn (gen_prologue_get_pc (xops[0], xops[1]));
	  emit_insn (gen_prologue_set_got (xops[0],
					   gen_rtx (SYMBOL_REF, Pmode,
						    "$_GLOBAL_OFFSET_TABLE_"),
					   xops[1]));
	}
      else
	{
	  /* Call the thunk (which loads the return address into %0),
	     then add the GOT displacement.  */
	  output_asm_insn (AS1 (call,%X1), xops);
	  output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
	  pic_label_rtx = 0;
	}
    }

  else
    {
      xops[0] = pic_offset_table_rtx;
      xops[1] = gen_label_rtx ();

      if (do_rtl)
	{
	  /* We can't put a raw CODE_LABEL into the RTL, and we can't emit
	     a new CODE_LABEL after reload, so we need a single pattern to
	     emit the 3 necessary instructions.  */
	  emit_insn (gen_prologue_get_pc_and_set_got (xops[0]));
	}
      else
	{
	  /* call L; L: pop %0; add $_GLOBAL_OFFSET_TABLE_+[.-L],%0  */
	  output_asm_insn (AS1 (call,%P1), xops);
	  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
				     CODE_LABEL_NUMBER (xops[1]));
	  output_asm_insn (AS1 (pop%L0,%0), xops);
	  output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
	}
    }

  /* When -fpic, we must emit a scheduling barrier, so that the instruction
     that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
     moved before any instruction which implicitly uses the got.   */

  if (do_rtl)
    emit_insn (gen_blockage ());
}
2069
/* Emit the function prologue: save the frame pointer and set it up
   (if needed), allocate the frame, save call-saved registers, and
   load the PIC register when required.  When DO_RTL is nonzero the
   prologue is emitted as RTL insns (marked frame-related for dwarf2);
   otherwise assembly text is output directly, with explicit dwarf2
   CFA notes.  */

static void
ix86_prologue (do_rtl)
     int do_rtl;
{
  register int regno;
  int limit;
  rtx xops[4];
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  long tsize = get_frame_size ();
  rtx insn;
  /* Track the CFA (canonical frame address) and the store offset for
     dwarf2 frame info as pushes move the stack pointer.  */
  int cfa_offset = INCOMING_FRAME_SP_OFFSET, cfa_store_offset = cfa_offset;

  xops[0] = stack_pointer_rtx;
  xops[1] = frame_pointer_rtx;
  xops[2] = GEN_INT (tsize);

  if (frame_pointer_needed)
    {
      if (do_rtl)
	{
	  /* push %ebp  */
	  insn = emit_insn (gen_rtx (SET, VOIDmode,
				     gen_rtx_MEM (SImode,
						  gen_rtx (PRE_DEC, SImode,
							   stack_pointer_rtx)),
				     frame_pointer_rtx));

	  RTX_FRAME_RELATED_P (insn) = 1;
	  /* movl %esp,%ebp  */
	  insn = emit_move_insn (xops[1], xops[0]);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      else
	{
	  output_asm_insn ("push%L1 %1", xops);
#ifdef INCOMING_RETURN_ADDR_RTX
	  if (dwarf2out_do_frame ())
	    {
	      char *l = dwarf2out_cfi_label ();

	      /* The push moved the CFA and saved the frame pointer.  */
	      cfa_store_offset += 4;
	      cfa_offset = cfa_store_offset;
	      dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, cfa_offset);
	      dwarf2out_reg_save (l, FRAME_POINTER_REGNUM, - cfa_store_offset);
	    }
#endif

	  output_asm_insn (AS2 (mov%L0,%0,%1), xops);
#ifdef INCOMING_RETURN_ADDR_RTX
	  if (dwarf2out_do_frame ())
	    /* From here on the CFA is computed from the frame pointer.  */
	    dwarf2out_def_cfa ("", FRAME_POINTER_REGNUM, cfa_offset);
#endif
	}
    }

  if (tsize == 0)
    ;
  else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
    {
      /* Small frame (or no stack probing): just subtract from %esp.  */
      if (do_rtl)
	{
	  insn = emit_insn (gen_prologue_set_stack_ptr (xops[2]));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else
	{
	  output_asm_insn (AS2 (sub%L0,%2,%0), xops);
#ifdef INCOMING_RETURN_ADDR_RTX
	  if (dwarf2out_do_frame ())
	    {
	      cfa_store_offset += tsize;
	      if (! frame_pointer_needed)
		{
		  cfa_offset = cfa_store_offset;
		  dwarf2out_def_cfa ("", STACK_POINTER_REGNUM, cfa_offset);
		}
	    }
#endif
	}
    }
  else
    {
      /* Large frame with stack probing: let _alloca extend the stack,
	 passing the size in %eax (hard reg 0).  */
      xops[3] = gen_rtx_REG (SImode, 0);
      if (do_rtl)
	emit_move_insn (xops[3], xops[2]);
      else
	output_asm_insn (AS2 (mov%L0,%2,%3), xops);

      xops[3] = gen_rtx_MEM (FUNCTION_MODE,
			     gen_rtx (SYMBOL_REF, Pmode, "_alloca"));

      if (do_rtl)
	emit_call_insn (gen_rtx (CALL, VOIDmode, xops[3], const0_rtx));
      else
	output_asm_insn (AS1 (call,%P3), xops);
    }

  /* Note If use enter it is NOT reversed args.
     This one is not reversed from intel!!
     I think enter is slower.  Also sdb doesn't like it.
     But if you want it the code is:
     {
     xops[3] = const0_rtx;
     output_asm_insn ("enter %2,%3", xops);
     }
     */

  /* Push every call-saved register that is live, plus the PIC
     register when it is in use.  */
  limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
  for (regno = limit - 1; regno >= 0; regno--)
    if ((regs_ever_live[regno] && ! call_used_regs[regno])
	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
      {
	xops[0] = gen_rtx_REG (SImode, regno);
	if (do_rtl)
	  {
	    insn = emit_insn (gen_rtx (SET, VOIDmode,
				       gen_rtx_MEM (SImode,
						    gen_rtx (PRE_DEC, SImode,
							     stack_pointer_rtx)),
				       xops[0]));

	    RTX_FRAME_RELATED_P (insn) = 1;
	  }
	else
	  {
	    output_asm_insn ("push%L0 %0", xops);
#ifdef INCOMING_RETURN_ADDR_RTX
	    if (dwarf2out_do_frame ())
	      {
		char *l = dwarf2out_cfi_label ();

		cfa_store_offset += 4;
		if (! frame_pointer_needed)
		  {
		    cfa_offset = cfa_store_offset;
		    dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, cfa_offset);
		  }

		dwarf2out_reg_save (l, regno, - cfa_store_offset);
	      }
#endif
	  }
      }

#ifdef SUBTARGET_PROLOGUE
  SUBTARGET_PROLOGUE;
#endif

  if (pic_reg_used)
    load_pic_register (do_rtl);

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  However, if -fpic, the above call will have
     done that.  */
  if ((profile_flag || profile_block_flag)
      && ! pic_reg_used && do_rtl)
    emit_insn (gen_blockage ());
}
2228
2229 /* Return 1 if it is appropriate to emit `ret' instructions in the
2230 body of a function. Do this only if the epilogue is simple, needing a
2231 couple of insns. Prior to reloading, we can't tell how many registers
2232 must be saved, so return 0 then. Return 0 if there is no frame
2233 marker to de-allocate.
2234
2235 If NON_SAVING_SETJMP is defined and true, then it is not possible
2236 for the epilogue to be simple, so return 0. This is a special case
2237 since NON_SAVING_SETJMP will not cause regs_ever_live to change
2238 until final, but jump_optimize may need to know sooner if a
2239 `return' is OK. */
2240
2241 int
2242 ix86_can_use_return_insn_p ()
2243 {
2244 int regno;
2245 int nregs = 0;
2246 int reglimit = (frame_pointer_needed
2247 ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2248 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2249 || current_function_uses_const_pool);
2250
2251 #ifdef NON_SAVING_SETJMP
2252 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
2253 return 0;
2254 #endif
2255
2256 if (! reload_completed)
2257 return 0;
2258
2259 for (regno = reglimit - 1; regno >= 0; regno--)
2260 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2261 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2262 nregs++;
2263
2264 return nregs == 0 || ! frame_pointer_needed;
2265 }
2266
2267 /* This function generates the assembly code for function exit.
2268 FILE is an stdio stream to output the code to.
2269 SIZE is an int: how many units of temporary storage to deallocate. */
2270
2271 void
2272 function_epilogue (file, size)
2273 FILE *file ATTRIBUTE_UNUSED;
2274 int size ATTRIBUTE_UNUSED;
2275 {
2276 return;
2277 }
2278
/* Restore function stack, frame, and registers.  Entry point used when
   the epilogue is generated as RTL; the argument 1 selects the RTL
   (rather than direct assembly output) path in ix86_epilogue.  */

void
ix86_expand_epilogue ()
{
  ix86_epilogue (1);
}
2286
/* Emit the function epilogue: restore the callee-saved registers,
   release the stack frame, and return (popping caller arguments when
   required).  If DO_RTL is nonzero the epilogue is emitted as RTL
   insns; otherwise the corresponding assembly text is printed with
   output_asm_insn.  The two paths are parallel and must stay in
   sync.  */

static void
ix86_epilogue (do_rtl)
     int do_rtl;
{
  register int regno;
  register int nregs, limit;
  int offset;
  rtx xops[3];
  /* Nonzero if %ebx holds the PIC base and must be restored.  */
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  long tsize = get_frame_size ();

  /* Compute the number of registers to pop */

  limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);

  nregs = 0;

  for (regno = limit - 1; regno >= 0; regno--)
    if ((regs_ever_live[regno] && ! call_used_regs[regno])
	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
      nregs++;

  /* sp is often unreliable so we must go off the frame pointer.

     In reality, we may not care if sp is unreliable, because we can restore
     the register relative to the frame pointer.  In theory, since each move
     is the same speed as a pop, and we don't need the leal, this is faster.
     For now restore multiple registers the old way. */

  /* Offset of the first saved register below the frame pointer.  */
  offset = - tsize - (nregs * UNITS_PER_WORD);

  xops[2] = stack_pointer_rtx;

  /* When -fpic, we must emit a scheduling barrier, so that the instruction
     that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
     moved before any instruction which implicitly uses the got.  This
     includes any instruction which uses a SYMBOL_REF or a LABEL_REF.

     Alternatively, this could be fixed by making the dependence on the
     PIC_OFFSET_TABLE_REGNUM explicit in the RTL.  */

  if (flag_pic || profile_flag || profile_block_flag)
    emit_insn (gen_blockage ());

  /* Restore registers.  With more than one register to restore (or no
     frame pointer at all), point %esp at the save area and pop them;
     otherwise load the single register with a frame-relative move.  */
  if (nregs > 1 || ! frame_pointer_needed)
    {
      if (frame_pointer_needed)
	{
	  xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
	  if (do_rtl)
	    emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
	  else
	    output_asm_insn (AS2 (lea%L2,%0,%2), xops);
	}

      for (regno = 0; regno < limit; regno++)
	if ((regs_ever_live[regno] && ! call_used_regs[regno])
	    || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
	  {
	    xops[0] = gen_rtx_REG (SImode, regno);

	    if (do_rtl)
	      emit_insn (gen_pop (xops[0]));
	    else
	      output_asm_insn ("pop%L0 %0", xops);
	  }
    }

  else
    for (regno = 0; regno < limit; regno++)
      if ((regs_ever_live[regno] && ! call_used_regs[regno])
	  || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
	{
	  xops[0] = gen_rtx_REG (SImode, regno);
	  xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);

	  if (do_rtl)
	    emit_move_insn (xops[0], xops[1]);
	  else
	    output_asm_insn (AS2 (mov%L0,%1,%0), xops);

	  offset += 4;
	}

  if (frame_pointer_needed)
    {
      /* If not an i386, mov & pop is faster than "leave". */

      if (TARGET_USE_LEAVE)
	{
	  if (do_rtl)
	    emit_insn (gen_leave());
	  else
	    output_asm_insn ("leave", xops);
	}
      else
	{
	  xops[0] = frame_pointer_rtx;
	  xops[1] = stack_pointer_rtx;

	  if (do_rtl)
	    {
	      emit_insn (gen_epilogue_set_stack_ptr());
	      emit_insn (gen_pop (xops[0]));
	    }
	  else
	    {
	      /* movl %ebp,%esp; popl %ebp -- the manual "leave".  */
	      output_asm_insn (AS2 (mov%L2,%0,%2), xops);
	      output_asm_insn ("pop%L0 %0", xops);
	    }
	}
    }

  else if (tsize)
    {
      /* Intel's docs say that for 4 or 8 bytes of stack frame one should
	 use `pop' and not `add'. */
      int use_pop = tsize == 4;

      /* Use two pops only for the Pentium processors. */
      if (tsize == 8 && !TARGET_386 && !TARGET_486)
	{
	  rtx retval = current_function_return_rtx;

	  xops[1] = gen_rtx_REG (SImode, 1);	/* %edx */

	  /* This case is a bit more complex.  Since we cannot pop into
	     %ecx twice we need a second register.  But this is only
	     available if the return value is not of DImode in which
	     case the %edx register is not available.  */
	  use_pop = (retval == NULL
		     || ! reg_overlap_mentioned_p (xops[1], retval));
	}

      if (use_pop)
	{
	  xops[0] = gen_rtx_REG (SImode, 2);	/* %ecx */

	  if (do_rtl)
	    {
	      /* We have to prevent the two pops here from being scheduled.
		 GCC otherwise would try in some situation to put other
		 instructions in between them which has a bad effect.  */
	      emit_insn (gen_blockage ());
	      emit_insn (gen_pop (xops[0]));
	      if (tsize == 8)
		emit_insn (gen_pop (xops[1]));
	    }
	  else
	    {
	      output_asm_insn ("pop%L0 %0", xops);
	      if (tsize == 8)
		output_asm_insn ("pop%L1 %1", xops);
	    }
	}
      else
	{
	  /* If there is no frame pointer, we must still release the frame. */
	  xops[0] = GEN_INT (tsize);

	  if (do_rtl)
	    emit_insn (gen_rtx (SET, VOIDmode, xops[2],
				gen_rtx (PLUS, SImode, xops[2], xops[0])));
	  else
	    output_asm_insn (AS2 (add%L2,%0,%2), xops);
	}
    }

#ifdef FUNCTION_BLOCK_PROFILER_EXIT
  /* NOTE(review): `file' is not declared anywhere in this function, so
     this compiles only while FUNCTION_BLOCK_PROFILER_EXIT is left
     undefined by the subtarget -- verify what stream this should use
     (asm_out_file?) before enabling it.  */
  if (profile_block_flag == 2)
    {
      FUNCTION_BLOCK_PROFILER_EXIT(file);
    }
#endif

  if (current_function_pops_args && current_function_args_size)
    {
      xops[1] = GEN_INT (current_function_pops_args);

      /* i386 can only pop 32K bytes (maybe 64K?  Is it signed?).  If
	 asked to pop more, pop return address, do explicit add, and jump
	 indirectly to the caller. */

      if (current_function_pops_args >= 32768)
	{
	  /* ??? Which register to use here? */
	  xops[0] = gen_rtx_REG (SImode, 2);

	  if (do_rtl)
	    {
	      emit_insn (gen_pop (xops[0]));
	      emit_insn (gen_rtx (SET, VOIDmode, xops[2],
				  gen_rtx (PLUS, SImode, xops[1], xops[2])));
	      /* NOTE(review): emitting a bare REG as a jump insn looks
		 suspect; an indirect-jump pattern would be expected here
		 -- confirm against the .md patterns.  */
	      emit_jump_insn (xops[0]);
	    }
	  else
	    {
	      output_asm_insn ("pop%L0 %0", xops);
	      output_asm_insn (AS2 (add%L2,%1,%2), xops);
	      output_asm_insn ("jmp %*%0", xops);
	    }
	}
      else
	{
	  /* `ret N' pops the return address and N bytes of arguments.  */
	  if (do_rtl)
	    emit_jump_insn (gen_return_pop_internal (xops[1]));
	  else
	    output_asm_insn ("ret %1", xops);
	}
    }
  else
    {
      if (do_rtl)
	emit_jump_insn (gen_return_internal ());
      else
	output_asm_insn ("ret", xops);
    }
}
2506 \f
2507 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2508 that is a valid memory address for an instruction.
2509 The MODE argument is the machine mode for the MEM expression
2510 that wants to use this address.
2511
2512 On x86, legitimate addresses are:
2513 base movl (base),reg
2514 displacement movl disp,reg
2515 base + displacement movl disp(base),reg
2516 index + base movl (base,index),reg
2517 (index + base) + displacement movl disp(base,index),reg
2518 index*scale movl (,index,scale),reg
2519 index*scale + disp movl disp(,index,scale),reg
2520 index*scale + base movl (base,index,scale),reg
2521 (index*scale + base) + disp movl disp(base,index,scale),reg
2522
2523 In each case, scale can be 1, 2, 4, 8. */
2524
2525 /* This is exactly the same as print_operand_addr, except that
2526 it recognizes addresses instead of printing them.
2527
2528 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2529 convert common non-canonical forms to canonical form so that they will
2530 be recognized. */
2531
/* Report (when -mdebug-addr) why an address was rejected: print MSG
   and dump INSN.  Use fputs rather than passing MSG as a printf format
   string, so a message containing `%' can never be misinterpreted.  */
#define ADDR_INVALID(msg,insn)						\
do {									\
  if (TARGET_DEBUG_ADDR)						\
    {									\
      fputs ((msg), stderr);						\
      debug_rtx (insn);							\
    }									\
} while (0)
2540
2541 static int
2542 legitimate_pic_address_disp_p (disp)
2543 register rtx disp;
2544 {
2545 if (GET_CODE (disp) != CONST)
2546 return 0;
2547 disp = XEXP (disp, 0);
2548
2549 if (GET_CODE (disp) == PLUS)
2550 {
2551 if (GET_CODE (XEXP (disp, 1)) != CONST_INT)
2552 return 0;
2553 disp = XEXP (disp, 0);
2554 }
2555
2556 if (GET_CODE (disp) != UNSPEC
2557 || XVECLEN (disp, 0) != 1)
2558 return 0;
2559
2560 /* Must be @GOT or @GOTOFF. */
2561 if (XINT (disp, 1) != 6
2562 && XINT (disp, 1) != 7)
2563 return 0;
2564
2565 if (GET_CODE (XVECEXP (disp, 0, 0)) != SYMBOL_REF
2566 && GET_CODE (XVECEXP (disp, 0, 0)) != LABEL_REF)
2567 return 0;
2568
2569 return 1;
2570 }
2571
/* Return TRUE if ADDR is a valid memory address for MODE, i.e. it can
   be decomposed into base + index*scale + displacement with the x86
   constraints.  STRICT nonzero means hard-register checks apply
   (after reload); zero allows pseudos as well.  */

int
legitimate_address_p (mode, addr, strict)
     enum machine_mode mode;
     register rtx addr;
     int strict;
{
  rtx base  = NULL_RTX;
  rtx indx  = NULL_RTX;
  rtx scale = NULL_RTX;
  rtx disp  = NULL_RTX;

  if (TARGET_DEBUG_ADDR)
    {
      fprintf (stderr,
	       "\n======\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
	       GET_MODE_NAME (mode), strict);

      debug_rtx (addr);
    }

  /* Decompose ADDR into base/index/scale/displacement.  Only the
     canonical shapes produced by LEGITIMIZE_ADDRESS are recognized.  */
  if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
    base = addr;

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == SUBREG)
	{
	  if (code1 == REG || code1 == SUBREG)
	    {
	      indx = op0;	/* index + base */
	      base = op1;
	    }

	  else
	    {
	      base = op0;	/* base + displacement */
	      disp = op1;
	    }
	}

      else if (code0 == MULT)
	{
	  indx  = XEXP (op0, 0);
	  scale = XEXP (op0, 1);

	  if (code1 == REG || code1 == SUBREG)
	    base = op1;		/* index*scale + base */

	  else
	    disp = op1;		/* index*scale + disp */
	}

      else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
	{
	  indx  = XEXP (XEXP (op0, 0), 0);	/* index*scale + base + disp */
	  scale = XEXP (XEXP (op0, 0), 1);
	  base  = XEXP (op0, 1);
	  disp  = op1;
	}

      else if (code0 == PLUS)
	{
	  indx = XEXP (op0, 0);	/* index + base + disp */
	  base = XEXP (op0, 1);
	  disp = op1;
	}

      else
	{
	  ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
	  return FALSE;
	}
    }

  else if (GET_CODE (addr) == MULT)
    {
      indx  = XEXP (addr, 0);	/* index*scale */
      scale = XEXP (addr, 1);
    }

  else
    disp = addr;		/* displacement */

  /* Allow arg pointer and stack pointer as index if there is not scaling */
  if (base && indx && !scale
      && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
    {
      rtx tmp = base;
      base = indx;
      indx = tmp;
    }

  /* Validate base register:

     Don't allow SUBREG's here, it can lead to spill failures when the base
     is one word out of a two word structure, which is represented internally
     as a DImode int.  */

  if (base)
    {
      if (GET_CODE (base) != REG)
	{
	  ADDR_INVALID ("Base is not a register.\n", base);
	  return FALSE;
	}

      if ((strict && ! REG_OK_FOR_BASE_STRICT_P (base))
	  || (! strict && ! REG_OK_FOR_BASE_NONSTRICT_P (base)))
	{
	  ADDR_INVALID ("Base is not valid.\n", base);
	  return FALSE;
	}
    }

  /* Validate index register:

     Don't allow SUBREG's here, it can lead to spill failures when the index
     is one word out of a two word structure, which is represented internally
     as a DImode int.  */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
	{
	  ADDR_INVALID ("Index is not a register.\n", indx);
	  return FALSE;
	}

      if ((strict && ! REG_OK_FOR_INDEX_STRICT_P (indx))
	  || (! strict && ! REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
	{
	  ADDR_INVALID ("Index is not valid.\n", indx);
	  return FALSE;
	}
    }
  else if (scale)
    abort ();			/* scale w/o index invalid */

  /* Validate scale factor: */
  if (scale)
    {
      HOST_WIDE_INT value;

      if (GET_CODE (scale) != CONST_INT)
	{
	  ADDR_INVALID ("Scale is not valid.\n", scale);
	  return FALSE;
	}

      /* The hardware supports only these four scale factors.  */
      value = INTVAL (scale);
      if (value != 1 && value != 2 && value != 4 && value != 8)
	{
	  ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
	  return FALSE;
	}
    }

  /* Validate displacement.  */
  if (disp)
    {
      if (!CONSTANT_ADDRESS_P (disp))
	{
	  ADDR_INVALID ("Displacement is not valid.\n", disp);
	  return FALSE;
	}

      else if (GET_CODE (disp) == CONST_DOUBLE)
	{
	  ADDR_INVALID ("Displacement is a const_double.\n", disp);
	  return FALSE;
	}

      /* Symbolic displacements under -fpic must be proper @GOT/@GOTOFF
	 references relative to the PIC register.  */
      if (flag_pic && SYMBOLIC_CONST (disp))
	{
	  if (! legitimate_pic_address_disp_p (disp))
	    {
	      ADDR_INVALID ("Displacement is an invalid PIC construct.\n",
			    disp);
	      return FALSE;
	    }

	  if (base != pic_offset_table_rtx
	      && (indx != pic_offset_table_rtx || scale != NULL_RTX))
	    {
	      ADDR_INVALID ("PIC displacement against invalid base.\n", disp);
	      return FALSE;
	    }
	}

      else if (HALF_PIC_P ())
	{
	  if (! HALF_PIC_ADDRESS_P (disp)
	      || (base != NULL_RTX || indx != NULL_RTX))
	    {
	      ADDR_INVALID ("Displacement is an invalid half-pic reference.\n",
			    disp);
	      return FALSE;
	    }
	}
    }

  if (TARGET_DEBUG_ADDR)
    fprintf (stderr, "Address is valid.\n");

  /* Everything looks valid, return true */
  return TRUE;
}
2783 \f
2784 /* Return a legitimate reference for ORIG (an address) using the
2785 register REG. If REG is 0, a new pseudo is generated.
2786
2787 There are two types of references that must be handled:
2788
2789 1. Global data references must load the address from the GOT, via
2790 the PIC reg. An insn is emitted to do this load, and the reg is
2791 returned.
2792
2793 2. Static data references, constant pool addresses, and code labels
2794 compute the address as an offset from the GOT, whose base is in
2795 the PIC reg. Static data objects have SYMBOL_REF_FLAG set to
2796 differentiate them from global data objects. The returned
2797 address is the PIC reg + an unspec constant.
2798
2799 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2800 reg also appears in the address. */
2801
rtx
legitimize_pic_address (orig, reg)
     rtx orig;
     rtx reg;
{
  rtx addr = orig;
  rtx new = orig;
  rtx base;

  if (GET_CODE (addr) == LABEL_REF
      || (GET_CODE (addr) == SYMBOL_REF
	  && (CONSTANT_POOL_ADDRESS_P (addr)
	      || SYMBOL_REF_FLAG (addr))))
    {
      /* This symbol may be referenced via a displacement from the PIC
	 base address (@GOTOFF).  Unspec 7 marks a @GOTOFF reference.  */

      current_function_uses_pic_offset_table = 1;
      new = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, addr), 7);
      new = gen_rtx_CONST (VOIDmode, new);
      new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);

      if (reg != 0)
	{
	  emit_move_insn (reg, new);
	  new = reg;
	}
    }
  else if (GET_CODE (addr) == SYMBOL_REF)
    {
      /* This symbol must be referenced via a load from the
	 Global Offset Table (@GOT).  Unspec 6 marks a @GOT reference;
	 the GOT slot itself never changes, hence RTX_UNCHANGING_P.  */

      current_function_uses_pic_offset_table = 1;
      new = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, addr), 6);
      new = gen_rtx_CONST (VOIDmode, new);
      new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
      new = gen_rtx_MEM (Pmode, new);
      RTX_UNCHANGING_P (new) = 1;

      if (reg == 0)
	reg = gen_reg_rtx (Pmode);
      emit_move_insn (reg, new);
      new = reg;
    }
  else
    {
      /* CONST or PLUS: strip the CONST wrapper, then either rewrite a
	 symbol+offset directly or recurse on both operands of a PLUS.  */
      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  if (GET_CODE (addr) == UNSPEC)
	    {
	      /* Check that the unspec is one of the ones we generate?  */
	    }
	  else if (GET_CODE (addr) != PLUS)
	    abort();
	}
      if (GET_CODE (addr) == PLUS)
	{
	  rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);

	  /* Check first to see if this is a constant offset from a @GOTOFF
	     symbol reference.  */
	  if ((GET_CODE (op0) == LABEL_REF
	       || (GET_CODE (op0) == SYMBOL_REF
		   && (CONSTANT_POOL_ADDRESS_P (op0)
		       || SYMBOL_REF_FLAG (op0))))
	      && GET_CODE (op1) == CONST_INT)
	    {
	      current_function_uses_pic_offset_table = 1;
	      new = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, op0), 7);
	      new = gen_rtx_PLUS (VOIDmode, new, op1);
	      new = gen_rtx_CONST (VOIDmode, new);
	      new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);

	      if (reg != 0)
		{
		  emit_move_insn (reg, new);
		  new = reg;
		}
	    }
	  else
	    {
	      /* Legitimize both operands and recombine.  REG is reused
		 for the second operand only if the first didn't claim
		 it, so the two results cannot clobber each other.  */
	      base = legitimize_pic_address (XEXP (addr, 0), reg);
	      new  = legitimize_pic_address (XEXP (addr, 1),
					     base == reg ? NULL_RTX : reg);

	      if (GET_CODE (new) == CONST_INT)
		new = plus_constant (base, INTVAL (new));
	      else
		{
		  /* Re-associate so any constant ends up outermost.  */
		  if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
		    {
		      base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
		      new = XEXP (new, 1);
		    }
		  new = gen_rtx_PLUS (Pmode, base, new);
		}
	    }
	}
    }
  return new;
}
2905 \f
2906 /* Emit insns to move operands[1] into operands[0]. */
2907
2908 void
2909 emit_pic_move (operands, mode)
2910 rtx *operands;
2911 enum machine_mode mode ATTRIBUTE_UNUSED;
2912 {
2913 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
2914
2915 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2916 operands[1] = force_reg (Pmode, operands[1]);
2917 else
2918 operands[1] = legitimize_pic_address (operands[1], temp);
2919 }
2920 \f
2921 /* Try machine-dependent ways of modifying an illegitimate address
2922 to be legitimate. If we find one, return the new, valid address.
2923 This macro is used in only one place: `memory_address' in explow.c.
2924
2925 OLDX is the address as it was before break_out_memory_refs was called.
2926 In some cases it is useful to look at this to decide what needs to be done.
2927
2928 MODE and WIN are passed so that this macro can use
2929 GO_IF_LEGITIMATE_ADDRESS.
2930
2931 It is always safe for this macro to do nothing. It exists to recognize
2932 opportunities to optimize the output.
2933
2934 For the 80386, we handle X+REG by loading X into a register R and
2935 using R+REG. R will go in a general reg and indexing will be used.
2936 However, if REG is a broken-out memory address or multiplication,
2937 nothing needs to be done because REG can certainly go in a general reg.
2938
2939 When -fpic is used, special handling is needed for symbolic references.
2940 See comments by legitimize_pic_address in i386.c for details. */
2941
rtx
legitimize_address (x, oldx, mode)
     register rtx x;
     register rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  int changed = 0;
  unsigned log;

  if (TARGET_DEBUG_ADDR)
    {
      fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n",
	       GET_MODE_NAME (mode));
      debug_rtx (x);
    }

  /* Symbolic constants under -fpic need GOT/GOTOFF rewriting.  */
  if (flag_pic && SYMBOLIC_CONST (x))
    return legitimize_pic_address (x, 0);

  /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
  if (GET_CODE (x) == ASHIFT
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
    {
      changed = 1;
      x = gen_rtx_MULT (Pmode, force_reg (Pmode, XEXP (x, 0)),
			GEN_INT (1 << log));
    }

  if (GET_CODE (x) == PLUS)
    {
      /* Canonicalize shifts by 0, 1, 2, 3 into multiply.
	 Note: subexpressions of X are rewritten in place via XEXP.  */

      if (GET_CODE (XEXP (x, 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
	{
	  changed = 1;
	  XEXP (x, 0) = gen_rtx (MULT, Pmode,
				 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
				 GEN_INT (1 << log));
	}

      if (GET_CODE (XEXP (x, 1)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
	  && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
	{
	  changed = 1;
	  XEXP (x, 1) = gen_rtx (MULT, Pmode,
				 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
				 GEN_INT (1 << log));
	}

      /* Put multiply first if it isn't already.  */
      if (GET_CODE (XEXP (x, 1)) == MULT)
	{
	  rtx tmp = XEXP (x, 0);
	  XEXP (x, 0) = XEXP (x, 1);
	  XEXP (x, 1) = tmp;
	  changed = 1;
	}

      /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
	 into (plus (plus (mult (reg) (const)) (reg)) (const)).  This can be
	 created by virtual register instantiation, register elimination, and
	 similar optimizations.  */
      if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
	{
	  changed = 1;
	  x = gen_rtx (PLUS, Pmode,
		       gen_rtx (PLUS, Pmode, XEXP (x, 0),
				XEXP (XEXP (x, 1), 0)),
		       XEXP (XEXP (x, 1), 1));
	}

      /* Canonicalize
	 (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
	 into (plus (plus (mult (reg) (const)) (reg)) (const)).  */
      else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
	       && CONSTANT_P (XEXP (x, 1)))
	{
	  rtx constant;
	  rtx other = NULL_RTX;

	  /* Find which of the two addends is the CONST_INT; OTHER gets
	     the remaining constant term.  */
	  if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	    {
	      constant = XEXP (x, 1);
	      other = XEXP (XEXP (XEXP (x, 0), 1), 1);
	    }
	  else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
	    {
	      constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
	      other = XEXP (x, 1);
	    }
	  else
	    constant = 0;

	  if (constant)
	    {
	      changed = 1;
	      x = gen_rtx (PLUS, Pmode,
			   gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
				    XEXP (XEXP (XEXP (x, 0), 1), 0)),
			   plus_constant (other, INTVAL (constant)));
	    }
	}

      if (changed && legitimate_address_p (mode, x, FALSE))
	return x;

      /* Force multiplies into registers so the remaining PLUS operands
	 can form base + index.  */
      if (GET_CODE (XEXP (x, 0)) == MULT)
	{
	  changed = 1;
	  XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
	}

      if (GET_CODE (XEXP (x, 1)) == MULT)
	{
	  changed = 1;
	  XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
	}

      if (changed
	  && GET_CODE (XEXP (x, 1)) == REG
	  && GET_CODE (XEXP (x, 0)) == REG)
	return x;

      if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
	{
	  changed = 1;
	  x = legitimize_pic_address (x, 0);
	}

      if (changed && legitimate_address_p (mode, x, FALSE))
	return x;

      /* Last resort: force the non-register operand into a register so
	 the address has the form reg + reg.  */
      if (GET_CODE (XEXP (x, 0)) == REG)
	{
	  register rtx temp = gen_reg_rtx (Pmode);
	  register rtx val  = force_operand (XEXP (x, 1), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  XEXP (x, 1) = temp;
	  return x;
	}

      else if (GET_CODE (XEXP (x, 1)) == REG)
	{
	  register rtx temp = gen_reg_rtx (Pmode);
	  register rtx val  = force_operand (XEXP (x, 0), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  XEXP (x, 0) = temp;
	  return x;
	}
    }

  return x;
}
3105 \f
3106 /* Print an integer constant expression in assembler syntax. Addition
3107 and subtraction are the only arithmetic that may appear in these
3108 expressions. FILE is the stdio stream to write to, X is the rtx, and
3109 CODE is the operand print code from the output string. */
3110
3111 static void
3112 output_pic_addr_const (file, x, code)
3113 FILE *file;
3114 rtx x;
3115 int code;
3116 {
3117 char buf[256];
3118
3119 switch (GET_CODE (x))
3120 {
3121 case PC:
3122 if (flag_pic)
3123 putc ('.', file);
3124 else
3125 abort ();
3126 break;
3127
3128 case SYMBOL_REF:
3129 assemble_name (file, XSTR (x, 0));
3130 if (code == 'P' && ! SYMBOL_REF_FLAG (x))
3131 fputs ("@PLT", file);
3132 break;
3133
3134 case LABEL_REF:
3135 x = XEXP (x, 0);
3136 /* FALLTHRU */
3137 case CODE_LABEL:
3138 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3139 assemble_name (asm_out_file, buf);
3140 break;
3141
3142 case CONST_INT:
3143 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3144 break;
3145
3146 case CONST:
3147 /* This used to output parentheses around the expression,
3148 but that does not work on the 386 (either ATT or BSD assembler). */
3149 output_pic_addr_const (file, XEXP (x, 0), code);
3150 break;
3151
3152 case CONST_DOUBLE:
3153 if (GET_MODE (x) == VOIDmode)
3154 {
3155 /* We can use %d if the number is <32 bits and positive. */
3156 if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
3157 fprintf (file, "0x%lx%08lx",
3158 (unsigned long) CONST_DOUBLE_HIGH (x),
3159 (unsigned long) CONST_DOUBLE_LOW (x));
3160 else
3161 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3162 }
3163 else
3164 /* We can't handle floating point constants;
3165 PRINT_OPERAND must handle them. */
3166 output_operand_lossage ("floating constant misused");
3167 break;
3168
3169 case PLUS:
3170 /* Some assemblers need integer constants to appear first. */
3171 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3172 {
3173 output_pic_addr_const (file, XEXP (x, 0), code);
3174 fprintf (file, "+");
3175 output_pic_addr_const (file, XEXP (x, 1), code);
3176 }
3177 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3178 {
3179 output_pic_addr_const (file, XEXP (x, 1), code);
3180 fprintf (file, "+");
3181 output_pic_addr_const (file, XEXP (x, 0), code);
3182 }
3183 else
3184 abort ();
3185 break;
3186
3187 case MINUS:
3188 output_pic_addr_const (file, XEXP (x, 0), code);
3189 fprintf (file, "-");
3190 output_pic_addr_const (file, XEXP (x, 1), code);
3191 break;
3192
3193 case UNSPEC:
3194 if (XVECLEN (x, 0) != 1)
3195 abort ();
3196 output_pic_addr_const (file, XVECEXP (x, 0, 0), code);
3197 switch (XINT (x, 1))
3198 {
3199 case 6:
3200 fputs ("@GOT", file);
3201 break;
3202 case 7:
3203 fputs ("@GOTOFF", file);
3204 break;
3205 case 8:
3206 fputs ("@PLT", file);
3207 break;
3208 default:
3209 output_operand_lossage ("invalid UNSPEC as operand");
3210 break;
3211 }
3212 break;
3213
3214 default:
3215 output_operand_lossage ("invalid expression as operand");
3216 }
3217 }
3218 \f
/* Output to FILE the jump-instruction suffix (the "e" in "je") for
   comparison CODE, given the condition-code state left by the previous
   compare (cc_prev_status).  If REVERSE is nonzero the sense of the
   test is inverted.  */

static void
put_jump_code (code, reverse, file)
     enum rtx_code code;
     int reverse;
     FILE *file;
{
  int flags = cc_prev_status.flags;
  /* Nonzero when IEEE-correct FP comparisons are required and the
     flags came from the 80387.  */
  int ieee = (TARGET_IEEE_FP && (flags & CC_IN_80387));
  const char *suffix;

  /* When the Z result lives in the (complemented) carry flag, only
     equality tests are meaningful.  */
  if (flags & CC_Z_IN_NOT_C)
    switch (code)
      {
      case EQ:
	fputs (reverse ? "c" : "nc", file);
	return;

      case NE:
	fputs (reverse ? "nc" : "c", file);
	return;

      default:
	abort ();
      }
  if (ieee)
    {
      /* NOTE(review): the compare preceding an IEEE FP jump is arranged
	 so these unsigned/equality tests give the right answer even for
	 NaN operands; GT/LT/GE collapsing to e/ne depends on that
	 arrangement -- confirm against the compare emission code.  */
      switch (code)
	{
	case LE:
	  suffix = reverse ? "ae" : "b";
	  break;
	case GT:
	case LT:
	case GE:
	  suffix = reverse ? "ne" : "e";
	  break;
	case EQ:
	  suffix = reverse ? "ne" : "e";
	  break;
	case NE:
	  suffix = reverse ? "e" : "ne";
	  break;
	default:
	  abort ();
	}
      fputs (suffix, file);
      return;
    }
  if (flags & CC_TEST_AX)
    abort();
  /* Without a valid overflow flag, signed LE/GT cannot be tested.  */
  if ((flags & CC_NO_OVERFLOW) && (code == LE || code == GT))
    abort ();
  if (reverse)
    code = reverse_condition (code);
  switch (code)
    {
    case EQ:
      suffix = "e";
      break;

    case NE:
      suffix = "ne";
      break;

    case GT:
      /* 80387 compares set the flags like unsigned compares.  */
      suffix = flags & CC_IN_80387 ? "a" : "g";
      break;

    case GTU:
      suffix = "a";
      break;

    case LT:
      if (flags & CC_NO_OVERFLOW)
	suffix = "s";
      else
	suffix = flags & CC_IN_80387 ? "b" : "l";
      break;

    case LTU:
      suffix = "b";
      break;

    case GE:
      if (flags & CC_NO_OVERFLOW)
	suffix = "ns";
      else
	suffix = flags & CC_IN_80387 ? "ae" : "ge";
      break;

    case GEU:
      suffix = "ae";
      break;

    case LE:
      suffix = flags & CC_IN_80387 ? "be" : "le";
      break;

    case LEU:
      suffix = "be";
      break;

    default:
      abort ();
    }
  fputs (suffix, file);
}
3326
3327 /* Append the correct conditional move suffix which corresponds to CODE. */
3328
/* Output to FILE the set/cmov/fcmov condition suffix for comparison
   CODE.  REVERSE_CC inverts the sense of the test.  MODE selects the
   integer (MODE_INT) or floating (MODE_FLOAT) suffix table.  */

static void
put_condition_code (code, reverse_cc, mode, file)
     enum rtx_code code;
     int reverse_cc;
     enum mode_class mode;
     FILE * file;
{
  /* IEEE-correct FP handling applies only to plain 80387 flags, not
     to flags produced by fcomi.  */
  int ieee = (TARGET_IEEE_FP && (cc_prev_status.flags & CC_IN_80387)
	      && ! (cc_prev_status.flags & CC_FCOMI));
  /* Under IEEE FP the reversal is folded into the table below instead
     of reversing the rtx code.  */
  if (reverse_cc && ! ieee)
    code = reverse_condition (code);

  if (mode == MODE_INT)
    switch (code)
      {
      case NE:
	if (cc_prev_status.flags & CC_Z_IN_NOT_C)
	  fputs ("b", file);
	else
	  fputs ("ne", file);
	return;

      case EQ:
	if (cc_prev_status.flags & CC_Z_IN_NOT_C)
	  fputs ("ae", file);
	else
	  fputs ("e", file);
	return;

      case GE:
	/* With no valid overflow flag, test the sign flag instead.  */
	if (cc_prev_status.flags & CC_NO_OVERFLOW)
	  fputs ("ns", file);
	else
	  fputs ("ge", file);
	return;

      case GT:
	fputs ("g", file);
	return;

      case LE:
	fputs ("le", file);
	return;

      case LT:
	if (cc_prev_status.flags & CC_NO_OVERFLOW)
	  fputs ("s", file);
	else
	  fputs ("l", file);
	return;

      case GEU:
	fputs ("ae", file);
	return;

      case GTU:
	fputs ("a", file);
	return;

      case LEU:
	fputs ("be", file);
	return;

      case LTU:
	fputs ("b", file);
	return;

      default:
	output_operand_lossage ("Invalid %%C operand");
      }

  else if (mode == MODE_FLOAT)
    /* 80387 flags behave like an unsigned compare, hence the b/nb
       family of suffixes; the ieee variants fold in REVERSE_CC.  */
    switch (code)
      {
      case NE:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "ne", file);
	return;
      case EQ:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "e", file);
	return;
      case GE:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file);
	return;
      case GT:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file);
	return;
      case LE:
	fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file);
	return;
      case LT:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file);
	return;
      case GEU:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file);
	return;
      case GTU:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file);
	return;
      case LEU:
	fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file);
	return;
      case LTU:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file);
	return;
      default:
	output_operand_lossage ("Invalid %%C operand");
      }
}
3437
/* Meaning of CODE:
   L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
   C -- print opcode suffix for set/cmov insn.
   c -- like C, but print reversed condition
   F -- print opcode suffix for fcmov insn.
   f -- like F, but print reversed condition
   D -- print the opcode suffix for a jump
   d -- like D, but print reversed condition
   R -- print the prefix for register names.
   z -- print the opcode suffix for the size of the current operand.
   * -- print a star (in certain assembler syntax)
   J -- print the appropriate jump operand.
   s -- print a shift double count, followed by the assemblers argument
	delimiter.
   b -- print the QImode name of the register for the indicated operand.
	%b0 would print %al if operands[0] is reg 0.
   w -- likewise, print the HImode name of the register.
   k -- likewise, print the SImode name of the register.
   h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
   y -- print "st(0)" instead of "st" as a register.
   P -- print as a PIC constant */

/* Print operand X to FILE, applying the operand-modifier letter CODE
   (0 when no modifier was given).  Modifier letters either print an
   opcode suffix / prefix and return, or fall through to the generic
   operand printing at the bottom, which dispatches on the rtx code
   of X (register, memory, float constant, or address constant).  */

void
print_operand (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  if (code)
    {
      switch (code)
	{
	case '*':
	  /* Indirect-jump/call star, only in assembler dialects that
	     use it (see USE_STAR).  */
	  if (USE_STAR)
	    putc ('*', file);
	  return;

	/* Fixed-size suffixes; PUT_OP_SIZE decides whether the
	   assembler dialect wants the suffix at all.  */
	case 'L':
	  PUT_OP_SIZE (code, 'l', file);
	  return;

	case 'W':
	  PUT_OP_SIZE (code, 'w', file);
	  return;

	case 'B':
	  PUT_OP_SIZE (code, 'b', file);
	  return;

	case 'Q':
	  PUT_OP_SIZE (code, 'l', file);
	  return;

	case 'S':
	  PUT_OP_SIZE (code, 's', file);
	  return;

	case 'T':
	  PUT_OP_SIZE (code, 't', file);
	  return;

	case 'z':
	  /* 387 opcodes don't get size suffixes if the operands are
	     registers. */

	  if (STACK_REG_P (x))
	    return;

	  /* this is the size of op from size of operand */
	  switch (GET_MODE_SIZE (GET_MODE (x)))
	    {
	    case 1:
	      PUT_OP_SIZE ('B', 'b', file);
	      return;

	    case 2:
	      PUT_OP_SIZE ('W', 'w', file);
	      return;

	    case 4:
	      if (GET_MODE (x) == SFmode)
		{
		  PUT_OP_SIZE ('S', 's', file);
		  return;
		}
	      else
		PUT_OP_SIZE ('L', 'l', file);
	      return;

	    case 12:
	      PUT_OP_SIZE ('T', 't', file);
	      return;

	    case 8:
	      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
		{
#ifdef GAS_MNEMONICS
		  PUT_OP_SIZE ('Q', 'q', file);
		  return;
#else
		  /* NOTE(review): without GAS_MNEMONICS this emits the
		     suffix here AND again below (no return before the
		     second PUT_OP_SIZE) -- looks like a double emission;
		     confirm against PUT_OP_SIZE's guard conditions.  */
		  PUT_OP_SIZE ('Q', 'l', file); /* Fall through */
#endif
		}

	      PUT_OP_SIZE ('Q', 'l', file);
	      return;
	    }
	  /* NOTE(review): an unhandled operand size falls through into
	     the register-letter cases below and prints the operand with
	     code 'z'; presumably unreachable in practice -- confirm.  */

	case 'b':
	case 'w':
	case 'k':
	case 'h':
	case 'y':
	case 'P':
	case 'X':
	  /* These letters are interpreted by the generic printing code
	     below (PRINT_REG and the constant-output path).  */
	  break;

	case 'J':
	  switch (GET_CODE (x))
	    {
	      /* These conditions are appropriate for testing the result
		 of an arithmetic operation, not for a compare operation.
	         Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
		 CC_Z_IN_NOT_C false and not floating point. */
	    case NE:  fputs ("jne", file); return;
	    case EQ:  fputs ("je",  file); return;
	    case GE:  fputs ("jns", file); return;
	    case LT:  fputs ("js",  file); return;
	    case GEU: fputs ("jmp", file); return;
	    case GTU: fputs ("jne", file); return;
	    case LEU: fputs ("je", file); return;
	    case LTU: fputs ("#branch never", file); return;

	    /* no matching branches for GT nor LE */

	    default:
	      abort ();
	    }

	case 's':
	  /* Shift-double count: print it followed by the argument
	     delimiter, unless the assembler implies the count.  */
	  if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
	    {
	      PRINT_OPERAND (file, x, 0);
	      fputs (AS2C (,) + 1, file);
	    }

	  return;

	case 'D':
	  put_jump_code (GET_CODE (x), 0, file);
	  return;

	case 'd':
	  put_jump_code (GET_CODE (x), 1, file);
	  return;

	  /* This is used by the conditional move instructions.  */
	case 'C':
	  put_condition_code (GET_CODE (x), 0, MODE_INT, file);
	  return;

	  /* Like above, but reverse condition */
	case 'c':
	  put_condition_code (GET_CODE (x), 1, MODE_INT, file); return;

	case 'F':
	  put_condition_code (GET_CODE (x), 0, MODE_FLOAT, file);
	  return;

	  /* Like above, but reverse condition */
	case 'f':
	  put_condition_code (GET_CODE (x), 1, MODE_FLOAT, file);
	  return;

	default:
	  {
	    char str[50];

	    sprintf (str, "invalid operand code `%c'", code);
	    output_operand_lossage (str);
	  }
	}
    }

  /* Generic operand printing: dispatch on the form of X.  */
  if (GET_CODE (x) == REG)
    {
      PRINT_REG (x, code, file);
    }

  else if (GET_CODE (x) == MEM)
    {
      PRINT_PTR (x, file);
      if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
	{
	  if (flag_pic)
	    output_pic_addr_const (file, XEXP (x, 0), code);
	  else
	    output_addr_const (file, XEXP (x, 0));
	}
      else
	output_address (XEXP (x, 0));
    }

  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* SFmode constants are emitted as their 32-bit target image
	 in hex, with the immediate prefix.  */
      REAL_VALUE_TYPE r;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_TARGET_SINGLE (r, l);
      PRINT_IMMED_PREFIX (file);
      fprintf (file, "0x%lx", l);
    }

  /* These float cases don't actually occur as immediate operands. */
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE r;
      char dstr[30];

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
      fprintf (file, "%s", dstr);
    }

  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
    {
      REAL_VALUE_TYPE r;
      char dstr[30];

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
      fprintf (file, "%s", dstr);
    }
  else
    {
      /* Integer constants and symbolic addresses.  'P' suppresses the
	 immediate/offset prefix so the value prints as a bare
	 PIC constant.  */
      if (code != 'P')
	{
	  if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	    PRINT_IMMED_PREFIX (file);
	  else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
		   || GET_CODE (x) == LABEL_REF)
	    PRINT_OFFSET_PREFIX (file);
	}
      if (flag_pic)
	output_pic_addr_const (file, x, code);
      else
	output_addr_const (file, x);
    }
}
3689 \f
/* Print a memory operand whose address is ADDR.

   Decomposes ADDR into a constant displacement, an optional base
   register and an optional scaled index register, then emits them via
   the dialect macros (ADDR_BEG / PRINT_B_I_S / ...).  */

void
print_operand_address (file, addr)
     FILE *file;
     register rtx addr;
{
  register rtx reg1, reg2, breg, ireg;
  rtx offset;

  switch (GET_CODE (addr))
    {
    case REG:
      /* Simple register indirect; hi_reg_name holds the word register
	 names, and the "e" prefix makes the 32-bit form (e.g. %eax).  */
      ADDR_BEG (file);
      fprintf (file, "%se", RP);
      fputs (hi_reg_name[REGNO (addr)], file);
      ADDR_END (file);
      break;

    case PLUS:
      reg1 = 0;
      reg2 = 0;
      ireg = 0;
      breg = 0;
      offset = 0;
      /* Peel off a constant displacement from either side.  */
      if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
	{
	  offset = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
	{
	  offset = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}

      /* If a PLUS remains, pull one REG or MULT term into reg1 and
	 leave the other in addr.  */
      if (GET_CODE (addr) != PLUS)
	;
      else if (GET_CODE (XEXP (addr, 0)) == MULT)
	reg1 = XEXP (addr, 0), addr = XEXP (addr, 1);
      else if (GET_CODE (XEXP (addr, 1)) == MULT)
	reg1 = XEXP (addr, 1), addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 0)) == REG)
	reg1 = XEXP (addr, 0), addr = XEXP (addr, 1);
      else if (GET_CODE (XEXP (addr, 1)) == REG)
	reg1 = XEXP (addr, 1), addr = XEXP (addr, 0);

      if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
	{
	  if (reg1 == 0)
	    reg1 = addr;
	  else
	    reg2 = addr;

	  addr = 0;
	}

      /* Whatever is left in addr must be the displacement.  */
      if (offset != 0)
	{
	  if (addr != 0)
	    abort ();
	  addr = offset;
	}

      /* Classify reg1/reg2 as base vs. index.  A MULT can only be
	 the index.  */
      if ((reg1 && GET_CODE (reg1) == MULT)
	  || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
	{
	  breg = reg2;
	  ireg = reg1;
	}
      else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
	{
	  breg = reg1;
	  ireg = reg2;
	}

      if (ireg != 0 || breg != 0)
	{
	  int scale = 1;

	  if (addr != 0)
	    {
	      if (flag_pic)
		output_pic_addr_const (file, addr, 0);
	      else if (GET_CODE (addr) == LABEL_REF)
		output_asm_label (addr);
	      else
		output_addr_const (file, addr);
	    }

	  if (ireg != 0 && GET_CODE (ireg) == MULT)
	    {
	      scale = INTVAL (XEXP (ireg, 1));
	      ireg = XEXP (ireg, 0);
	    }

	  /* The stack pointer can only appear as a base register,
	     never an index register, so exchange the regs if it is wrong. */

	  if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
	    {
	      rtx tmp;

	      tmp = breg;
	      breg = ireg;
	      ireg = tmp;
	    }

	  /* output breg+ireg*scale */
	  PRINT_B_I_S (breg, ireg, scale, file);
	  break;
	}
      /* NOTE(review): when neither a base nor an index was found, this
	 deliberately(?) falls through into the MULT case -- confirm
	 that such addresses cannot reach here with a non-MULT addr.  */

    case MULT:
      {
	/* Index register scaled by a constant, no base.  */
	int scale;

	if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
	  {
	    scale = INTVAL (XEXP (addr, 0));
	    ireg = XEXP (addr, 1);
	  }
	else
	  {
	    scale = INTVAL (XEXP (addr, 1));
	    ireg = XEXP (addr, 0);
	  }

	/* Emit an explicit zero displacement.  */
	output_addr_const (file, const0_rtx);
	PRINT_B_I_S (NULL_RTX, ireg, scale, file);
      }
      break;

    default:
      /* Bare displacement: small literal integers print directly,
	 everything else goes through the constant-address printers.  */
      if (GET_CODE (addr) == CONST_INT
	  && INTVAL (addr) < 0x8000
	  && INTVAL (addr) >= -0x8000)
	fprintf (file, "%d", (int) INTVAL (addr));
      else
	{
	  if (flag_pic)
	    output_pic_addr_const (file, addr, 0);
	  else
	    output_addr_const (file, addr);
	}
    }
}
3837 \f
/* Set the cc_status for the results of an insn whose pattern is EXP.
   On the 80386, we assume that only test and compare insns, as well
   as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, BSF, ASHIFT,
   ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
   Also, we assume that jumps, moves and sCOND don't affect the condition
   codes.  All else clobbers the condition codes, by assumption.

   We assume that ALL integer add, minus, etc. instructions effect the
   condition codes.  This MUST be consistent with i386.md.

   We don't record any float test or compare - the redundant test &
   compare check in final.c does not handle stack-like regs correctly. */

void
notice_update_cc (exp)
     rtx exp;
{
  if (GET_CODE (exp) == SET)
    {
      /* Jumps do not alter the cc's. */
      if (SET_DEST (exp) == pc_rtx)
	return;

      /* Moving register or memory into a register:
	 it doesn't alter the cc's, but it might invalidate
	 the RTX's which we remember the cc's came from.
	 (Note that moving a constant 0 or 1 MAY set the cc's). */
      if (REG_P (SET_DEST (exp))
	  && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
	      || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'
	      || (GET_CODE (SET_SRC (exp)) == IF_THEN_ELSE
		  && GET_MODE_CLASS (GET_MODE (SET_DEST (exp))) == MODE_INT)))
	{
	  if (cc_status.value1
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
	    cc_status.value1 = 0;

	  if (cc_status.value2
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
	    cc_status.value2 = 0;

	  return;
	}

      /* Moving register into memory doesn't alter the cc's.
	 It may invalidate the RTX's which we remember the cc's came from. */
      if (GET_CODE (SET_DEST (exp)) == MEM
	  && (REG_P (SET_SRC (exp))
	      || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
	{
	  if (cc_status.value1
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
	    cc_status.value1 = 0;
	  if (cc_status.value2
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
	    cc_status.value2 = 0;

	  return;
	}

      /* Function calls clobber the cc's. */
      else if (GET_CODE (SET_SRC (exp)) == CALL)
	{
	  CC_STATUS_INIT;
	  return;
	}

      /* Tests and compares set the cc's in predictable ways. */
      else if (SET_DEST (exp) == cc0_rtx)
	{
	  CC_STATUS_INIT;
	  cc_status.value1 = SET_SRC (exp);
	  return;
	}

      /* Certain instructions effect the condition codes. */
      else if (GET_MODE (SET_SRC (exp)) == SImode
	       || GET_MODE (SET_SRC (exp)) == HImode
	       || GET_MODE (SET_SRC (exp)) == QImode)
	switch (GET_CODE (SET_SRC (exp)))
	  {
	  case ASHIFTRT: case LSHIFTRT: case ASHIFT:
	    /* Shifts on the 386 don't set the condition codes if the
	       shift count is zero. */
	    if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
	      {
		CC_STATUS_INIT;
		break;
	      }

	    /* We assume that the CONST_INT is non-zero (this rtx would
	       have been deleted if it were zero.  Falls through to the
	       arithmetic cases below. */

	  case PLUS: case MINUS: case NEG:
	  case AND: case IOR: case XOR:
	    /* These set all flags except that overflow is unusable.  */
	    cc_status.flags = CC_NO_OVERFLOW;
	    cc_status.value1 = SET_SRC (exp);
	    cc_status.value2 = SET_DEST (exp);
	    break;

	    /* This is the bsf pattern used by ffs.  (UNSPEC number 5
	       must match the i386.md pattern.)  */
	  case UNSPEC:
	    if (XINT (SET_SRC (exp), 1) == 5)
	      {
		/* Only the Z flag is defined after bsf. */
		cc_status.flags
		  = CC_NOT_POSITIVE | CC_NOT_NEGATIVE | CC_NO_OVERFLOW;
		cc_status.value1 = XVECEXP (SET_SRC (exp), 0, 0);
		cc_status.value2 = 0;
		break;
	      }
	    /* FALLTHRU */

	  default:
	    CC_STATUS_INIT;
	  }
      else
	{
	  CC_STATUS_INIT;
	}
    }
  else if (GET_CODE (exp) == PARALLEL
	   && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
    {
      /* Only the first element of the PARALLEL is inspected; the rest
	 is assumed to be clobbers.  */
      if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
	return;
      if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)
	{
	  CC_STATUS_INIT;
	  if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
	    {
	      /* FP compare: cc's live in the 387 status word.  The
		 "if (0 && ...)" FCOMI path is deliberately disabled.  */
	      cc_status.flags |= CC_IN_80387;
	      if (0 && TARGET_CMOVE && stack_regs_mentioned_p
		  (XEXP (SET_SRC (XVECEXP (exp, 0, 0)), 1)))
		cc_status.flags |= CC_FCOMI;
	    }
	  else
	    cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
	  return;
	}

      CC_STATUS_INIT;
    }
  else
    {
      CC_STATUS_INIT;
    }
}
3987 \f
3988 /* Split one or more DImode RTL references into pairs of SImode
3989 references. The RTL can be REG, offsettable MEM, integer constant, or
3990 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3991 split and "num" is its length. lo_half and hi_half are output arrays
3992 that parallel "operands". */
3993
3994 void
3995 split_di (operands, num, lo_half, hi_half)
3996 rtx operands[];
3997 int num;
3998 rtx lo_half[], hi_half[];
3999 {
4000 while (num--)
4001 {
4002 rtx op = operands[num];
4003 if (! reload_completed)
4004 {
4005 lo_half[num] = gen_lowpart (SImode, op);
4006 hi_half[num] = gen_highpart (SImode, op);
4007 }
4008 else if (GET_CODE (op) == REG)
4009 {
4010 lo_half[num] = gen_rtx_REG (SImode, REGNO (op));
4011 hi_half[num] = gen_rtx_REG (SImode, REGNO (op) + 1);
4012 }
4013 else if (CONSTANT_P (op))
4014 split_double (op, &lo_half[num], &hi_half[num]);
4015 else if (offsettable_memref_p (op))
4016 {
4017 rtx lo_addr = XEXP (op, 0);
4018 rtx hi_addr = XEXP (adj_offsettable_operand (op, 4), 0);
4019 lo_half[num] = change_address (op, SImode, lo_addr);
4020 hi_half[num] = change_address (op, SImode, hi_addr);
4021 }
4022 else
4023 abort();
4024 }
4025 }
4026 \f
4027 /* Return 1 if this is a valid binary operation on a 387.
4028 OP is the expression matched, and MODE is its mode. */
4029
4030 int
4031 binary_387_op (op, mode)
4032 register rtx op;
4033 enum machine_mode mode;
4034 {
4035 if (mode != VOIDmode && mode != GET_MODE (op))
4036 return 0;
4037
4038 switch (GET_CODE (op))
4039 {
4040 case PLUS:
4041 case MINUS:
4042 case MULT:
4043 case DIV:
4044 return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
4045
4046 default:
4047 return 0;
4048 }
4049 }
4050 \f
4051 /* Return 1 if this is a valid shift or rotate operation on a 386.
4052 OP is the expression matched, and MODE is its mode. */
4053
4054 int
4055 shift_op (op, mode)
4056 register rtx op;
4057 enum machine_mode mode;
4058 {
4059 rtx operand = XEXP (op, 0);
4060
4061 if (mode != VOIDmode && mode != GET_MODE (op))
4062 return 0;
4063
4064 if (GET_MODE (operand) != GET_MODE (op)
4065 || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
4066 return 0;
4067
4068 return (GET_CODE (op) == ASHIFT
4069 || GET_CODE (op) == ASHIFTRT
4070 || GET_CODE (op) == LSHIFTRT
4071 || GET_CODE (op) == ROTATE
4072 || GET_CODE (op) == ROTATERT);
4073 }
4074
4075 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
4076 MODE is not used. */
4077
4078 int
4079 VOIDmode_compare_op (op, mode)
4080 register rtx op;
4081 enum machine_mode mode ATTRIBUTE_UNUSED;
4082 {
4083 return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
4084 }
4085 \f
/* Output code to perform a 387 binary operation in INSN, one of PLUS,
   MINUS, MULT or DIV.  OPERANDS are the insn operands, where operands[3]
   is the expression of the binary operation.  The output may either be
   emitted here, or returned to the caller, like all output_* functions.

   There is no guarantee that the operands are the same mode, as they
   might be within FLOAT or FLOAT_EXTEND expressions.

   The returned template is built in a static buffer, so it is only
   valid until the next call.  */

char *
output_387_binary_op (insn, operands)
     rtx insn;
     rtx *operands;
{
  rtx temp;
  char *base_op;
  static char buf[100];

  /* Pick the base mnemonic: the "fi" forms take an integer memory
     operand, the plain forms are float-only.  */
  switch (GET_CODE (operands[3]))
    {
    case PLUS:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fiadd";
      else
	base_op = "fadd";
      break;

    case MINUS:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fisub";
      else
	base_op = "fsub";
      break;

    case MULT:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fimul";
      else
	base_op = "fmul";
      break;

    case DIV:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fidiv";
      else
	base_op = "fdiv";
      break;

    default:
      abort ();
    }

  strcpy (buf, base_op);

  switch (GET_CODE (operands[3]))
    {
    case MULT:
    case PLUS:
      /* Commutative: canonicalize so operands[2] is the one that is
	 not the destination register.  */
      if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
	{
	  temp = operands[2];
	  operands[2] = operands[1];
	  operands[1] = temp;
	}

      if (GET_CODE (operands[2]) == MEM)
	return strcat (buf, AS1 (%z2,%2));

      /* A CPU (non-387) register operand has to be spilled through
	 memory first; output_op_from_reg emits everything itself.  */
      if (NON_STACK_REG_P (operands[1]))
	{
	  output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
	  return "";
	}

      else if (NON_STACK_REG_P (operands[2]))
	{
	  output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
	  return "";
	}

      /* Use the popping form when the second operand dies here.  */
      if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
	{
	  if (STACK_TOP_P (operands[0]))
	    return strcat (buf, AS2 (p,%0,%2));
	  else
	    return strcat (buf, AS2 (p,%2,%0));
	}

      if (STACK_TOP_P (operands[0]))
	return strcat (buf, AS2C (%y2,%0));
      else
	return strcat (buf, AS2C (%2,%0));

    case MINUS:
    case DIV:
      /* Non-commutative: the "r" (reversed) forms swap the operand
	 roles when the first operand is the one in memory / not on
	 top of the stack.  */
      if (GET_CODE (operands[1]) == MEM)
	return strcat (buf, AS1 (r%z1,%1));

      if (GET_CODE (operands[2]) == MEM)
	return strcat (buf, AS1 (%z2,%2));

      if (NON_STACK_REG_P (operands[1]))
	{
	  output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
	  return "";
	}

      else if (NON_STACK_REG_P (operands[2]))
	{
	  output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
	  return "";
	}

      if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
	abort ();

      /* Prefer the popping forms when either stack operand dies.  */
      if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
	{
	  if (STACK_TOP_P (operands[0]))
	    return strcat (buf, AS2 (p,%0,%2));
	  else
	    return strcat (buf, AS2 (rp,%2,%0));
	}

      if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
	{
	  if (STACK_TOP_P (operands[0]))
	    return strcat (buf, AS2 (rp,%0,%1));
	  else
	    return strcat (buf, AS2 (p,%1,%0));
	}

      if (STACK_TOP_P (operands[0]))
	{
	  if (STACK_TOP_P (operands[1]))
	    return strcat (buf, AS2C (%y2,%0));
	  else
	    return strcat (buf, AS2 (r,%y1,%0));
	}
      else if (STACK_TOP_P (operands[1]))
	return strcat (buf, AS2C (%1,%0));
      else
	return strcat (buf, AS2 (r,%2,%0));

    default:
      abort ();
    }
}
4237 \f
/* Output code for INSN to convert a float to a signed int.  OPERANDS
   are the insn operands.  The input may be SFmode or DFmode and the
   output operand may be SImode or DImode.  As a special case, make sure
   that the 387 stack top dies if the output mode is DImode, because the
   hardware requires this.

   operands[2] and operands[3] are HImode stack slots used to hold the
   original and the modified FPU control word; operands[4] is a scratch
   SImode location.  */

char *
output_fix_trunc (insn, operands)
     rtx insn;
     rtx *operands;
{
  int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
  rtx xops[2];

  if (! STACK_TOP_P (operands[1]))
    abort ();

  /* 12 = 0x0C: written into the high byte of the control-word copy it
     sets the rounding-control bits (RC, bits 10-11) to `round toward
     zero', as C truncation requires.  */
  xops[0] = GEN_INT (12);
  xops[1] = operands[4];

  /* Save the control word, build a truncating copy, and load it.  */
  output_asm_insn (AS1 (fnstc%W2,%2), operands);
  output_asm_insn (AS2 (mov%L2,%2,%4), operands);
  output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
  output_asm_insn (AS2 (mov%L4,%4,%3), operands);
  output_asm_insn (AS1 (fldc%W3,%3), operands);

  if (NON_STACK_REG_P (operands[0]))
    output_to_reg (operands[0], stack_top_dies, operands[3]);

  else if (GET_CODE (operands[0]) == MEM)
    {
      if (stack_top_dies)
	output_asm_insn (AS1 (fistp%z0,%0), operands);
      else if (GET_MODE (operands[0]) == DImode && ! stack_top_dies)
	{
	  /* There is no DImode version of this without a stack pop, so
	     we must emulate it.  It doesn't matter much what the second
	     instruction is, because the value being pushed on the FP stack
	     is not used except for the following stack popping store.
	     This case can only happen without optimization, so it doesn't
	     matter that it is inefficient.  */
	  output_asm_insn (AS1 (fistp%z0,%0), operands);
	  output_asm_insn (AS1 (fild%z0,%0), operands);
	}
      else
	output_asm_insn (AS1 (fist%z0,%0), operands);
    }
  else
    abort ();

  /* Restore the original control word.  */
  return AS1 (fldc%W2,%2);
}
4290 \f
/* Output code for INSN to compare OPERANDS.  The two operands might
   not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
   expression.  If the compare is in mode CCFPEQmode, use an opcode that
   will not fault if a qNaN is present.  */

char *
output_float_compare (insn, operands)
     rtx insn;
     rtx *operands;
{
  int stack_top_dies;
  rtx body = XVECEXP (PATTERN (insn), 0, 0);
  int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
  rtx tmp;

  /* The FCOMI (PPro) path is deliberately disabled by the "0 &&";
     when enabled it would put the result directly in EFLAGS.  */
  if (0 && TARGET_CMOVE && STACK_REG_P (operands[1]))
    {
      cc_status.flags |= CC_FCOMI;
      cc_prev_status.flags &= ~CC_TEST_AX;
    }

  /* The 387 can only compare against the stack top; swap operands if
     needed and remember that the condition is reversed.  */
  if (! STACK_TOP_P (operands[0]))
    {
      tmp = operands[0];
      operands[0] = operands[1];
      operands[1] = tmp;
      cc_status.flags |= CC_REVERSED;
    }

  if (! STACK_TOP_P (operands[0]))
    abort ();

  stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;

  if (STACK_REG_P (operands[1])
      && stack_top_dies
      && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
      && REGNO (operands[1]) != FIRST_STACK_REG)
    {
      /* If both the top of the 387 stack dies, and the other operand
	 is also a stack register that dies, then this must be a
	 `fcompp' float compare */

      if (unordered_compare)
	{
	  if (cc_status.flags & CC_FCOMI)
	    {
	      output_asm_insn (AS2 (fucomip,%y1,%0), operands);
	      output_asm_insn (AS1 (fstp, %y0), operands);
	      return "";
	    }
	  else
	    output_asm_insn ("fucompp", operands);
	}
      else
	{
	  if (cc_status.flags & CC_FCOMI)
	    {
	      output_asm_insn (AS2 (fcomip, %y1,%0), operands);
	      output_asm_insn (AS1 (fstp, %y0), operands);
	      return "";
	    }
	  else
	    output_asm_insn ("fcompp", operands);
	}
    }
  else
    {
      static char buf[100];

      /* Decide if this is the integer or float compare opcode, or the
	 unordered float compare. */

      if (unordered_compare)
	strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fucomi" : "fucom");
      else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
	strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fcomi" : "fcom");
      else
	strcpy (buf, "ficom");

      /* Modify the opcode if the 387 stack is to be popped. */

      if (stack_top_dies)
	strcat (buf, "p");

      if (NON_STACK_REG_P (operands[1]))
	output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
      else if (cc_status.flags & CC_FCOMI)
	{
	  output_asm_insn (strcat (buf, AS2 (%z1,%y1,%0)), operands);
	  return "";
	}
      else
	output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
    }

  /* Now retrieve the condition code. */

  return output_fp_cc0_set (insn);
}
4391 \f
/* Output opcodes to transfer the results of FP compare or test INSN
   from the FPU to the CPU flags.  If TARGET_IEEE_FP, ensure that if the
   result of the compare or test is unordered, no comparison operator
   succeeds except NE.  Return an output template, if any.

   The FPU status word is first stored into AX (fnstsw).  Depending on
   the condition the next cc0 user needs, we either `sahf' it into the
   flags, or (IEEE mode) mask the relevant condition bits in AH
   ourselves so that a NaN result takes the "false" branch.  The masks
   refer to status-word bits in AH: C0 = 0x01, C2 = 0x04, C3 = 0x40.  */

char *
output_fp_cc0_set (insn)
     rtx insn;
{
  rtx xops[3];
  rtx next;
  enum rtx_code code;

  /* Store the 387 status word into AX.  */
  xops[0] = gen_rtx_REG (HImode, 0);
  output_asm_insn (AS1 (fnsts%W0,%0), xops);

  if (! TARGET_IEEE_FP)
    {
      if (!(cc_status.flags & CC_REVERSED))
	{
	  /* NOTE(review): next_cc0_user's result is not checked for
	     NULL on this path, unlike the IEEE path below -- confirm
	     a cc0 user always exists here.  */
	  next = next_cc0_user (insn);

	  if (GET_CODE (next) == JUMP_INSN
	      && GET_CODE (PATTERN (next)) == SET
	      && SET_DEST (PATTERN (next)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
	    code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
	  else if (GET_CODE (PATTERN (next)) == SET)
	    code = GET_CODE (SET_SRC (PATTERN (next)));
	  else
	    return "sahf";

	  if (code == GT || code == LT || code == EQ || code == NE
	      || code == LE || code == GE)
	    {
	      /* We will test eax directly. */
	      cc_status.flags |= CC_TEST_AX;
	      return "";
	    }
	}

      return "sahf";
    }

  /* IEEE mode: find the condition the next cc0 user tests, then emit
     arithmetic on AH that leaves ZF (or CF) set exactly when the
     condition holds and the result is ordered.  */
  next = next_cc0_user (insn);
  if (next == NULL_RTX)
    abort ();

  if (GET_CODE (next) == JUMP_INSN
      && GET_CODE (PATTERN (next)) == SET
      && SET_DEST (PATTERN (next)) == pc_rtx
      && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
    code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
  else if (GET_CODE (PATTERN (next)) == SET)
    {
      if (GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
	code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
      else
	code = GET_CODE (SET_SRC (PATTERN (next)));
    }

  else if (GET_CODE (PATTERN (next)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (next), 0, 0)) == SET)
    {
      if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0))) == IF_THEN_ELSE)
	code = GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next), 0, 0)), 0));
      else
	code = GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0)));
    }
  else
    abort ();

  xops[0] = gen_rtx_REG (QImode, 0);

  switch (code)
    {
    case GT:
      /* 0x45 = C0|C2|C3: all clear means "greater" and ordered.  */
      xops[1] = GEN_INT (0x45);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      /* je label */
      break;

    case LT:
      /* "less" is C0 alone (C2, C3 clear).  */
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x01);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* je label */
      break;

    case GE:
      /* 0x05 = C0|C2: both clear means "greater or equal", ordered.  */
      xops[1] = GEN_INT (0x05);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      /* je label */
      break;

    case LE:
      /* "less or equal": C3 set (equal) or C0 set (less), but not the
	 unordered pattern; the dec/cmp sequence checks both via CF.  */
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS1 (dec%B0,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* jb label */
      break;

    case EQ:
      /* "equal" is C3 alone (C0, C2 clear).  */
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* je label */
      break;

    case NE:
      /* "not equal" must also succeed for unordered (IEEE).  */
      xops[1] = GEN_INT (0x44);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
      /* jne label */
      break;

    case GTU:
    case LTU:
    case GEU:
    case LEU:
    default:
      /* Unsigned comparisons never reach here for FP compares.  */
      abort ();
    }

  return "";
}
4523 \f
#define MAX_386_STACK_LOCALS 2

/* Stack slots reused within the current function, indexed by machine
   mode and slot number; see assign_386_stack_local.  */
static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];

/* Define the structure for the machine field in struct function.  */
struct machine_function
{
  /* Snapshot of the static i386_stack_locals array.  */
  rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
  /* PIC label state saved per function; see save_386_machine_status.  */
  rtx pic_label_rtx;
  char pic_label_name[256];
};
4535
4536 /* Functions to save and restore i386_stack_locals.
4537 These will be called, via pointer variables,
4538 from push_function_context and pop_function_context. */
4539
4540 void
4541 save_386_machine_status (p)
4542 struct function *p;
4543 {
4544 p->machine
4545 = (struct machine_function *) xmalloc (sizeof (struct machine_function));
4546 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
4547 sizeof i386_stack_locals);
4548 p->machine->pic_label_rtx = pic_label_rtx;
4549 bcopy (pic_label_name, p->machine->pic_label_name, 256);
4550 }
4551
4552 void
4553 restore_386_machine_status (p)
4554 struct function *p;
4555 {
4556 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
4557 sizeof i386_stack_locals);
4558 pic_label_rtx = p->machine->pic_label_rtx;
4559 bcopy (p->machine->pic_label_name, pic_label_name, 256);
4560 free (p->machine);
4561 p->machine = NULL;
4562 }
4563
4564 /* Clear stack slot assignments remembered from previous functions.
4565 This is called from INIT_EXPANDERS once before RTL is emitted for each
4566 function. */
4567
4568 void
4569 clear_386_stack_locals ()
4570 {
4571 enum machine_mode mode;
4572 int n;
4573
4574 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
4575 mode = (enum machine_mode) ((int) mode + 1))
4576 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
4577 i386_stack_locals[(int) mode][n] = NULL_RTX;
4578
4579 pic_label_rtx = NULL_RTX;
4580 bzero (pic_label_name, 256);
4581 /* Arrange to save and restore i386_stack_locals around nested functions. */
4582 save_machine_status = save_386_machine_status;
4583 restore_machine_status = restore_386_machine_status;
4584 }
4585
4586 /* Return a MEM corresponding to a stack slot with mode MODE.
4587 Allocate a new slot if necessary.
4588
4589 The RTL for a function can have several slots available: N is
4590 which slot to use. */
4591
4592 rtx
4593 assign_386_stack_local (mode, n)
4594 enum machine_mode mode;
4595 int n;
4596 {
4597 if (n < 0 || n >= MAX_386_STACK_LOCALS)
4598 abort ();
4599
4600 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
4601 i386_stack_locals[(int) mode][n]
4602 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
4603
4604 return i386_stack_locals[(int) mode][n];
4605 }
4606 \f
4607 int is_mul(op,mode)
4608 register rtx op;
4609 enum machine_mode mode ATTRIBUTE_UNUSED;
4610 {
4611 return (GET_CODE (op) == MULT);
4612 }
4613
4614 int is_div(op,mode)
4615 register rtx op;
4616 enum machine_mode mode ATTRIBUTE_UNUSED;
4617 {
4618 return (GET_CODE (op) == DIV);
4619 }
4620 \f
4621 #ifdef NOTYET
4622 /* Create a new copy of an rtx.
4623 Recursively copies the operands of the rtx,
4624 except for those few rtx codes that are sharable.
4625 Doesn't share CONST */
4626
rtx
copy_all_rtx (orig)
     register rtx orig;
{
  register rtx copy;
  register int i, j;
  register RTX_CODE code;
  register char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    /* These codes are always shared; return the original.  */
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values. */
      return orig;

#if 0
    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
	 a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
	return orig;
      break;
#endif
      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded. */
      /* All other codes fall through to be copied below.  */
    }

  /* Make a fresh rtx of the same code and copy the mode and the
     shared flag bits.  */
  copy = rtx_alloc (code);
  PUT_MODE (copy, GET_MODE (orig));
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;
  copy->integrated = orig->integrated;
  /* intel1 */
  copy->is_spill_rtx = orig->is_spill_rtx;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  /* Copy each operand according to its format letter.  */
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  /* NOTE(review): subexpressions are duplicated with copy_rtx
	     (which shares where permitted), not recursively with
	     copy_all_rtx -- presumably intentional, verify.  */
	  XEXP (copy, i) = XEXP (orig, i);
	  if (XEXP (orig, i) != NULL)
	    XEXP (copy, i) = copy_rtx (XEXP (orig, i));
	  break;

	case '0':
	case 'u':
	  /* Opaque slots and insn references are shared, not copied.  */
	  XEXP (copy, i) = XEXP (orig, i);
	  break;

	case 'E':
	case 'V':
	  /* Vectors get a fresh rtvec with each element copied.  */
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	case 'S':
	  /* Strings are shared.  */
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	default:
	  abort ();
	}
    }
  return copy;
}
4724
4725 \f
4726 /* Try to rewrite a memory address to make it valid */
4727
void
rewrite_address (mem_rtx)
     rtx mem_rtx;
{
  rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
  int scale = 1;
  int offset_adjust = 0;
  int was_only_offset = 0;
  rtx mem_addr = XEXP (mem_rtx, 0);
  char *storage = oballoc (0);
  int in_struct = 0;
  int is_spill_rtx = 0;

  /* NOTE(review): in_struct is captured here but never read again in
     this function.  */
  in_struct = MEM_IN_STRUCT_P (mem_rtx);
  is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);

  /* First try re-associating (a + (r + b)) as ((a + r) + b).  */
  if (GET_CODE (mem_addr) == PLUS
      && GET_CODE (XEXP (mem_addr, 1)) == PLUS
      && GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
    {
      /* This part is utilized by the combiner. */
      ret_rtx
	= gen_rtx (PLUS, GET_MODE (mem_addr),
		   gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
			    XEXP (mem_addr, 0), XEXP (XEXP (mem_addr, 1), 0)),
		   XEXP (XEXP (mem_addr, 1), 1));

      if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
	{
	  XEXP (mem_rtx, 0) = ret_rtx;
	  RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
	  return;
	}

      /* Didn't work; release the rtx built above.  */
      obfree (storage);
    }

  /* This part is utilized by loop.c.
     If the address contains PLUS (reg,const) and this pattern is invalid
     in this case - try to rewrite the address to make it valid. */
  storage = oballoc (0);
  index_rtx = base_rtx = offset_rtx = NULL;

  /* Find the base index and offset elements of the memory address. */
  if (GET_CODE (mem_addr) == PLUS)
    {
      if (GET_CODE (XEXP (mem_addr, 0)) == REG)
	{
	  if (GET_CODE (XEXP (mem_addr, 1)) == REG)
	    base_rtx = XEXP (mem_addr, 1), index_rtx = XEXP (mem_addr, 0);
	  else
	    base_rtx = XEXP (mem_addr, 0), offset_rtx = XEXP (mem_addr, 1);
	}

      else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
	{
	  index_rtx = XEXP (mem_addr, 0);
	  if (GET_CODE (XEXP (mem_addr, 1)) == REG)
	    base_rtx = XEXP (mem_addr, 1);
	  else
	    offset_rtx = XEXP (mem_addr, 1);
	}

      else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
	{
	  /* Recognize (((r * c1) + c2) + r2) + sym and pull the inner
	     constant c2 out into offset_adjust.  */
	  if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT
	      && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0))
		  == REG)
	      && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1))
		  == CONST_INT)
	      && (GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1))
		  == CONST_INT)
	      && GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG
	      && GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF)
	    {
	      index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
	      offset_rtx = XEXP (mem_addr, 1);
	      base_rtx = XEXP (XEXP (mem_addr, 0), 1);
	      offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
	    }
	  else
	    {
	      offset_rtx = XEXP (mem_addr, 1);
	      index_rtx = XEXP (XEXP (mem_addr, 0), 0);
	      base_rtx = XEXP (XEXP (mem_addr, 0), 1);
	    }
	}

      else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT)
	{
	  was_only_offset = 1;
	  index_rtx = NULL;
	  base_rtx = NULL;
	  offset_rtx = XEXP (mem_addr, 1);
	  offset_adjust = INTVAL (XEXP (mem_addr, 0));
	  if (offset_adjust == 0)
	    {
	      /* (0 + x) -- just drop the zero.  */
	      XEXP (mem_rtx, 0) = offset_rtx;
	      RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
	      return;
	    }
	}
      else
	{
	  obfree (storage);
	  return;
	}
    }
  else if (GET_CODE (mem_addr) == MULT)
    index_rtx = mem_addr;
  else
    {
      obfree (storage);
      return;
    }

  /* Split a (reg * const) index into index register and scale.  */
  if (index_rtx != 0 && GET_CODE (index_rtx) == MULT)
    {
      if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
	{
	  obfree (storage);
	  return;
	}

      scale_rtx = XEXP (index_rtx, 1);
      scale = INTVAL (scale_rtx);
      index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
    }

  /* Now find which of the elements are invalid and try to fix them. */
  if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
    {
      /* Constant index with no base: fold index * scale into the
	 offset.  */
      offset_adjust = INTVAL (index_rtx) * scale;

      if (offset_rtx != 0 && CONSTANT_P (offset_rtx))
	offset_rtx = plus_constant (offset_rtx, offset_adjust);
      else if (offset_rtx == 0)
	offset_rtx = const0_rtx;

      /* NOTE(review): the spill flag is set on the OLD address rtx
	 here, before it is replaced on the next line; other exit
	 paths mark the new address instead -- looks like an ordering
	 bug (this code is under #ifdef NOTYET).  */
      RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
      XEXP (mem_rtx, 0) = offset_rtx;
      return;
    }

  /* Fold a constant displacement hidden inside the base into
     offset_adjust.  */
  if (base_rtx && GET_CODE (base_rtx) == PLUS
      && GET_CODE (XEXP (base_rtx, 0)) == REG
      && GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
    {
      offset_adjust += INTVAL (XEXP (base_rtx, 1));
      base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
    }

  else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
    {
      offset_adjust += INTVAL (base_rtx);
      base_rtx = NULL;
    }

  /* Likewise for a constant displacement inside the index (scaled).  */
  if (index_rtx && GET_CODE (index_rtx) == PLUS
      && GET_CODE (XEXP (index_rtx, 0)) == REG
      && GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
    {
      offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
      index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
    }

  /* Give up if the remaining index or base still isn't legitimate.  */
  if (index_rtx)
    {
      if (! LEGITIMATE_INDEX_P (index_rtx)
	  && ! (index_rtx == stack_pointer_rtx && scale == 1
		&& base_rtx == NULL))
	{
	  obfree (storage);
	  return;
	}
    }

  if (base_rtx)
    {
      if (! LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
	{
	  obfree (storage);
	  return;
	}
    }

  /* Rebuild the address from the cleaned-up parts, but only if some
     constant was actually folded out above.  */
  if (offset_adjust != 0)
    {
      if (offset_rtx != 0 && CONSTANT_P (offset_rtx))
	offset_rtx = plus_constant (offset_rtx, offset_adjust);
      else
	offset_rtx = const0_rtx;

      if (index_rtx)
	{
	  if (base_rtx)
	    {
	      if (scale != 1)
		{
		  /* base + index*scale [+ offset]  */
		  ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
				     gen_rtx (MULT, GET_MODE (index_rtx),
					      index_rtx, scale_rtx),
				     base_rtx);

		  if (GET_CODE (offset_rtx) != CONST_INT
		      || INTVAL (offset_rtx) != 0)
		    ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
				       ret_rtx, offset_rtx);
		}
	      else
		{
		  /* base + index [+ offset]  */
		  ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx),
				     index_rtx, base_rtx);

		  if (GET_CODE (offset_rtx) != CONST_INT
		      || INTVAL (offset_rtx) != 0)
		    ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
				       ret_rtx, offset_rtx);
		}
	    }
	  else
	    {
	      if (scale != 1)
		{
		  /* index*scale [+ offset]  */
		  ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx),
				     index_rtx, scale_rtx);

		  if (GET_CODE (offset_rtx) != CONST_INT
		      || INTVAL (offset_rtx) != 0)
		    ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
				       ret_rtx, offset_rtx);
		}
	      else
		{
		  /* index [+ offset]  */
		  if (GET_CODE (offset_rtx) == CONST_INT
		      && INTVAL (offset_rtx) == 0)
		    ret_rtx = index_rtx;
		  else
		    ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx),
				       index_rtx, offset_rtx);
		}
	    }
	}
      else
	{
	  if (base_rtx)
	    {
	      /* base [+ offset]  */
	      if (GET_CODE (offset_rtx) == CONST_INT
		  && INTVAL (offset_rtx) == 0)
		ret_rtx = base_rtx;
	      else
		ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx,
				   offset_rtx);
	    }
	  else if (was_only_offset)
	    ret_rtx = offset_rtx;
	  else
	    {
	      obfree (storage);
	      return;
	    }
	}

      XEXP (mem_rtx, 0) = ret_rtx;
      RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
      return;
    }
  else
    {
      obfree (storage);
      return;
    }
}
5002 #endif /* NOTYET */
5003 \f
5004 /* Return 1 if the first insn to set cc before INSN also sets the register
5005 REG_RTX; otherwise return 0. */
5006 int
5007 last_to_set_cc (reg_rtx, insn)
5008 rtx reg_rtx, insn;
5009 {
5010 rtx prev_insn = PREV_INSN (insn);
5011
5012 while (prev_insn)
5013 {
5014 if (GET_CODE (prev_insn) == NOTE)
5015 ;
5016
5017 else if (GET_CODE (prev_insn) == INSN)
5018 {
5019 if (GET_CODE (PATTERN (prev_insn)) != SET)
5020 return (0);
5021
5022 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
5023 {
5024 if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
5025 return (1);
5026
5027 return (0);
5028 }
5029
5030 else if (! doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
5031 return (0);
5032 }
5033
5034 else
5035 return (0);
5036
5037 prev_insn = PREV_INSN (prev_insn);
5038 }
5039
5040 return (0);
5041 }
5042 \f
5043 int
5044 doesnt_set_condition_code (pat)
5045 rtx pat;
5046 {
5047 switch (GET_CODE (pat))
5048 {
5049 case MEM:
5050 case REG:
5051 return 1;
5052
5053 default:
5054 return 0;
5055
5056 }
5057 }
5058 \f
5059 int
5060 sets_condition_code (pat)
5061 rtx pat;
5062 {
5063 switch (GET_CODE (pat))
5064 {
5065 case PLUS:
5066 case MINUS:
5067 case AND:
5068 case IOR:
5069 case XOR:
5070 case NOT:
5071 case NEG:
5072 case MULT:
5073 case DIV:
5074 case MOD:
5075 case UDIV:
5076 case UMOD:
5077 return 1;
5078
5079 default:
5080 return (0);
5081 }
5082 }
5083 \f
5084 int
5085 str_immediate_operand (op, mode)
5086 register rtx op;
5087 enum machine_mode mode ATTRIBUTE_UNUSED;
5088 {
5089 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
5090 return 1;
5091
5092 return 0;
5093 }
5094 \f
5095 int
5096 is_fp_insn (insn)
5097 rtx insn;
5098 {
5099 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
5100 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
5101 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
5102 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
5103 return 1;
5104
5105 return 0;
5106 }
5107
5108 /* Return 1 if the mode of the SET_DEST of insn is floating point
5109 and it is not an fld or a move from memory to memory.
5110 Otherwise return 0 */
5111
5112 int
5113 is_fp_dest (insn)
5114 rtx insn;
5115 {
5116 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
5117 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
5118 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
5119 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
5120 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
5121 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
5122 && GET_CODE (SET_SRC (PATTERN (insn))) != MEM)
5123 return 1;
5124
5125 return 0;
5126 }
5127
5128 /* Return 1 if the mode of the SET_DEST of INSN is floating point and is
5129 memory and the source is a register. */
5130
5131 int
5132 is_fp_store (insn)
5133 rtx insn;
5134 {
5135 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
5136 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
5137 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
5138 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
5139 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
5140 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
5141 return 1;
5142
5143 return 0;
5144 }
5145 \f
5146 /* Return 1 if DEP_INSN sets a register which INSN uses as a base
5147 or index to reference memory.
5148 otherwise return 0 */
5149
5150 int
5151 agi_dependent (insn, dep_insn)
5152 rtx insn, dep_insn;
5153 {
5154 if (GET_CODE (dep_insn) == INSN
5155 && GET_CODE (PATTERN (dep_insn)) == SET
5156 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
5157 return reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn);
5158
5159 if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
5160 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
5161 && push_operand (SET_DEST (PATTERN (dep_insn)),
5162 GET_MODE (SET_DEST (PATTERN (dep_insn)))))
5163 return reg_mentioned_in_mem (stack_pointer_rtx, insn);
5164
5165 return 0;
5166 }
5167 \f
5168 /* Return 1 if reg is used in rtl as a base or index for a memory ref
5169 otherwise return 0. */
5170
5171 int
5172 reg_mentioned_in_mem (reg, rtl)
5173 rtx reg, rtl;
5174 {
5175 register char *fmt;
5176 register int i, j;
5177 register enum rtx_code code;
5178
5179 if (rtl == NULL)
5180 return 0;
5181
5182 code = GET_CODE (rtl);
5183
5184 switch (code)
5185 {
5186 case HIGH:
5187 case CONST_INT:
5188 case CONST:
5189 case CONST_DOUBLE:
5190 case SYMBOL_REF:
5191 case LABEL_REF:
5192 case PC:
5193 case CC0:
5194 case SUBREG:
5195 return 0;
5196 default:
5197 break;
5198 }
5199
5200 if (code == MEM && reg_mentioned_p (reg, rtl))
5201 return 1;
5202
5203 fmt = GET_RTX_FORMAT (code);
5204 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5205 {
5206 if (fmt[i] == 'E')
5207 {
5208 for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
5209 if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
5210 return 1;
5211 }
5212
5213 else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
5214 return 1;
5215 }
5216
5217 return 0;
5218 }
5219 \f
5220 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
5221
5222 operands[0] = result, initialized with the startaddress
5223 operands[1] = alignment of the address.
5224 operands[2] = scratch register, initialized with the startaddress when
5225 not aligned, otherwise undefined
5226
5227 This is just the body. It needs the initialisations mentioned above and
5228 some address computing at the end. These things are done in i386.md. */
5229
5230 char *
5231 output_strlen_unroll (operands)
5232 rtx operands[];
5233 {
5234 rtx xops[18];
5235
5236 xops[0] = operands[0]; /* Result */
5237 /* operands[1]; * Alignment */
5238 xops[1] = operands[2]; /* Scratch */
5239 xops[2] = GEN_INT (0);
5240 xops[3] = GEN_INT (2);
5241 xops[4] = GEN_INT (3);
5242 xops[5] = GEN_INT (4);
5243 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
5244 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
5245 xops[8] = gen_label_rtx (); /* label of main loop */
5246
5247 if (TARGET_USE_Q_REG && QI_REG_P (xops[1]))
5248 xops[9] = gen_label_rtx (); /* pentium optimisation */
5249
5250 xops[10] = gen_label_rtx (); /* end label 2 */
5251 xops[11] = gen_label_rtx (); /* end label 1 */
5252 xops[12] = gen_label_rtx (); /* end label */
5253 /* xops[13] * Temporary used */
5254 xops[14] = GEN_INT (0xff);
5255 xops[15] = GEN_INT (0xff00);
5256 xops[16] = GEN_INT (0xff0000);
5257 xops[17] = GEN_INT (0xff000000);
5258
5259 /* Loop to check 1..3 bytes for null to get an aligned pointer. */
5260
5261 /* Is there a known alignment and is it less than 4? */
5262 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
5263 {
5264 /* Is there a known alignment and is it not 2? */
5265 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
5266 {
5267 xops[6] = gen_label_rtx (); /* Label when aligned to 3-byte */
5268 xops[7] = gen_label_rtx (); /* Label when aligned to 2-byte */
5269
5270 /* Leave just the 3 lower bits.
5271 If this is a q-register, then the high part is used later
5272 therefore use andl rather than andb. */
5273 output_asm_insn (AS2 (and%L1,%4,%1), xops);
5274
5275 /* Is aligned to 4-byte address when zero */
5276 output_asm_insn (AS1 (je,%l8), xops);
5277
5278 /* Side-effect even Parity when %eax == 3 */
5279 output_asm_insn (AS1 (jp,%6), xops);
5280
5281 /* Is it aligned to 2 bytes ? */
5282 if (QI_REG_P (xops[1]))
5283 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
5284 else
5285 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
5286
5287 output_asm_insn (AS1 (je,%7), xops);
5288 }
5289 else
5290 {
5291 /* Since the alignment is 2, we have to check 2 or 0 bytes;
5292 check if is aligned to 4 - byte. */
5293 output_asm_insn (AS2 (and%L1,%3,%1), xops);
5294
5295 /* Is aligned to 4-byte address when zero */
5296 output_asm_insn (AS1 (je,%l8), xops);
5297 }
5298
5299 xops[13] = gen_rtx_MEM (QImode, xops[0]);
5300
5301 /* Now compare the bytes; compare with the high part of a q-reg
5302 gives shorter code. */
5303 if (QI_REG_P (xops[1]))
5304 {
5305 /* Compare the first n unaligned byte on a byte per byte basis. */
5306 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5307
5308 /* When zero we reached the end. */
5309 output_asm_insn (AS1 (je,%l12), xops);
5310
5311 /* Increment the address. */
5312 output_asm_insn (AS1 (inc%L0,%0), xops);
5313
5314 /* Not needed with an alignment of 2 */
5315 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
5316 {
5317 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5318 CODE_LABEL_NUMBER (xops[7]));
5319 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5320 output_asm_insn (AS1 (je,%l12), xops);
5321 output_asm_insn (AS1 (inc%L0,%0), xops);
5322
5323 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5324 CODE_LABEL_NUMBER (xops[6]));
5325 }
5326
5327 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5328 }
5329 else
5330 {
5331 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5332 output_asm_insn (AS1 (je,%l12), xops);
5333 output_asm_insn (AS1 (inc%L0,%0), xops);
5334
5335 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5336 CODE_LABEL_NUMBER (xops[7]));
5337 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5338 output_asm_insn (AS1 (je,%l12), xops);
5339 output_asm_insn (AS1 (inc%L0,%0), xops);
5340
5341 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5342 CODE_LABEL_NUMBER (xops[6]));
5343 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5344 }
5345
5346 output_asm_insn (AS1 (je,%l12), xops);
5347 output_asm_insn (AS1 (inc%L0,%0), xops);
5348 }
5349
5350 /* Generate loop to check 4 bytes at a time. It is not a good idea to
5351 align this loop. It gives only huge programs, but does not help to
5352 speed up. */
5353 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
5354
5355 xops[13] = gen_rtx_MEM (SImode, xops[0]);
5356 output_asm_insn (AS2 (mov%L1,%13,%1), xops);
5357
5358 if (QI_REG_P (xops[1]))
5359 {
5360 /* On i586 it is faster to combine the hi- and lo- part as
5361 a kind of lookahead. If anding both yields zero, then one
5362 of both *could* be zero, otherwise none of both is zero;
5363 this saves one instruction, on i486 this is slower
5364 tested with P-90, i486DX2-66, AMD486DX2-66 */
5365 if (TARGET_PENTIUM)
5366 {
5367 output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
5368 output_asm_insn (AS1 (jne,%l9), xops);
5369 }
5370
5371 /* Check first byte. */
5372 output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
5373 output_asm_insn (AS1 (je,%l12), xops);
5374
5375 /* Check second byte. */
5376 output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
5377 output_asm_insn (AS1 (je,%l11), xops);
5378
5379 if (TARGET_PENTIUM)
5380 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5381 CODE_LABEL_NUMBER (xops[9]));
5382 }
5383
5384 else
5385 {
5386 /* Check first byte. */
5387 output_asm_insn (AS2 (test%L1,%14,%1), xops);
5388 output_asm_insn (AS1 (je,%l12), xops);
5389
5390 /* Check second byte. */
5391 output_asm_insn (AS2 (test%L1,%15,%1), xops);
5392 output_asm_insn (AS1 (je,%l11), xops);
5393 }
5394
5395 /* Check third byte. */
5396 output_asm_insn (AS2 (test%L1,%16,%1), xops);
5397 output_asm_insn (AS1 (je,%l10), xops);
5398
5399 /* Check fourth byte and increment address. */
5400 output_asm_insn (AS2 (add%L0,%5,%0), xops);
5401 output_asm_insn (AS2 (test%L1,%17,%1), xops);
5402 output_asm_insn (AS1 (jne,%l8), xops);
5403
5404 /* Now generate fixups when the compare stops within a 4-byte word. */
5405 output_asm_insn (AS2 (sub%L0,%4,%0), xops);
5406
5407 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
5408 output_asm_insn (AS1 (inc%L0,%0), xops);
5409
5410 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
5411 output_asm_insn (AS1 (inc%L0,%0), xops);
5412
5413 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));
5414
5415 return "";
5416 }
5417
/* Emit the fcmov insn(s) for a floating point conditional move.
   WHICH_ALTERNATIVE selects which operand(s) must be moved into
   operand 0.  Returns the empty string; all output is done through
   output_asm_insn.  */
char *
output_fp_conditional_move (which_alternative, operands)
     int which_alternative;
     rtx operands[];
{
  switch (which_alternative)
    {
    case 0:
      /* r <- cond ? arg : r */
      output_asm_insn (AS2 (fcmov%F1,%2,%0), operands);
      break;

    case 1:
      /* r <- cond ? r : arg */
      output_asm_insn (AS2 (fcmov%f1,%3,%0), operands);
      break;

    case 2:
      /* r <- cond ? arg1 : arg2 -- needs both fcmovs, one for each
	 sense of the condition.  */
      output_asm_insn (AS2 (fcmov%F1,%2,%0), operands);
      output_asm_insn (AS2 (fcmov%f1,%3,%0), operands);
      break;

    default:
      abort ();
    }

  return "";
}
5447
/* Emit the cmov insn(s) for an integer conditional move.  DImode
   moves are split into two SImode cmovs via SUBREGs.  Returns the
   empty string on success, or NULL_PTR to make final re-emit the
   deleted compare insn (see below).  */
char *
output_int_conditional_move (which_alternative, operands)
     int which_alternative;
     rtx operands[];
{
  int code = GET_CODE (operands[1]);
  enum machine_mode mode;
  rtx xops[4];

  /* This is very tricky. We have to do it right. For a code segment
     like:

	int foo, bar;
	....
	foo = foo - x;
	if (foo >= 0)
	  bar = y;

     final_scan_insn () may delete the insn which sets CC. We have to
     tell final_scan_insn () if it should be reinserted. When CODE is
     GT or LE, we have to check the CC_NO_OVERFLOW bit and return
     NULL_PTR to tell final to reinsert the test insn because the
     conditional move cannot be handled properly without it. */
  if ((code == GT || code == LE)
      && (cc_prev_status.flags & CC_NO_OVERFLOW))
    return NULL_PTR;

  mode = GET_MODE (operands [0]);
  if (mode == DImode)
    {
      /* Build the operands for the high SImode half (word 1); the
	 cmov for the low half uses OPERANDS directly.  */
      xops [0] = gen_rtx_SUBREG (SImode, operands [0], 1);
      xops [1] = operands [1];
      xops [2] = gen_rtx_SUBREG (SImode, operands [2], 1);
      xops [3] = gen_rtx_SUBREG (SImode, operands [3], 1);
    }

  switch (which_alternative)
    {
    case 0:
      /* r <- cond ? arg : r */
      output_asm_insn (AS2 (cmov%C1,%2,%0), operands);
      if (mode == DImode)
	output_asm_insn (AS2 (cmov%C1,%2,%0), xops);
      break;

    case 1:
      /* r <- cond ? r : arg */
      output_asm_insn (AS2 (cmov%c1,%3,%0), operands);
      if (mode == DImode)
	output_asm_insn (AS2 (cmov%c1,%3,%0), xops);
      break;

    case 2:
      /* rm <- cond ? arg1 : arg2 */
      output_asm_insn (AS2 (cmov%C1,%2,%0), operands);
      output_asm_insn (AS2 (cmov%c1,%3,%0), operands);
      if (mode == DImode)
	{
	  output_asm_insn (AS2 (cmov%C1,%2,%0), xops);
	  output_asm_insn (AS2 (cmov%c1,%3,%0), xops);
	}
      break;

    default:
      abort ();
    }

  return "";
}
5517
/* Scheduler hook: adjust COST, the default latency of the dependence
   LINK from DEP_INSN to INSN, according to processor-specific
   pairing/AGI rules.  Returns the adjusted cost.  */
int
x86_adjust_cost (insn, link, dep_insn, cost)
     rtx insn, link, dep_insn;
     int cost;
{
  rtx next_inst;

  /* Dependencies on calls and of jumps carry no extra cost.  */
  if (GET_CODE (dep_insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
    return 0;

  /* If DEP_INSN sets a register that INSN's source never mentions,
     the dependence is not a true data dependence here.  */
  if (GET_CODE (dep_insn) == INSN
      && GET_CODE (PATTERN (dep_insn)) == SET
      && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG
      && GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SET
      && !reg_overlap_mentioned_p (SET_DEST (PATTERN (dep_insn)),
				   SET_SRC (PATTERN (insn))))
    return 0;	/* ??? */


  switch (ix86_cpu)
    {
    case PROCESSOR_PENTIUM:
      /* FP-to-FP dependence where DEP_INSN's destination is not a
	 float register is free on Pentium.  */
      if (cost != 0 && is_fp_insn (insn) && is_fp_insn (dep_insn)
	  && !is_fp_dest (dep_insn))
	return 0;

      /* Address Generation Interlock: the address registers must be
	 ready three cycles before use.  */
      if (agi_dependent (insn, dep_insn))
	return 3;

      if (GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SET
	  && SET_DEST (PATTERN (insn)) == cc0_rtx
	  && (next_inst = next_nonnote_insn (insn))
	  && GET_CODE (next_inst) == JUMP_INSN)
	/* compare probably paired with jump */
	return 0;
      break;

    case PROCESSOR_K6:
    default:
      /* NOTE(review): on the non-Pentium path, only AGI dependences
	 from non-FP setters carry a cost (2 on the 486).  */
      if (!is_fp_dest (dep_insn))
	{
	  if(!agi_dependent (insn, dep_insn))
	    return 0;
	  if (TARGET_486)
	    return 2;
	}
      else
	/* An FP store fed by an FP insn right before a loop-closing
	   jump (insn, jump, NOTE_INSN_LOOP_END) costs 3.  */
	if (is_fp_store (insn) && is_fp_insn (dep_insn)
	    && NEXT_INSN (insn) && NEXT_INSN (NEXT_INSN (insn))
	    && NEXT_INSN (NEXT_INSN (NEXT_INSN (insn)))
	    && (GET_CODE (NEXT_INSN (insn)) == INSN)
	    && (GET_CODE (NEXT_INSN (NEXT_INSN (insn))) == JUMP_INSN)
	    && (GET_CODE (NEXT_INSN (NEXT_INSN (NEXT_INSN (insn)))) == NOTE)
	    && (NOTE_LINE_NUMBER (NEXT_INSN (NEXT_INSN (NEXT_INSN (insn))))
		== NOTE_INSN_LOOP_END))
	  return 3;
      break;
    }

  return cost;
}
This page took 0.286782 seconds and 5 git commands to generate.