]> gcc.gnu.org Git - gcc.git/blob - gcc/config/i386/i386.c
flow.c: Update comment.
[gcc.git] / gcc / config / i386 / i386.c
1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 92, 94, 95, 96, 97, 1998 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include <setjmp.h>
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "regs.h"
26 #include "hard-reg-set.h"
27 #include "real.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-flags.h"
31 #include "output.h"
32 #include "insn-attr.h"
33 #include "tree.h"
34 #include "flags.h"
35 #include "except.h"
36 #include "function.h"
37 #include "recog.h"
38 #include "expr.h"
39 #include "toplev.h"
40
#ifdef EXTRA_CONSTRAINT
/* If EXTRA_CONSTRAINT is defined, then the 'S'
   constraint in REG_CLASS_FROM_LETTER will no longer work, and various
   asm statements that need 'S' for class SIREG will break.  */
error EXTRA_CONSTRAINT conflicts with S constraint letter
/* The previous line used to be #error, but some compilers barf
   even if the conditional was untrue.  (The bare `error' token is a
   deliberate syntax error that fires only when this block is
   compiled, i.e. when EXTRA_CONSTRAINT is defined.)  */
#endif

#ifndef CHECK_STACK_LIMIT
/* Fallback when the subtarget does not define CHECK_STACK_LIMIT;
   presumably -1 means "no limit" — confirm against the stack-probe
   code that consumes this macro.  */
#define CHECK_STACK_LIMIT -1
#endif
53
/* Type of an operand for ix86_{binary,unary}_operator_ok: classifies
   an operand as a register, a memory reference, or an immediate.  */
enum reg_mem
{
  reg_p,
  mem_p,
  imm_p
};
61
/* Processor costs (relative to an add).  Field order must match
   struct processor_costs.  One table per supported processor; the
   active table is selected by -mcpu= in override_options via
   processor_target_table and published through ix86_cost below.  */
struct processor_costs i386_cost = {	/* 386 specific costs */
  1,				/* cost of an add instruction */
  1,				/* cost of a lea instruction */
  3,				/* variable shift costs */
  2,				/* constant shift costs */
  6,				/* cost of starting a multiply */
  1,				/* cost of multiply per each bit set */
  23				/* cost of a divide/mod */
};

struct processor_costs i486_cost = {	/* 486 specific costs */
  1,				/* cost of an add instruction */
  1,				/* cost of a lea instruction */
  3,				/* variable shift costs */
  2,				/* constant shift costs */
  12,				/* cost of starting a multiply */
  1,				/* cost of multiply per each bit set */
  40				/* cost of a divide/mod */
};

struct processor_costs pentium_cost = {
  1,				/* cost of an add instruction */
  1,				/* cost of a lea instruction */
  4,				/* variable shift costs */
  1,				/* constant shift costs */
  11,				/* cost of starting a multiply */
  0,				/* cost of multiply per each bit set */
  25				/* cost of a divide/mod */
};

struct processor_costs pentiumpro_cost = {
  1,				/* cost of an add instruction */
  1,				/* cost of a lea instruction */
  3,				/* variable shift costs */
  1,				/* constant shift costs */
  4,				/* cost of starting a multiply */
  0,				/* cost of multiply per each bit set */
  17				/* cost of a divide/mod */
};

struct processor_costs k6_cost = {
  1,				/* cost of an add instruction */
  1,				/* cost of a lea instruction */
  1,				/* variable shift costs */
  1,				/* constant shift costs */
  2,				/* cost of starting a multiply */
  0,				/* cost of multiply per each bit set */
  18				/* cost of a divide/mod */
};

/* Cost table for the processor currently being compiled for.  Set by
   override_options from -mcpu=; Pentium is the initial default.  */
struct processor_costs *ix86_cost = &pentium_cost;
114
/* Processor feature/optimization bitmasks.  Each m_* constant has the
   bit for one processor_type set; a tuning flag below applies to a
   processor when that processor's bit is present in the flag's mask
   (presumably tested via TARGET_* wrapper macros in i386.h —
   confirm there).  */
#define m_386 (1<<PROCESSOR_I386)
#define m_486 (1<<PROCESSOR_I486)
#define m_PENT (1<<PROCESSOR_PENTIUM)
#define m_PPRO (1<<PROCESSOR_PENTIUMPRO)
#define m_K6 (1<<PROCESSOR_K6)

/* Per-processor tuning flags.  */
const int x86_use_leave = m_386 | m_K6;
const int x86_push_memory = m_386 | m_K6;
const int x86_zero_extend_with_and = m_486 | m_PENT;
const int x86_movx = m_386 | m_PPRO | m_K6;
const int x86_double_with_add = ~m_386;
const int x86_use_bit_test = m_386;
const int x86_unroll_strlen = m_486 | m_PENT | m_PPRO;
const int x86_use_q_reg = m_PENT | m_PPRO | m_K6;
const int x86_use_any_reg = m_486;
const int x86_cmove = m_PPRO;
const int x86_deep_branch = m_PPRO| m_K6;

/* A MEM rtx of the given mode addressed by the frame pointer.  */
#define AT_BP(mode) (gen_rtx_MEM ((mode), frame_pointer_rtx))

extern FILE *asm_out_file;
extern char *strcat ();

static void ix86_epilogue PROTO((int));
static void ix86_prologue PROTO((int));

/* Forward declarations (old-style, unprototyped) for the assembler
   output helpers defined later in this file.  */
char *singlemove_string ();
char *output_move_const_single ();
char *output_fp_cc0_set ();

/* Assembler names for each hard register: word-sized form, QImode
   low-byte form, and QImode high-byte form.  Initializers come from
   i386.h.  */
char *hi_reg_name[] = HI_REGISTER_NAMES;
char *qi_reg_name[] = QI_REGISTER_NAMES;
char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
149
/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in i386.h. */

enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{
  /* ax, dx, cx, bx */
  AREG, DREG, CREG, BREG,
  /* si, di, bp, sp */
  SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
  /* FP registers */
  FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
  FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
  /* arg pointer */
  INDEX_REGS
};

/* Test and compare insns in i386.md store the information needed to
   generate branch and scc insns here.  */

/* Operands of the pending comparison.  */
struct rtx_def *i386_compare_op0 = NULL_RTX;
struct rtx_def *i386_compare_op1 = NULL_RTX;
/* Pattern-generator callbacks used to emit the eventual branch/scc
   (the _eq variant presumably handles the (in)equality case —
   confirm against i386.md).  */
struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();

/* which cpu are we scheduling for */
enum processor_type ix86_cpu;

/* which instruction set architecture to use. */
int ix86_arch;

/* Strings to hold which cpu and instruction set architecture to use. */
char *ix86_cpu_string;		/* for -mcpu=<xxx> */
char *ix86_arch_string;		/* for -march=<xxx> */

/* Register allocation order (the raw -mreg-alloc= string).  */
char *i386_reg_alloc_order;
/* Which registers appeared in -mreg-alloc=.  Filled in (and the
   string validated) by override_options; consumed by
   order_regs_for_local_alloc.  */
static char regs_allocated[FIRST_PSEUDO_REGISTER];

/* # of registers to use to pass arguments. */
char *i386_regparm_string;

/* i386_regparm_string as a number */
int i386_regparm;

/* Alignment to use for loops and jumps:  */

/* Power of two alignment for loops. */
char *i386_align_loops_string;

/* Power of two alignment for non-loop jumps. */
char *i386_align_jumps_string;

/* Values 1-5: see jump.c */
int i386_branch_cost;
char *i386_branch_cost_string;

/* Power of two alignment for functions. */
int i386_align_funcs;
char *i386_align_funcs_string;

/* Power of two alignment for loops. */
int i386_align_loops;

/* Power of two alignment for non-loop jumps. */
int i386_align_jumps;
214
215 /* Sometimes certain combinations of command options do not make
216 sense on a particular target machine. You can define a macro
217 `OVERRIDE_OPTIONS' to take account of this. This macro, if
218 defined, is executed once just after all the command options have
219 been parsed.
220
221 Don't use this macro to turn on various extra optimizations for
222 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
223
void
override_options ()
{
  int ch, i, j;
  int def_align;

  /* Table mapping each recognized -mcpu=/-march= name to its
     processor enum, cost table, and target flags to force on/off.
     Some processors appear twice under alias names (586/pentium,
     686/pentiumpro).  */
  static struct ptt
    {
      char *name;		/* Canonical processor name. */
      enum processor_type processor; /* Processor type enum value. */
      struct processor_costs *cost; /* Processor costs */
      int target_enable;	/* Target flags to enable. */
      int target_disable;	/* Target flags to disable. */
    } processor_target_table[]
      = {{PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
	 {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
	 {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
	 {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
	 {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentiumpro_cost,
	  0, 0},
	 {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO,
	  &pentiumpro_cost, 0, 0},
	 {PROCESSOR_K6_STRING, PROCESSOR_K6, &k6_cost, 0, 0}};

  int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Validate registers in register allocation order.  Each character
     of -mreg-alloc= names one register; unknown or repeated
     characters are fatal.  regs_allocated is later read by
     order_regs_for_local_alloc.  */
  if (i386_reg_alloc_order)
    {
      for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
	{
	  int regno = 0;

	  switch (ch)
	    {
	    case 'a': regno = 0; break;
	    case 'd': regno = 1; break;
	    case 'c': regno = 2; break;
	    case 'b': regno = 3; break;
	    case 'S': regno = 4; break;
	    case 'D': regno = 5; break;
	    case 'B': regno = 6; break;

	    default: fatal ("Register '%c' is unknown", ch);
	    }

	  if (regs_allocated[regno])
	    fatal ("Register '%c' already specified in allocation order", ch);

	  regs_allocated[regno] = 1;
	}
    }

  /* No -march= given: default the architecture to pentium, and the
     scheduling cpu to the configured default if it too is unset.  */
  if (ix86_arch_string == 0)
    {
      ix86_arch_string = PROCESSOR_PENTIUM_STRING;
      if (ix86_cpu_string == 0)
	ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
    }

  /* Look up -march=; on a match, -mcpu= defaults to the same name.  */
  for (i = 0; i < ptt_size; i++)
    if (! strcmp (ix86_arch_string, processor_target_table[i].name))
      {
	ix86_arch = processor_target_table[i].processor;
	if (ix86_cpu_string == 0)
	  ix86_cpu_string = processor_target_table[i].name;
	break;
      }

  if (i == ptt_size)
    {
      error ("bad value (%s) for -march= switch", ix86_arch_string);
      ix86_arch_string = PROCESSOR_PENTIUM_STRING;
      ix86_arch = PROCESSOR_DEFAULT;
    }

  if (ix86_cpu_string == 0)
    ix86_cpu_string = PROCESSOR_DEFAULT_STRING;

  /* Look up -mcpu=; this picks both the scheduling model and the cost
     table.  An -mcpu= earlier in the table (j) than a K6-or-newer
     -march= (i) is rejected as an inconsistent combination.  */
  for (j = 0; j < ptt_size; j++)
    if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
      {
	ix86_cpu = processor_target_table[j].processor;
	ix86_cost = processor_target_table[j].cost;
	if (i > j && (int) ix86_arch >= (int) PROCESSOR_K6)
	  error ("-mcpu=%s does not support -march=%s",
		 ix86_cpu_string, ix86_arch_string);

	target_flags |= processor_target_table[j].target_enable;
	target_flags &= ~processor_target_table[j].target_disable;
	break;
      }

  if (j == ptt_size)
    {
      error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
      ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
      ix86_cpu = PROCESSOR_DEFAULT;
    }

  /* Validate -mregparm= value. */
  if (i386_regparm_string)
    {
      i386_regparm = atoi (i386_regparm_string);
      if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
	fatal ("-mregparm=%d is not between 0 and %d",
	       i386_regparm, REGPARM_MAX);
    }

  /* The 486 suffers more from non-aligned cache line fills, and the
     larger code size results in a larger cache foot-print and more misses.
     The 486 has a 16 byte cache line, pentium and pentiumpro have a 32 byte
     cache line. */
  def_align = (TARGET_486) ? 4 : 2;

  /* Validate -malign-loops= value, or provide default. */
  if (i386_align_loops_string)
    {
      i386_align_loops = atoi (i386_align_loops_string);
      if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
	fatal ("-malign-loops=%d is not between 0 and %d",
	       i386_align_loops, MAX_CODE_ALIGN);
    }
  else
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
    i386_align_loops = 4;
#else
    /* NOTE(review): this default is the literal 2 rather than
       def_align as used for jumps/functions below — confirm that
       difference is intentional.  */
    i386_align_loops = 2;
#endif

  /* Validate -malign-jumps= value, or provide default. */
  if (i386_align_jumps_string)
    {
      i386_align_jumps = atoi (i386_align_jumps_string);
      if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
	fatal ("-malign-jumps=%d is not between 0 and %d",
	       i386_align_jumps, MAX_CODE_ALIGN);
    }
  else
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
    i386_align_jumps = 4;
#else
    i386_align_jumps = def_align;
#endif

  /* Validate -malign-functions= value, or provide default. */
  if (i386_align_funcs_string)
    {
      i386_align_funcs = atoi (i386_align_funcs_string);
      if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
	fatal ("-malign-functions=%d is not between 0 and %d",
	       i386_align_funcs, MAX_CODE_ALIGN);
    }
  else
    i386_align_funcs = def_align;

  /* Validate -mbranch-cost= value, or provide default. */
  if (i386_branch_cost_string)
    {
      i386_branch_cost = atoi (i386_branch_cost_string);
      if (i386_branch_cost < 0 || i386_branch_cost > 5)
	fatal ("-mbranch-cost=%d is not between 0 and 5",
	       i386_branch_cost);
    }
  else
    i386_branch_cost = 1;

  /* Keep nonleaf frame pointers.  (The flag set here only omits the
     frame pointer in leaf functions; presumably the nonleaf case is
     forced elsewhere, e.g. FRAME_POINTER_REQUIRED — confirm.)  */
  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;
}
399 \f
400 /* A C statement (sans semicolon) to choose the order in which to
401 allocate hard registers for pseudo-registers local to a basic
402 block.
403
404 Store the desired register order in the array `reg_alloc_order'.
405 Element 0 should be the register to allocate first; element 1, the
406 next register; and so on.
407
408 The macro body should not assume anything about the contents of
409 `reg_alloc_order' before execution of the macro.
410
411 On most machines, it is not necessary to define this macro. */
412
413 void
414 order_regs_for_local_alloc ()
415 {
416 int i, ch, order;
417
418 /* User specified the register allocation order. */
419
420 if (i386_reg_alloc_order)
421 {
422 for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
423 {
424 int regno = 0;
425
426 switch (ch)
427 {
428 case 'a': regno = 0; break;
429 case 'd': regno = 1; break;
430 case 'c': regno = 2; break;
431 case 'b': regno = 3; break;
432 case 'S': regno = 4; break;
433 case 'D': regno = 5; break;
434 case 'B': regno = 6; break;
435 }
436
437 reg_alloc_order[order++] = regno;
438 }
439
440 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
441 {
442 if (! regs_allocated[i])
443 reg_alloc_order[order++] = i;
444 }
445 }
446
447 /* If user did not specify a register allocation order, use natural order. */
448 else
449 {
450 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
451 reg_alloc_order[i] = i;
452 }
453 }
454 \f
void
optimization_options (level, size)
     int level;
     int size ATTRIBUTE_UNUSED;
{
  /* For -O2 and beyond, turn off -fschedule-insns by default.  It tends to
     make the problem with not enough registers even worse
     (pre-reload scheduling lengthens live ranges, and the x86 has
     very few registers to spare).  */
#ifdef INSN_SCHEDULING
  if (level > 1)
    flag_schedule_insns = 0;
#endif
}
467 \f
468 /* Sign-extend a 16-bit constant */
469
470 struct rtx_def *
471 i386_sext16_if_const (op)
472 struct rtx_def *op;
473 {
474 if (GET_CODE (op) == CONST_INT)
475 {
476 HOST_WIDE_INT val = INTVAL (op);
477 HOST_WIDE_INT sext_val;
478 if (val & 0x8000)
479 sext_val = val | ~0xffff;
480 else
481 sext_val = val & 0xffff;
482 if (sext_val != val)
483 op = GEN_INT (sext_val);
484 }
485 return op;
486 }
487 \f
488 /* Return nonzero if the rtx is aligned */
489
490 static int
491 i386_aligned_reg_p (regno)
492 int regno;
493 {
494 return (regno == STACK_POINTER_REGNUM
495 || (! flag_omit_frame_pointer && regno == FRAME_POINTER_REGNUM));
496 }
497
int
i386_aligned_p (op)
     rtx op;
{
  /* Registers and immediate operands are always "aligned". */
  if (GET_CODE (op) != MEM)
    return 1;

  /* Don't even try to do any aligned optimizations with volatiles. */
  if (MEM_VOLATILE_P (op))
    return 0;

  /* Get address of memory operand. */
  op = XEXP (op, 0);

  /* Classify the address.  "Aligned" means the low two address bits
     are known zero, i.e. 4-byte alignment.  */
  switch (GET_CODE (op))
    {
    case CONST_INT:
      /* An absolute address is aligned iff the constant is.  */
      if (INTVAL (op) & 3)
	break;
      return 1;

      /* Match "reg + offset" */
    case PLUS:
      /* Aligned only when the offset is a 4-byte-multiple constant
	 and the base register is itself known-aligned.  */
      if (GET_CODE (XEXP (op, 1)) != CONST_INT)
	break;
      if (INTVAL (XEXP (op, 1)) & 3)
	break;

      op = XEXP (op, 0);
      if (GET_CODE (op) != REG)
	break;

      /* ... fall through ... */

    case REG:
      return i386_aligned_reg_p (REGNO (op));

    default:
      break;
    }

  return 0;
}
542 \f
543 /* Return nonzero if INSN looks like it won't compute useful cc bits
544 as a side effect. This information is only a hint. */
545
546 int
547 i386_cc_probably_useless_p (insn)
548 rtx insn;
549 {
550 return ! next_cc0_user (insn);
551 }
552 \f
553 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
554 attribute for DECL. The attributes in ATTRIBUTES have previously been
555 assigned to DECL. */
556
int
i386_valid_decl_attribute_p (decl, attributes, identifier, args)
     tree decl ATTRIBUTE_UNUSED;
     tree attributes ATTRIBUTE_UNUSED;
     tree identifier ATTRIBUTE_UNUSED;
     tree args ATTRIBUTE_UNUSED;
{
  /* No machine-specific attribute is valid on declarations; the i386
     attributes are all type attributes, handled by
     i386_valid_type_attribute_p below.  */
  return 0;
}
566
567 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
568 attribute for TYPE. The attributes in ATTRIBUTES have previously been
569 assigned to TYPE. */
570
int
i386_valid_type_attribute_p (type, attributes, identifier, args)
     tree type;
     tree attributes ATTRIBUTE_UNUSED;
     tree identifier;
     tree args;
{
  /* Only these tree codes may carry the i386 attributes.  */
  if (TREE_CODE (type) != FUNCTION_TYPE
      && TREE_CODE (type) != METHOD_TYPE
      && TREE_CODE (type) != FIELD_DECL
      && TREE_CODE (type) != TYPE_DECL)
    return 0;

  /* Stdcall attribute says callee is responsible for popping arguments
     if they are not variable.  Takes no arguments.  */
  if (is_attribute_p ("stdcall", identifier))
    return (args == NULL_TREE);

  /* Cdecl attribute says the callee is a normal C declaration.
     Takes no arguments.  */
  if (is_attribute_p ("cdecl", identifier))
    return (args == NULL_TREE);

  /* Regparm attribute specifies how many integer arguments are to be
     passed in registers.  Requires exactly one INTEGER_CST argument
     in the range [0, REGPARM_MAX].  */
  if (is_attribute_p ("regparm", identifier))
    {
      tree cst;

      /* Must be a one-element TREE_LIST with a non-null value.  */
      if (! args || TREE_CODE (args) != TREE_LIST
	  || TREE_CHAIN (args) != NULL_TREE
	  || TREE_VALUE (args) == NULL_TREE)
	return 0;

      cst = TREE_VALUE (args);
      if (TREE_CODE (cst) != INTEGER_CST)
	return 0;

      /* NOTE(review): if TREE_INT_CST_LOW expands to an unsigned
	 type, the `< 0' test below can never be true — confirm
	 against tree.h.  */
      if (TREE_INT_CST_HIGH (cst) != 0
	  || TREE_INT_CST_LOW (cst) < 0
	  || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
	return 0;

      return 1;
    }

  return 0;
}
618
619 /* Return 0 if the attributes for two types are incompatible, 1 if they
620 are compatible, and 2 if they are nearly compatible (which causes a
621 warning to be generated). */
622
int
i386_comp_type_attributes (type1, type2)
     tree type1 ATTRIBUTE_UNUSED;
     tree type2 ATTRIBUTE_UNUSED;
{
  /* Currently every combination of i386 type attributes is treated
     as compatible.  */
  return 1;
}
630
631 \f
632 /* Value is the number of bytes of arguments automatically
633 popped when returning from a subroutine call.
634 FUNDECL is the declaration node of the function (as a tree),
635 FUNTYPE is the data type of the function (as a tree),
636 or for a library call it is an identifier node for the subroutine name.
637 SIZE is the number of bytes of arguments passed on the stack.
638
639 On the 80386, the RTD insn may be used to pop them if the number
640 of args is fixed, but if the number is variable then the caller
641 must pop them all. RTD can't be used for library calls now
642 because the library is compiled with the Unix compiler.
643 Use of RTD is a selectable option, since it is incompatible with
644 standard Unix calling sequences. If the option is not selected,
645 the caller must always pop the args.
646
647 The attribute stdcall is equivalent to RTD on a per module basis. */
648
649 int
650 i386_return_pops_args (fundecl, funtype, size)
651 tree fundecl;
652 tree funtype;
653 int size;
654 {
655 int rtd = TARGET_RTD && (!fundecl || TREE_CODE (fundecl) != IDENTIFIER_NODE);
656
657 /* Cdecl functions override -mrtd, and never pop the stack. */
658 if (! lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {
659
660 /* Stdcall functions will pop the stack if not variable args. */
661 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
662 rtd = 1;
663
664 if (rtd
665 && (TYPE_ARG_TYPES (funtype) == NULL_TREE
666 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype)))
667 == void_type_node)))
668 return size;
669 }
670
671 /* Lose any fake structure return argument. */
672 if (aggregate_value_p (TREE_TYPE (funtype)))
673 return GET_MODE_SIZE (Pmode);
674
675 return 0;
676 }
677
678 \f
679 /* Argument support functions. */
680
681 /* Initialize a variable CUM of type CUMULATIVE_ARGS
682 for a call to a function whose data type is FNTYPE.
683 For a library call, FNTYPE is 0. */
684
void
init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;	/* Argument info to initialize */
     tree fntype;		/* tree ptr for function decl */
     rtx libname;		/* SYMBOL_REF of library name or 0 */
{
  static CUMULATIVE_ARGS zero_cum;
  tree param, next_param;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\ninit_cumulative_args (");
      if (fntype)
	fprintf (stderr, "fntype code = %s, ret code = %s",
		 tree_code_name[(int) TREE_CODE (fntype)],
		 tree_code_name[(int) TREE_CODE (TREE_TYPE (fntype))]);
      else
	fprintf (stderr, "no fntype");

      if (libname)
	fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  /* Start from an all-zero state.  */
  *cum = zero_cum;

  /* Set up the number of registers to use for passing arguments.
     -mregparm= gives the global default; a "regparm" attribute on
     the function type overrides it.  */
  cum->nregs = i386_regparm;
  if (fntype)
    {
      tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));

      if (attr)
	cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
    }

  /* Determine if this function has variable arguments.  This is
     indicated by the last argument being 'void_type_mode' if there
     are no variable arguments.  If there are variable arguments, then
     we won't pass anything in registers */

  if (cum->nregs)
    {
      for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
	   param != 0; param = next_param)
	{
	  next_param = TREE_CHAIN (param);
	  /* A parameter list not terminated by void_type_node means
	     varargs: disable register passing entirely.  */
	  if (next_param == 0 && TREE_VALUE (param) != void_type_node)
	    cum->nregs = 0;
	}
    }

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, ", nregs=%d )\n", cum->nregs);

  return;
}
741
742 /* Update the data in CUM to advance over an argument
743 of mode MODE and data type TYPE.
744 (TYPE is null for libcalls where that information may not be available.) */
745
void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int named;			/* whether or not the argument was named */
{
  /* Size of this argument in bytes, then rounded up to whole words.  */
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (TARGET_DEBUG_ARG)
    fprintf (stderr,
	     "function_adv (sz=%d, wds=%2d, nregs=%d, mode=%s, named=%d)\n\n",
	     words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);

  /* Each word of argument consumes one register.  */
  cum->words += words;
  cum->nregs -= words;
  cum->regno += words;

  /* Once the registers are exhausted (nregs can go negative when an
     argument straddles the limit), all further arguments go on the
     stack.  */
  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = 0;
    }

  return;
}
774
775 /* Define where to put the arguments to a function.
776 Value is zero to push the argument on the stack,
777 or a hard register in which to store the argument.
778
779 MODE is the argument's machine mode.
780 TYPE is the data type of the argument (as a tree).
781 This is null for libcalls where that information may
782 not be available.
783 CUM is a variable of type CUMULATIVE_ARGS which gives info about
784 the preceding args and about the function being called.
785 NAMED is nonzero if this argument is a named parameter
786 (otherwise it is an extra parameter matching an ellipsis). */
787
struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int named;			/* != 0 for normal args, == 0 for ... args */
{
  rtx ret = NULL_RTX;
  /* Argument size in bytes, rounded up to whole words.  */
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  switch (mode)
    {
      /* For now, pass fp/complex values on the stack. */
    default:
      break;

    case BLKmode:
    case DImode:
    case SImode:
    case HImode:
    case QImode:
      /* Use a register only when the whole argument fits in the
	 registers still available; cum->regno is the next free one.  */
      if (words <= cum->nregs)
	ret = gen_rtx_REG (mode, cum->regno);
      break;
    }

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr,
	       "function_arg (size=%d, wds=%2d, nregs=%d, mode=%4s, named=%d",
	       words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);

      if (ret)
	fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
      else
	fprintf (stderr, ", stack");

      fprintf (stderr, " )\n");
    }

  return ret;
}
832
833 /* For an arg passed partly in registers and partly in memory,
834 this is the number of registers used.
835 For args passed entirely in registers or entirely in memory, zero. */
836
int
function_arg_partial_nregs (cum, mode, type, named)
     CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED; /* current arg information */
     enum machine_mode mode ATTRIBUTE_UNUSED; /* current arg mode */
     tree type ATTRIBUTE_UNUSED;	/* type of the argument or 0 if lib support */
     int named ATTRIBUTE_UNUSED;	/* != 0 for normal args, == 0 for ... args */
{
  /* Always 0: an argument is passed entirely in registers or entirely
     on the stack, never split (function_arg only picks a register
     when the whole argument fits).  */
  return 0;
}
846 \f
847 /* Output an insn whose source is a 386 integer register. SRC is the
848 rtx for the register, and TEMPLATE is the op-code template. SRC may
849 be either SImode or DImode.
850
851 The template will be output with operands[0] as SRC, and operands[1]
852 as a pointer to the top of the 386 stack. So a call from floatsidf2
853 would look like this:
854
855 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
856
857 where %z0 corresponds to the caller's operands[1], and is used to
858 emit the proper size suffix.
859
860 ??? Extend this to handle HImode - a 387 can load and store HImode
861 values directly. */
862
void
output_op_from_reg (src, template)
     rtx src;
     char *template;
{
  rtx xops[4];
  int size = GET_MODE_SIZE (GET_MODE (src));

  xops[0] = src;
  xops[1] = AT_SP (Pmode);	/* %1: memory at the (new) stack top */
  xops[2] = GEN_INT (size);	/* %2: bytes to discard afterwards */
  xops[3] = stack_pointer_rtx;	/* %3: stack pointer */

  /* Push SRC one word at a time, most significant word first, so the
     value ends up contiguous (little-endian) at the stack top.  */
  if (size > UNITS_PER_WORD)
    {
      rtx high;

      if (size > 2 * UNITS_PER_WORD)
	{
	  high = gen_rtx_REG (SImode, REGNO (src) + 2);
	  output_asm_insn (AS1 (push%L0,%0), &high);
	}

      high = gen_rtx_REG (SImode, REGNO (src) + 1);
      output_asm_insn (AS1 (push%L0,%0), &high);
    }

  /* Push the low word, emit the caller's TEMPLATE (which reads the
     pushed value through %1), then pop the temporary by adding its
     size back onto the stack pointer.  */
  output_asm_insn (AS1 (push%L0,%0), &src);
  output_asm_insn (template, xops);
  output_asm_insn (AS2 (add%L3,%2,%3), xops);
}
894 \f
895 /* Output an insn to pop an value from the 387 top-of-stack to 386
896 register DEST. The 387 register stack is popped if DIES is true. If
897 the mode of DEST is an integer mode, a `fist' integer store is done,
898 otherwise a `fst' float store is done. */
899
void
output_to_reg (dest, dies, scratch_mem)
     rtx dest;
     int dies;
     rtx scratch_mem;
{
  rtx xops[4];
  int size = GET_MODE_SIZE (GET_MODE (dest));

  /* The value is first stored either into SCRATCH_MEM or into freshly
     reserved space at the stack top, then moved into DEST.  */
  if (! scratch_mem)
    xops[0] = AT_SP (Pmode);
  else
    xops[0] = scratch_mem;

  xops[1] = stack_pointer_rtx;
  xops[2] = GEN_INT (size);
  xops[3] = dest;

  /* Reserve room on the 386 stack for the stored value.  */
  if (! scratch_mem)
    output_asm_insn (AS2 (sub%L1,%2,%1), xops);

  if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
    {
      if (dies)
	output_asm_insn (AS1 (fistp%z3,%y0), xops);
      /* (The `! dies' below is redundant — we are already in the else
	 of `if (dies)' — kept as-is.)  */
      else if (GET_MODE (xops[3]) == DImode && ! dies)
	{
	  /* There is no DImode version of this without a stack pop, so
	     we must emulate it.  It doesn't matter much what the second
	     instruction is, because the value being pushed on the FP stack
	     is not used except for the following stack popping store.
	     This case can only happen without optimization, so it doesn't
	     matter that it is inefficient.  */
	  output_asm_insn (AS1 (fistp%z3,%0), xops);
	  output_asm_insn (AS1 (fild%z3,%0), xops);
	}
      else
	output_asm_insn (AS1 (fist%z3,%y0), xops);
    }

  else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
    {
      if (dies)
	output_asm_insn (AS1 (fstp%z3,%y0), xops);
      else
	{
	  if (GET_MODE (dest) == XFmode)
	    {
	      /* No non-popping extended-precision store exists, so
		 store-with-pop and reload to keep the stack entry.  */
	      output_asm_insn (AS1 (fstp%z3,%y0), xops);
	      output_asm_insn (AS1 (fld%z3,%y0), xops);
	    }
	  else
	    output_asm_insn (AS1 (fst%z3,%y0), xops);
	}
    }

  else
    abort ();

  /* Move the stored value into DEST a word at a time: either pop it
     off the 386 stack, or copy it out of SCRATCH_MEM.  */
  if (! scratch_mem)
    output_asm_insn (AS1 (pop%L0,%0), &dest);
  else
    output_asm_insn (AS2 (mov%L0,%0,%3), xops);


  if (size > UNITS_PER_WORD)
    {
      /* Second word goes into the next-higher register.  */
      dest = gen_rtx_REG (SImode, REGNO (dest) + 1);
      if (! scratch_mem)
	output_asm_insn (AS1 (pop%L0,%0), &dest);
      else
	{
	  xops[0] = adj_offsettable_operand (xops[0], 4);
	  xops[3] = dest;
	  output_asm_insn (AS2 (mov%L0,%0,%3), xops);
	}

      if (size > 2 * UNITS_PER_WORD)
	{
	  /* Third word (XFmode) likewise.  */
	  dest = gen_rtx_REG (SImode, REGNO (dest) + 1);
	  if (! scratch_mem)
	    output_asm_insn (AS1 (pop%L0,%0), &dest);
	  else
	    {
	      xops[0] = adj_offsettable_operand (xops[0], 4);
	      output_asm_insn (AS2 (mov%L0,%0,%3), xops);
	    }
	}
    }
}
990 \f
char *
singlemove_string (operands)
     rtx *operands;
{
  rtx x;
  /* A store through a pre-decremented address is a push; only the
     stack pointer may be pre-decremented here.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
    {
      if (XEXP (x, 0) != stack_pointer_rtx)
	abort ();
      return "push%L1 %1";
    }
  else if (GET_CODE (operands[1]) == CONST_DOUBLE)
    return output_move_const_single (operands);
  else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
    return AS2 (mov%L0,%1,%0);
  else if (CONSTANT_P (operands[1]))
    return AS2 (mov%L0,%1,%0);
  else
    {
      /* Memory-to-memory: the 386 has no direct mem-to-mem move, so
	 bounce the value through the stack with a push/pop pair.  */
      output_asm_insn ("push%L1 %1", operands);
      return "pop%L0 %0";
    }
}
1015 \f
1016 /* Return a REG that occurs in ADDR with coefficient 1.
1017 ADDR can be effectively incremented by incrementing REG. */
1018
1019 static rtx
1020 find_addr_reg (addr)
1021 rtx addr;
1022 {
1023 while (GET_CODE (addr) == PLUS)
1024 {
1025 if (GET_CODE (XEXP (addr, 0)) == REG)
1026 addr = XEXP (addr, 0);
1027 else if (GET_CODE (XEXP (addr, 1)) == REG)
1028 addr = XEXP (addr, 1);
1029 else if (CONSTANT_P (XEXP (addr, 0)))
1030 addr = XEXP (addr, 1);
1031 else if (CONSTANT_P (XEXP (addr, 1)))
1032 addr = XEXP (addr, 0);
1033 else
1034 abort ();
1035 }
1036
1037 if (GET_CODE (addr) == REG)
1038 return addr;
1039 abort ();
1040 }
1041 \f
1042 /* Output an insn to add the constant N to the register X. */
1043
1044 static void
1045 asm_add (n, x)
1046 int n;
1047 rtx x;
1048 {
1049 rtx xops[2];
1050 xops[0] = x;
1051
1052 if (n == -1)
1053 output_asm_insn (AS1 (dec%L0,%0), xops);
1054 else if (n == 1)
1055 output_asm_insn (AS1 (inc%L0,%0), xops);
1056 else if (n < 0 || n == 128)
1057 {
1058 xops[1] = GEN_INT (-n);
1059 output_asm_insn (AS2 (sub%L0,%1,%0), xops);
1060 }
1061 else if (n > 0)
1062 {
1063 xops[1] = GEN_INT (n);
1064 output_asm_insn (AS2 (add%L0,%1,%0), xops);
1065 }
1066 }
1067 \f
/* Output assembler code to perform a doubleword move insn
   with operands OPERANDS.

   The move is emitted as two (or, for 12-byte XFmode values, three)
   single-word moves, ordered so that no part of the destination is
   clobbered while it is still needed to address or supply the source.
   Unoffsettable memory operands are handled by temporarily adjusting
   an address register (via asm_add) between the word moves.

   Returns the assembler template for the final word's move, or ""
   when every piece has already been emitted with output_asm_insn.  */

char *
output_move_double (operands)
     rtx *operands;
{
  enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
  rtx latehalf[2];
  rtx middlehalf[2];
  rtx xops[2];
  rtx addreg0 = 0, addreg1 = 0;
  int dest_overlapped_low = 0;
  int size = GET_MODE_SIZE (GET_MODE (operands[0]));

  middlehalf[0] = 0;
  middlehalf[1] = 0;

  /* First classify both operands.  */

  if (REG_P (operands[0]))
    optype0 = REGOP;
  else if (offsettable_memref_p (operands[0]))
    optype0 = OFFSOP;
  else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
    optype0 = POPOP;
  else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
    optype0 = PUSHOP;
  else if (GET_CODE (operands[0]) == MEM)
    optype0 = MEMOP;
  else
    optype0 = RNDOP;

  if (REG_P (operands[1]))
    optype1 = REGOP;
  else if (CONSTANT_P (operands[1]))
    optype1 = CNSTOP;
  else if (offsettable_memref_p (operands[1]))
    optype1 = OFFSOP;
  else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
    optype1 = POPOP;
  else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
    optype1 = PUSHOP;
  else if (GET_CODE (operands[1]) == MEM)
    optype1 = MEMOP;
  else
    optype1 = RNDOP;

  /* Check for the cases that the operand constraints are not
     supposed to allow to happen.  Abort if we get one,
     because generating code for these cases is painful.  */

  if (optype0 == RNDOP || optype1 == RNDOP)
    abort ();

  /* If one operand is decrementing and one is incrementing
     decrement the former register explicitly
     and change that operand into ordinary indexing.  */

  if (optype0 == PUSHOP && optype1 == POPOP)
    {
      /* ??? Can this ever happen on i386? */
      operands[0] = XEXP (XEXP (operands[0], 0), 0);
      asm_add (-size, operands[0]);
      /* NOTE(review): the XFmode arm tests operands[1] but the DFmode
	 arm tests operands[0], which at this point holds the address
	 register (SImode) — so the DFmode arm looks unreachable and any
	 non-XF value falls through to DImode.  Confirm the intended
	 mode source before relying on the MEM mode chosen here.  */
      if (GET_MODE (operands[1]) == XFmode)
	operands[0] = gen_rtx_MEM (XFmode, operands[0]);
      else if (GET_MODE (operands[0]) == DFmode)
	operands[0] = gen_rtx_MEM (DFmode, operands[0]);
      else
	operands[0] = gen_rtx_MEM (DImode, operands[0]);
      optype0 = OFFSOP;
    }

  if (optype0 == POPOP && optype1 == PUSHOP)
    {
      /* ??? Can this ever happen on i386? */
      operands[1] = XEXP (XEXP (operands[1], 0), 0);
      asm_add (-size, operands[1]);
      if (GET_MODE (operands[1]) == XFmode)
	operands[1] = gen_rtx_MEM (XFmode, operands[1]);
      else if (GET_MODE (operands[1]) == DFmode)
	operands[1] = gen_rtx_MEM (DFmode, operands[1]);
      else
	operands[1] = gen_rtx_MEM (DImode, operands[1]);
      optype1 = OFFSOP;
    }

  /* If an operand is an unoffsettable memory ref, find a register
     we can increment temporarily to make it refer to the second word.  */

  if (optype0 == MEMOP)
    addreg0 = find_addr_reg (XEXP (operands[0], 0));

  if (optype1 == MEMOP)
    addreg1 = find_addr_reg (XEXP (operands[1], 0));

  /* Ok, we can do one word at a time.
     Normally we do the low-numbered word first,
     but if either operand is autodecrementing then we
     do the high-numbered word first.

     In either case, set up in LATEHALF the operands to use
     for the high-numbered word and in some cases alter the
     operands in OPERANDS to be suitable for the low-numbered word.  */

  if (size == 12)
    {
      if (optype0 == REGOP)
	{
	  middlehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
	  latehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 2);
	}
      else if (optype0 == OFFSOP)
	{
	  middlehalf[0] = adj_offsettable_operand (operands[0], 4);
	  latehalf[0] = adj_offsettable_operand (operands[0], 8);
	}
      else
	{
	  middlehalf[0] = operands[0];
	  latehalf[0] = operands[0];
	}

      if (optype1 == REGOP)
	{
	  middlehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
	  latehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 2);
	}
      else if (optype1 == OFFSOP)
	{
	  middlehalf[1] = adj_offsettable_operand (operands[1], 4);
	  latehalf[1] = adj_offsettable_operand (operands[1], 8);
	}
      else if (optype1 == CNSTOP)
	{
	  if (GET_CODE (operands[1]) == CONST_DOUBLE)
	    {
	      /* Split a long-double constant into its three 32-bit
		 words, in target memory order.  */
	      REAL_VALUE_TYPE r; long l[3];

	      REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
	      REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
	      operands[1] = GEN_INT (l[0]);
	      middlehalf[1] = GEN_INT (l[1]);
	      latehalf[1] = GEN_INT (l[2]);
	    }
	  else if (CONSTANT_P (operands[1]))
	    /* No non-CONST_DOUBLE constant should ever appear here.  */
	    abort ();
	}
      else
	{
	  middlehalf[1] = operands[1];
	  latehalf[1] = operands[1];
	}
    }

  else
    {
      /* Size is not 12.  */

      if (optype0 == REGOP)
	latehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
      else if (optype0 == OFFSOP)
	latehalf[0] = adj_offsettable_operand (operands[0], 4);
      else
	latehalf[0] = operands[0];

      if (optype1 == REGOP)
	latehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
      else if (optype1 == OFFSOP)
	latehalf[1] = adj_offsettable_operand (operands[1], 4);
      else if (optype1 == CNSTOP)
	split_double (operands[1], &operands[1], &latehalf[1]);
      else
	latehalf[1] = operands[1];
    }

  /* If insn is effectively movd N (sp),-(sp) then we will do the
     high word first.  We should use the adjusted operand 1
     (which is N+4 (sp) or N+8 (sp))
     for the low word and middle word as well,
     to compensate for the first decrement of sp.  */
  if (optype0 == PUSHOP
      && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
      && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
    middlehalf[1] = operands[1] = latehalf[1];

  /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
     if the upper part of reg N does not appear in the MEM, arrange to
     emit the move late-half first.  Otherwise, compute the MEM address
     into the upper part of N and use that as a pointer to the memory
     operand.  */
  if (optype0 == REGOP
      && (optype1 == OFFSOP || optype1 == MEMOP))
    {
      if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
	  && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
	{
	  /* If both halves of dest are used in the src memory address,
	     compute the address into latehalf of dest.  */
	compadr:
	  xops[0] = latehalf[0];
	  xops[1] = XEXP (operands[1], 0);
	  output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
	  if (GET_MODE (operands[1]) == XFmode)
	    {
	      operands[1] = gen_rtx_MEM (XFmode, latehalf[0]);
	      middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
	      latehalf[1] = adj_offsettable_operand (operands[1], size-4);
	    }
	  else
	    {
	      operands[1] = gen_rtx_MEM (DImode, latehalf[0]);
	      latehalf[1] = adj_offsettable_operand (operands[1], size-4);
	    }
	}

      else if (size == 12
	       && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
	{
	  /* Check for two regs used by both source and dest.  */
	  if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
	      || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
	    goto compadr;

	  /* JRV says this can't happen: */
	  if (addreg0 || addreg1)
	    abort ();

	  /* Only the middle reg conflicts; simply put it last.  */
	  output_asm_insn (singlemove_string (operands), operands);
	  output_asm_insn (singlemove_string (latehalf), latehalf);
	  output_asm_insn (singlemove_string (middlehalf), middlehalf);
	  return "";
	}

      else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
	/* If the low half of dest is mentioned in the source memory
	   address, then arrange to emit the move late half first.  */
	dest_overlapped_low = 1;
    }

  /* If one or both operands autodecrementing,
     do the two words, high-numbered first.  */

  /* Likewise, if the first move would clobber the source of the second
     one, do them in the other order.  This happens only for registers;
     such overlap can't happen in memory unless the user explicitly
     sets it up, and that is an undefined circumstance.  */

#if 0
  if (optype0 == PUSHOP || optype1 == PUSHOP
      || (optype0 == REGOP && optype1 == REGOP
	  && REGNO (operands[0]) == REGNO (latehalf[1]))
      || dest_overlapped_low)
#endif

  if (optype0 == PUSHOP || optype1 == PUSHOP
      || (optype0 == REGOP && optype1 == REGOP
	  && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
	      || REGNO (operands[0]) == REGNO (latehalf[1])))
      || dest_overlapped_low)
    {
      /* Make any unoffsettable addresses point at high-numbered word.  */
      if (addreg0)
	asm_add (size-4, addreg0);
      if (addreg1)
	asm_add (size-4, addreg1);

      /* Do that word.  */
      output_asm_insn (singlemove_string (latehalf), latehalf);

      /* Undo the adds we just did.  */
      if (addreg0)
	asm_add (-4, addreg0);
      if (addreg1)
	asm_add (-4, addreg1);

      if (size == 12)
	{
	  output_asm_insn (singlemove_string (middlehalf), middlehalf);
	  if (addreg0)
	    asm_add (-4, addreg0);
	  if (addreg1)
	    asm_add (-4, addreg1);
	}

      /* Do low-numbered word.  */
      return singlemove_string (operands);
    }

  /* Normal case: do the two words, low-numbered first.  */

  output_asm_insn (singlemove_string (operands), operands);

  /* Do the middle one of the three words for long double */
  if (size == 12)
    {
      if (addreg0)
	asm_add (4, addreg0);
      if (addreg1)
	asm_add (4, addreg1);

      output_asm_insn (singlemove_string (middlehalf), middlehalf);
    }

  /* Make any unoffsettable addresses point at high-numbered word.  */
  if (addreg0)
    asm_add (4, addreg0);
  if (addreg1)
    asm_add (4, addreg1);

  /* Do that word.  */
  output_asm_insn (singlemove_string (latehalf), latehalf);

  /* Undo the adds we just did.  */
  if (addreg0)
    asm_add (4-size, addreg0);
  if (addreg1)
    asm_add (4-size, addreg1);

  return "";
}
1391 \f
#define MAX_TMPS 2		/* max temporary registers used */

/* Output the appropriate code to push LENGTH bytes of memory onto the
   stack.  OPERANDS[1] is the (offsettable) source; the operands at
   indices TMP_START .. N_OPERANDS-1 may supply scratch registers.
   INSN is used only for error reporting.  LENGTH must be a multiple
   of 4.  Words are pushed highest-address first.  When the source
   address involves the stack pointer, STACK_OFFSET compensates for
   the moves of sp made by the pushes already emitted.  Returns "".  */

char *
output_move_pushmem (operands, insn, length, tmp_start, n_operands)
     rtx operands[];
     rtx insn;
     int length;
     int tmp_start;
     int n_operands;
{
  struct
    {
      char *load;
      char *push;
      rtx xops[2];
    } tmp_info[MAX_TMPS];

  rtx src = operands[1];
  int max_tmps = 0;
  int offset = 0;
  int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
  int stack_offset = 0;
  int i, num_tmps;
  rtx xops[1];

  if (! offsettable_memref_p (src))
    fatal_insn ("Source is not offsettable", insn);

  if ((length & 3) != 0)
    fatal_insn ("Pushing non-word aligned size", insn);

  /* Figure out which temporary registers we have available */
  for (i = tmp_start; i < n_operands; i++)
    {
      if (GET_CODE (operands[i]) == REG)
	{
	  /* A scratch that overlaps the source would be clobbered by
	     the loads below, so skip it.  */
	  if (reg_overlap_mentioned_p (operands[i], src))
	    continue;

	  tmp_info[ max_tmps++ ].xops[1] = operands[i];
	  if (max_tmps == MAX_TMPS)
	    break;
	}
    }

  /* With no usable scratch register, push straight from memory;
     otherwise batch loads into scratches and push from registers.  */
  if (max_tmps == 0)
    for (offset = length - 4; offset >= 0; offset -= 4)
      {
	xops[0] = adj_offsettable_operand (src, offset + stack_offset);
	output_asm_insn (AS1(push%L0,%0), xops);
	if (stack_p)
	  stack_offset += 4;
      }

  else
    for (offset = length - 4; offset >= 0; )
      {
	for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
	  {
	    tmp_info[num_tmps].load = AS2(mov%L0,%0,%1);
	    tmp_info[num_tmps].push = AS1(push%L0,%1);
	    tmp_info[num_tmps].xops[0]
	      = adj_offsettable_operand (src, offset + stack_offset);
	    offset -= 4;
	  }

	/* Emit all of the batch's loads before any of its pushes.  */
	for (i = 0; i < num_tmps; i++)
	  output_asm_insn (tmp_info[i].load, tmp_info[i].xops);

	for (i = 0; i < num_tmps; i++)
	  output_asm_insn (tmp_info[i].push, tmp_info[i].xops);

	if (stack_p)
	  stack_offset += 4*num_tmps;
      }

  return "";
}
1472 \f
/* Output the appropriate code to move LENGTH bytes of data between the
   two offsettable memory locations DEST (operands[0]) and SRC
   (operands[1]), using the scratch registers supplied in
   operands[TMP_START .. N_OPERANDS-1].  A push onto the stack is
   delegated to output_move_pushmem.  Data is moved in 4-byte, then
   2-byte pieces, with a final odd byte going through a byte-capable
   (QImode) scratch register.  INSN is used only for error reporting.
   Returns "".  */

char *
output_move_memory (operands, insn, length, tmp_start, n_operands)
     rtx operands[];
     rtx insn;
     int length;
     int tmp_start;
     int n_operands;
{
  struct
    {
      char *load;
      char *store;
      rtx xops[3];
    } tmp_info[MAX_TMPS];

  rtx dest = operands[0];
  rtx src = operands[1];
  rtx qi_tmp = NULL_RTX;	/* byte-capable scratch for a trailing odd byte */
  int max_tmps = 0;
  int offset = 0;
  int i, num_tmps;
  rtx xops[3];

  /* NOTE(review): this recognizes a stack push destination via PRE_INC
     although pushes elsewhere in this file use PRE_DEC — confirm
     against the insn patterns that reach this routine.  */
  if (GET_CODE (dest) == MEM
      && GET_CODE (XEXP (dest, 0)) == PRE_INC
      && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
    return output_move_pushmem (operands, insn, length, tmp_start, n_operands);

  if (! offsettable_memref_p (src))
    fatal_insn ("Source is not offsettable", insn);

  if (! offsettable_memref_p (dest))
    fatal_insn ("Destination is not offsettable", insn);

  /* Figure out which temporary registers we have available */
  for (i = tmp_start; i < n_operands; i++)
    {
      if (GET_CODE (operands[i]) == REG)
	{
	  /* Remember one QImode-capable register in case LENGTH is odd.  */
	  if ((length & 1) != 0 && qi_tmp == 0 && QI_REG_P (operands[i]))
	    qi_tmp = operands[i];

	  if (reg_overlap_mentioned_p (operands[i], dest))
	    fatal_insn ("Temporary register overlaps the destination", insn);

	  if (reg_overlap_mentioned_p (operands[i], src))
	    fatal_insn ("Temporary register overlaps the source", insn);

	  tmp_info[max_tmps++].xops[2] = operands[i];
	  if (max_tmps == MAX_TMPS)
	    break;
	}
    }

  if (max_tmps == 0)
    fatal_insn ("No scratch registers were found to do memory->memory moves",
		insn);

  if ((length & 1) != 0)
    {
      if (qi_tmp == 0)
	fatal_insn ("No byte register found when moving odd # of bytes.",
		    insn);
    }

  while (length > 1)
    {
      /* Batch up to MAX_TMPS load/store pairs, widest pieces first.  */
      for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
	{
	  if (length >= 4)
	    {
	      tmp_info[num_tmps].load = AS2(mov%L0,%1,%2);
	      tmp_info[num_tmps].store = AS2(mov%L0,%2,%0);
	      tmp_info[num_tmps].xops[0]
		= adj_offsettable_operand (dest, offset);
	      tmp_info[num_tmps].xops[1]
		= adj_offsettable_operand (src, offset);

	      offset += 4;
	      length -= 4;
	    }

	  else if (length >= 2)
	    {
	      tmp_info[num_tmps].load = AS2(mov%W0,%1,%2);
	      tmp_info[num_tmps].store = AS2(mov%W0,%2,%0);
	      tmp_info[num_tmps].xops[0]
		= adj_offsettable_operand (dest, offset);
	      tmp_info[num_tmps].xops[1]
		= adj_offsettable_operand (src, offset);

	      offset += 2;
	      length -= 2;
	    }
	  else
	    break;
	}

      /* Emit the batch's loads, then its stores.  */
      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].load, tmp_info[i].xops);

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
    }

  if (length == 1)
    {
      /* Move the final odd byte through the byte-capable scratch.  */
      xops[0] = adj_offsettable_operand (dest, offset);
      xops[1] = adj_offsettable_operand (src, offset);
      xops[2] = qi_tmp;
      output_asm_insn (AS2(mov%B0,%1,%2), xops);
      output_asm_insn (AS2(mov%B0,%2,%0), xops);
    }

  return "";
}
1591 \f
/* Return non-zero if X (a CONST_DOUBLE) is a constant the 80387 can
   load with a dedicated instruction: 1 for +0.0 (fldz), 2 for 1.0
   (fld1).  Return 0 for everything else, including when the host
   cannot do target floating-point arithmetic.  */

int
standard_80387_constant_p (x)
     rtx x;
{
#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  REAL_VALUE_TYPE d;
  jmp_buf handler;
  int is0, is1;

  /* A floating-point trap while examining the value lands here;
     treat it as "not a standard constant".  */
  if (setjmp (handler))
    return 0;

  set_float_handler (handler);
  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
  /* -0.0 must not be turned into fldz, which loads +0.0.  */
  is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
  is1 = REAL_VALUES_EQUAL (d, dconst1);
  set_float_handler (NULL_PTR);

  if (is0)
    return 1;

  if (is1)
    return 2;

  /* Note that on the 80387, other constants, such as pi,
     are much slower to load as standard constants
     than to load from doubles in memory!  */
  /* ??? Not true on K6: all constants are equal cost.  */
#endif

  return 0;
}
1624
/* Output code to move the single-word constant operands[1] into
   operands[0].  For an FP-register destination, +0.0 and 1.0 are
   loaded with fldz/fld1.  Any other CONST_DOUBLE is converted to its
   32-bit target bit pattern (as a CONST_INT) and moved like an
   integer.  Returns the assembler template for the move.  */

char *
output_move_const_single (operands)
     rtx *operands;
{
  if (FP_REG_P (operands[0]))
    {
      int conval = standard_80387_constant_p (operands[1]);

      if (conval == 1)
	return "fldz";

      if (conval == 2)
	return "fld1";
    }

  if (GET_CODE (operands[1]) == CONST_DOUBLE)
    {
      REAL_VALUE_TYPE r; long l;

      /* An XFmode constant cannot fit in one word; callers must not
	 get here with one.  */
      if (GET_MODE (operands[1]) == XFmode)
	abort ();

      REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
      REAL_VALUE_TO_TARGET_SINGLE (r, l);
      operands[1] = GEN_INT (l);
    }

  return singlemove_string (operands);
}
1654 \f
1655 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1656 reference and a constant. */
1657
1658 int
1659 symbolic_operand (op, mode)
1660 register rtx op;
1661 enum machine_mode mode ATTRIBUTE_UNUSED;
1662 {
1663 switch (GET_CODE (op))
1664 {
1665 case SYMBOL_REF:
1666 case LABEL_REF:
1667 return 1;
1668
1669 case CONST:
1670 op = XEXP (op, 0);
1671 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1672 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1673 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1674
1675 default:
1676 return 0;
1677 }
1678 }
1679
1680 /* Test for a valid operand for a call instruction.
1681 Don't allow the arg pointer register or virtual regs
1682 since they may change into reg + const, which the patterns
1683 can't handle yet. */
1684
1685 int
1686 call_insn_operand (op, mode)
1687 rtx op;
1688 enum machine_mode mode ATTRIBUTE_UNUSED;
1689 {
1690 if (GET_CODE (op) == MEM
1691 && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1692 /* This makes a difference for PIC. */
1693 && general_operand (XEXP (op, 0), Pmode))
1694 || (GET_CODE (XEXP (op, 0)) == REG
1695 && XEXP (op, 0) != arg_pointer_rtx
1696 && ! (REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1697 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1698 return 1;
1699
1700 return 0;
1701 }
1702
1703 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1704 even if pic. */
1705
1706 int
1707 expander_call_insn_operand (op, mode)
1708 rtx op;
1709 enum machine_mode mode ATTRIBUTE_UNUSED;
1710 {
1711 if (GET_CODE (op) == MEM
1712 && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1713 || (GET_CODE (XEXP (op, 0)) == REG
1714 && XEXP (op, 0) != arg_pointer_rtx
1715 && ! (REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1716 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1717 return 1;
1718
1719 return 0;
1720 }
1721
1722 /* Return 1 if OP is a comparison operator that can use the condition code
1723 generated by an arithmetic operation. */
1724
1725 int
1726 arithmetic_comparison_operator (op, mode)
1727 register rtx op;
1728 enum machine_mode mode;
1729 {
1730 enum rtx_code code;
1731
1732 if (mode != VOIDmode && mode != GET_MODE (op))
1733 return 0;
1734
1735 code = GET_CODE (op);
1736 if (GET_RTX_CLASS (code) != '<')
1737 return 0;
1738
1739 return (code != GT && code != LE);
1740 }
1741
1742 int
1743 ix86_logical_operator (op, mode)
1744 register rtx op;
1745 enum machine_mode mode ATTRIBUTE_UNUSED;
1746 {
1747 return GET_CODE (op) == AND || GET_CODE (op) == IOR || GET_CODE (op) == XOR;
1748 }
1749
1750 \f
1751 /* Returns 1 if OP contains a symbol reference */
1752
1753 int
1754 symbolic_reference_mentioned_p (op)
1755 rtx op;
1756 {
1757 register char *fmt;
1758 register int i;
1759
1760 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1761 return 1;
1762
1763 fmt = GET_RTX_FORMAT (GET_CODE (op));
1764 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1765 {
1766 if (fmt[i] == 'E')
1767 {
1768 register int j;
1769
1770 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1771 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1772 return 1;
1773 }
1774
1775 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1776 return 1;
1777 }
1778
1779 return 0;
1780 }
1781 \f
/* Attempt to expand a binary operator.  Make the expansion closer to the
   actual machine, than just general_operand, which would allow 3 separate
   memory references (one output, two input) in a single insn.  May
   canonicalize commutative operands, force memory operands into
   registers, and for (const_int - x) load the constant into a register.
   Returns TRUE when a valid insn can be made, FALSE when the caller
   must fall back.  */

int
ix86_expand_binary_operator (code, mode, operands)
     enum rtx_code code;
     enum machine_mode mode;
     rtx operands[];
{
  int modified;

  /* Recognize <var1> = <value> <op> <var1> for commutative operators:
     swap the sources so the destination matches the first source, or
     so an immediate ends up second.  */
  if (GET_RTX_CLASS (code) == 'c'
      && (rtx_equal_p (operands[0], operands[2])
	  || immediate_operand (operands[1], mode)))
    {
      rtx temp = operands[1];
      operands[1] = operands[2];
      operands[2] = temp;
    }

  /* If optimizing, copy to regs to improve CSE */
  if (TARGET_PSEUDO && optimize
      && ((reload_in_progress | reload_completed) == 0))
    {
      if (GET_CODE (operands[1]) == MEM
	  && ! rtx_equal_p (operands[0], operands[1]))
	operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);

      if (GET_CODE (operands[2]) == MEM)
	operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);

      /* (const_int - x): load the constant into a register first.
	 NOTE(review): this returns TRUE without a final
	 ix86_binary_operator_ok check — presumably (reg - op) is
	 always recognizable; confirm against the insn patterns.  */
      if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
	{
	  rtx temp = gen_reg_rtx (GET_MODE (operands[0]));

	  emit_move_insn (temp, operands[1]);
	  operands[1] = temp;
	  return TRUE;
	}
    }

  if (!ix86_binary_operator_ok (code, mode, operands))
    {
      /* If not optimizing, try to make a valid insn (optimize code
	 previously did this above to improve chances of CSE) */

      if ((! TARGET_PSEUDO || !optimize)
	  && ((reload_in_progress | reload_completed) == 0)
	  && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
	{
	  modified = FALSE;
	  if (GET_CODE (operands[1]) == MEM
	      && ! rtx_equal_p (operands[0], operands[1]))
	    {
	      operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
	      modified = TRUE;
	    }

	  if (GET_CODE (operands[2]) == MEM)
	    {
	      operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
	      modified = TRUE;
	    }

	  if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (operands[0]));

	      emit_move_insn (temp, operands[1]);
	      operands[1] = temp;
	      return TRUE;
	    }

	  if (modified && ! ix86_binary_operator_ok (code, mode, operands))
	    return FALSE;
	}
      else
	return FALSE;
    }

  return TRUE;
}
1867 \f
1868 /* Return TRUE or FALSE depending on whether the binary operator meets the
1869 appropriate constraints. */
1870
1871 int
1872 ix86_binary_operator_ok (code, mode, operands)
1873 enum rtx_code code;
1874 enum machine_mode mode ATTRIBUTE_UNUSED;
1875 rtx operands[3];
1876 {
1877 return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1878 && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1879 }
1880 \f
1881 /* Attempt to expand a unary operator. Make the expansion closer to the
1882 actual machine, then just general_operand, which will allow 2 separate
1883 memory references (one output, one input) in a single insn. Return
1884 whether the insn fails, or succeeds. */
1885
1886 int
1887 ix86_expand_unary_operator (code, mode, operands)
1888 enum rtx_code code;
1889 enum machine_mode mode;
1890 rtx operands[];
1891 {
1892 /* If optimizing, copy to regs to improve CSE */
1893 if (TARGET_PSEUDO
1894 && optimize
1895 && ((reload_in_progress | reload_completed) == 0)
1896 && GET_CODE (operands[1]) == MEM)
1897 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1898
1899 if (! ix86_unary_operator_ok (code, mode, operands))
1900 {
1901 if ((! TARGET_PSEUDO || optimize == 0)
1902 && ((reload_in_progress | reload_completed) == 0)
1903 && GET_CODE (operands[1]) == MEM)
1904 {
1905 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1906 if (! ix86_unary_operator_ok (code, mode, operands))
1907 return FALSE;
1908 }
1909 else
1910 return FALSE;
1911 }
1912
1913 return TRUE;
1914 }
1915 \f
/* Return TRUE or FALSE depending on whether the unary operator meets the
   appropriate constraints.  Every operand combination the unary
   patterns accept is valid, so this always returns TRUE; the unused
   arguments keep the signature parallel to ix86_binary_operator_ok.  */

int
ix86_unary_operator_ok (code, mode, operands)
     enum rtx_code code ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx operands[2] ATTRIBUTE_UNUSED;
{
  return TRUE;
}
1927 \f
/* Label of the -fpic return-address thunk emitted by
   asm_output_function_prefix; shared with load_pic_register and
   reset once the thunk has been used/emitted.  */
static rtx pic_label_rtx;
/* Assembler name generated for that label.  */
static char pic_label_name [256];
/* Counter used to make each generated PIC label name unique.  */
static int pic_label_no = 0;
1931
/* This function generates code for -fpic that loads %ebx with
   the return address of the caller and then returns.

   Emitted before the function body proper, the thunk consists of the
   label cached in pic_label_rtx/pic_label_name, a load of the word at
   the top of the stack (the caller's return address, since xops[1] is
   the stack pointer) into the PIC register, and a `ret'.  Only emitted
   when the PIC register is used and TARGET_DEEP_BRANCH_PREDICTION is
   set (a matched call/return pair keeps return prediction happy).  */

void
asm_output_function_prefix (file, name)
     FILE *file;
     char *name ATTRIBUTE_UNUSED;
{
  rtx xops[2];
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  xops[0] = pic_offset_table_rtx;
  xops[1] = stack_pointer_rtx;

  /* Deep branch prediction favors having a return for every call.  */
  if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
    {
      tree prologue_node;

      /* Create the thunk label lazily; load_pic_register shares it.  */
      if (pic_label_rtx == 0)
	{
	  pic_label_rtx = gen_label_rtx ();
	  ASM_GENERATE_INTERNAL_LABEL (pic_label_name, "LPR", pic_label_no++);
	  LABEL_NAME (pic_label_rtx) = pic_label_name;
	}

      prologue_node = make_node (FUNCTION_DECL);
      DECL_RESULT (prologue_node) = 0;

      /* This used to call ASM_DECLARE_FUNCTION_NAME() but since it's an
	 internal (non-global) label that's being emitted, it didn't make
	 sense to have .type information for local labels.  This caused
	 the SCO OpenServer 5.0.4 ELF assembler grief (why are you giving
	 me debug info for a label that you're declaring non-global?) this
	 was changed to call ASM_OUTPUT_LABEL() instead.  */

      ASM_OUTPUT_LABEL (file, pic_label_name);
      output_asm_insn ("movl (%1),%0", xops);
      output_asm_insn ("ret", xops);
    }
}
1974
1975 /* Generate the assembly code for function entry.
1976 FILE is an stdio stream to output the code to.
1977 SIZE is an int: how many units of temporary storage to allocate. */
1978
1979 void
1980 function_prologue (file, size)
1981 FILE *file ATTRIBUTE_UNUSED;
1982 int size ATTRIBUTE_UNUSED;
1983 {
1984 if (TARGET_SCHEDULE_PROLOGUE)
1985 {
1986 pic_label_rtx = 0;
1987 return;
1988 }
1989
1990 ix86_prologue (0);
1991 }
1992
1993 /* Expand the prologue into a bunch of separate insns. */
1994
1995 void
1996 ix86_expand_prologue ()
1997 {
1998 if (! TARGET_SCHEDULE_PROLOGUE)
1999 return;
2000
2001 ix86_prologue (1);
2002 }
2003
/* Emit code that loads the PIC register with the address of the global
   offset table.  DO_RTL non-zero emits RTL insns; zero prints the
   assembly directly.  With TARGET_DEEP_BRANCH_PREDICTION this calls
   the shared return thunk emitted by asm_output_function_prefix (so
   every call has a matching return); otherwise it uses the classic
   call-to-next-instruction / pop sequence.  Always ends with a
   blockage insn in the RTL case so the PIC setup cannot be scheduled
   past instructions that implicitly use the GOT.  */

void
load_pic_register (do_rtl)
     int do_rtl;
{
  rtx xops[4];

  if (TARGET_DEEP_BRANCH_PREDICTION)
    {
      xops[0] = pic_offset_table_rtx;
      /* Create the thunk label lazily (shared with
	 asm_output_function_prefix).  */
      if (pic_label_rtx == 0)
	{
	  pic_label_rtx = gen_label_rtx ();
	  ASM_GENERATE_INTERNAL_LABEL (pic_label_name, "LPR", pic_label_no++);
	  LABEL_NAME (pic_label_rtx) = pic_label_name;
	}

      xops[1] = gen_rtx_MEM (QImode,
			     gen_rtx (SYMBOL_REF, Pmode,
				      LABEL_NAME (pic_label_rtx)));

      if (do_rtl)
	{
	  emit_insn (gen_prologue_get_pc (xops[0], xops[1]));
	  emit_insn (gen_prologue_set_got (xops[0],
					   gen_rtx (SYMBOL_REF, Pmode,
						    "$_GLOBAL_OFFSET_TABLE_"),
					   xops[1]));
	}
      else
	{
	  /* Call the thunk (which leaves the return address in the PIC
	     register) and add the GOT displacement.  */
	  output_asm_insn (AS1 (call,%X1), xops);
	  output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
	  pic_label_rtx = 0;
	}
    }

  else
    {
      xops[0] = pic_offset_table_rtx;
      xops[1] = gen_label_rtx ();

      if (do_rtl)
	{
	  /* We can't put a raw CODE_LABEL into the RTL, and we can't emit
	     a new CODE_LABEL after reload, so we need a single pattern to
	     emit the 3 necessary instructions.  */
	  emit_insn (gen_prologue_get_pc_and_set_got (xops[0]));
	}
      else
	{
	  /* call to the next instruction; pop the return address into
	     the PIC register; add the GOT displacement.  */
	  output_asm_insn (AS1 (call,%P1), xops);
	  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
				     CODE_LABEL_NUMBER (xops[1]));
	  output_asm_insn (AS1 (pop%L0,%0), xops);
	  output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
	}
    }

  /* When -fpic, we must emit a scheduling barrier, so that the instruction
     that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
     moved before any instruction which implicitly uses the got.  */

  if (do_rtl)
    emit_insn (gen_blockage ());
}
2069
/* Worker for both prologue paths.  DO_RTL non-zero means emit RTL insns
   (marked RTX_FRAME_RELATED_P for dwarf2 unwind info); zero means print
   assembly (and dwarf2 CFI directives) directly.

   Sequence: push/set up the frame pointer if needed; allocate the
   frame (for large frames with -mstack-probe, by calling _alloca with
   the size in register 0, i.e. %eax); push each live call-saved
   register (plus the PIC register when PIC is in use); run any
   SUBTARGET_PROLOGUE; finally load the PIC register.  cfa_offset /
   cfa_store_offset track the CFA for the dwarf2 directives in the
   textual path.  */

static void
ix86_prologue (do_rtl)
     int do_rtl;
{
  register int regno;
  int limit;
  rtx xops[4];
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  long tsize = get_frame_size ();
  rtx insn;
  int cfa_offset = INCOMING_FRAME_SP_OFFSET, cfa_store_offset = cfa_offset;

  xops[0] = stack_pointer_rtx;
  xops[1] = frame_pointer_rtx;
  xops[2] = GEN_INT (tsize);

  if (frame_pointer_needed)
    {
      if (do_rtl)
	{
	  /* push %ebp; movl %esp,%ebp — as frame-related RTL.  */
	  insn = emit_insn (gen_rtx (SET, VOIDmode,
				     gen_rtx_MEM (SImode,
						  gen_rtx (PRE_DEC, SImode,
							   stack_pointer_rtx)),
				     frame_pointer_rtx));

	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_move_insn (xops[1], xops[0]);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      else
	{
	  output_asm_insn ("push%L1 %1", xops);
#ifdef INCOMING_RETURN_ADDR_RTX
	  if (dwarf2out_do_frame ())
	    {
	      char *l = dwarf2out_cfi_label ();

	      cfa_store_offset += 4;
	      cfa_offset = cfa_store_offset;
	      dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, cfa_offset);
	      dwarf2out_reg_save (l, FRAME_POINTER_REGNUM, - cfa_store_offset);
	    }
#endif

	  output_asm_insn (AS2 (mov%L0,%0,%1), xops);
#ifdef INCOMING_RETURN_ADDR_RTX
	  if (dwarf2out_do_frame ())
	    dwarf2out_def_cfa ("", FRAME_POINTER_REGNUM, cfa_offset);
#endif
	}
    }

  if (tsize == 0)
    ;
  else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
    {
      /* Small frame, or probing disabled: just drop %esp.  */
      if (do_rtl)
	{
	  insn = emit_insn (gen_prologue_set_stack_ptr (xops[2]));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else
	{
	  output_asm_insn (AS2 (sub%L0,%2,%0), xops);
#ifdef INCOMING_RETURN_ADDR_RTX
	  if (dwarf2out_do_frame ())
	    {
	      cfa_store_offset += tsize;
	      if (! frame_pointer_needed)
		{
		  cfa_offset = cfa_store_offset;
		  dwarf2out_def_cfa ("", STACK_POINTER_REGNUM, cfa_offset);
		}
	    }
#endif
	}
    }
  else
    {
      /* Large frame with stack probing: pass the size in register 0
	 (%eax) and call _alloca to extend the stack.  */
      xops[3] = gen_rtx_REG (SImode, 0);
      if (do_rtl)
	emit_move_insn (xops[3], xops[2]);
      else
	output_asm_insn (AS2 (mov%L0,%2,%3), xops);

      xops[3] = gen_rtx_MEM (FUNCTION_MODE,
			     gen_rtx (SYMBOL_REF, Pmode, "_alloca"));

      if (do_rtl)
	emit_call_insn (gen_rtx (CALL, VOIDmode, xops[3], const0_rtx));
      else
	output_asm_insn (AS1 (call,%P3), xops);
    }

  /* Note If use enter it is NOT reversed args.
     This one is not reversed from intel!!
     I think enter is slower.  Also sdb doesn't like it.
     But if you want it the code is:
     {
     xops[3] = const0_rtx;
     output_asm_insn ("enter %2,%3", xops);
     }
     */

  /* Push each call-saved register that is live in this function, plus
     the PIC register when PIC is in use.  */
  limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
  for (regno = limit - 1; regno >= 0; regno--)
    if ((regs_ever_live[regno] && ! call_used_regs[regno])
	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
      {
	xops[0] = gen_rtx_REG (SImode, regno);
	if (do_rtl)
	  {
	    insn = emit_insn (gen_rtx (SET, VOIDmode,
				       gen_rtx_MEM (SImode,
						    gen_rtx (PRE_DEC, SImode,
							     stack_pointer_rtx)),
				       xops[0]));

	    RTX_FRAME_RELATED_P (insn) = 1;
	  }
	else
	  {
	    output_asm_insn ("push%L0 %0", xops);
#ifdef INCOMING_RETURN_ADDR_RTX
	    if (dwarf2out_do_frame ())
	      {
		char *l = dwarf2out_cfi_label ();

		cfa_store_offset += 4;
		if (! frame_pointer_needed)
		  {
		    cfa_offset = cfa_store_offset;
		    dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, cfa_offset);
		  }

		dwarf2out_reg_save (l, regno, - cfa_store_offset);
	      }
#endif
	  }
      }

#ifdef SUBTARGET_PROLOGUE
  SUBTARGET_PROLOGUE;
#endif

  if (pic_reg_used)
    load_pic_register (do_rtl);

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  However, if -fpic, the above call will have
     done that.  */
  if ((profile_flag || profile_block_flag)
      && ! pic_reg_used && do_rtl)
    emit_insn (gen_blockage ());
}
2228
2229 /* Return 1 if it is appropriate to emit `ret' instructions in the
2230 body of a function. Do this only if the epilogue is simple, needing a
2231 couple of insns. Prior to reloading, we can't tell how many registers
2232 must be saved, so return 0 then. Return 0 if there is no frame
2233 marker to de-allocate.
2234
2235 If NON_SAVING_SETJMP is defined and true, then it is not possible
2236 for the epilogue to be simple, so return 0. This is a special case
2237 since NON_SAVING_SETJMP will not cause regs_ever_live to change
2238 until final, but jump_optimize may need to know sooner if a
2239 `return' is OK. */
2240
2241 int
2242 ix86_can_use_return_insn_p ()
2243 {
2244 int regno;
2245 int nregs = 0;
2246 int reglimit = (frame_pointer_needed
2247 ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2248 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2249 || current_function_uses_const_pool);
2250
2251 #ifdef NON_SAVING_SETJMP
2252 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
2253 return 0;
2254 #endif
2255
2256 if (! reload_completed)
2257 return 0;
2258
2259 for (regno = reglimit - 1; regno >= 0; regno--)
2260 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2261 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2262 nregs++;
2263
2264 return nregs == 0 || ! frame_pointer_needed;
2265 }
2266
2267 /* This function generates the assembly code for function exit.
2268 FILE is an stdio stream to output the code to.
2269 SIZE is an int: how many units of temporary storage to deallocate. */
2270
2271 void
2272 function_epilogue (file, size)
2273 FILE *file ATTRIBUTE_UNUSED;
2274 int size ATTRIBUTE_UNUSED;
2275 {
2276 return;
2277 }
2278
2279 /* Restore function stack, frame, and registers. */
2280
void
ix86_expand_epilogue ()
{
  /* Emit the epilogue as rtl insns (do_rtl == 1) rather than as
     assembly text.  */
  ix86_epilogue (1);
}
2286
2287 static void
2288 ix86_epilogue (do_rtl)
2289 int do_rtl;
2290 {
2291 register int regno;
2292 register int nregs, limit;
2293 int offset;
2294 rtx xops[3];
2295 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2296 || current_function_uses_const_pool);
2297 int sp_valid = !frame_pointer_needed || current_function_sp_is_unchanging;
2298 long tsize = get_frame_size ();
2299
2300 /* Compute the number of registers to pop */
2301
2302 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2303
2304 nregs = 0;
2305
2306 for (regno = limit - 1; regno >= 0; regno--)
2307 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2308 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2309 nregs++;
2310
2311 /* sp is often unreliable so we may have to go off the frame pointer. */
2312
2313 offset = - tsize - (nregs * UNITS_PER_WORD);
2314
2315 xops[2] = stack_pointer_rtx;
2316
2317 /* When -fpic, we must emit a scheduling barrier, so that the instruction
2318 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
2319 moved before any instruction which implicitly uses the got. This
2320 includes any instruction which uses a SYMBOL_REF or a LABEL_REF.
2321
2322 Alternatively, this could be fixed by making the dependence on the
2323 PIC_OFFSET_TABLE_REGNUM explicit in the RTL. */
2324
2325 if (flag_pic || profile_flag || profile_block_flag)
2326 emit_insn (gen_blockage ());
2327
2328 /* If we're only restoring one register and sp is not valid then
2329 using a move instruction to restore the register since it's
2330 less work than reloading sp and popping the register. Otherwise,
2331 restore sp (if necessary) and pop the registers. */
2332
2333 if (nregs > 1 || sp_valid)
2334 {
2335 if ( !sp_valid )
2336 {
2337 xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
2338 if (do_rtl)
2339 emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
2340 else
2341 output_asm_insn (AS2 (lea%L2,%0,%2), xops);
2342 }
2343
2344 for (regno = 0; regno < limit; regno++)
2345 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2346 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2347 {
2348 xops[0] = gen_rtx_REG (SImode, regno);
2349
2350 if (do_rtl)
2351 emit_insn (gen_pop (xops[0]));
2352 else
2353 output_asm_insn ("pop%L0 %0", xops);
2354 }
2355 }
2356
2357 else
2358 for (regno = 0; regno < limit; regno++)
2359 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2360 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2361 {
2362 xops[0] = gen_rtx_REG (SImode, regno);
2363 xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);
2364
2365 if (do_rtl)
2366 emit_move_insn (xops[0], xops[1]);
2367 else
2368 output_asm_insn (AS2 (mov%L0,%1,%0), xops);
2369
2370 offset += 4;
2371 }
2372
2373 if (frame_pointer_needed)
2374 {
2375 /* If not an i386, mov & pop is faster than "leave". */
2376
2377 if (TARGET_USE_LEAVE)
2378 {
2379 if (do_rtl)
2380 emit_insn (gen_leave());
2381 else
2382 output_asm_insn ("leave", xops);
2383 }
2384 else
2385 {
2386 xops[0] = frame_pointer_rtx;
2387 xops[1] = stack_pointer_rtx;
2388
2389 if (do_rtl)
2390 {
2391 emit_insn (gen_epilogue_set_stack_ptr());
2392 emit_insn (gen_pop (xops[0]));
2393 }
2394 else
2395 {
2396 output_asm_insn (AS2 (mov%L2,%0,%2), xops);
2397 output_asm_insn ("pop%L0 %0", xops);
2398 }
2399 }
2400 }
2401
2402 else if (tsize)
2403 {
2404 /* Intel's docs say that for 4 or 8 bytes of stack frame one should
2405 use `pop' and not `add'. */
2406 int use_pop = tsize == 4;
2407
2408 /* Use two pops only for the Pentium processors. */
2409 if (tsize == 8 && !TARGET_386 && !TARGET_486)
2410 {
2411 rtx retval = current_function_return_rtx;
2412
2413 xops[1] = gen_rtx_REG (SImode, 1); /* %edx */
2414
2415 /* This case is a bit more complex. Since we cannot pop into
2416 %ecx twice we need a second register. But this is only
2417 available if the return value is not of DImode in which
2418 case the %edx register is not available. */
2419 use_pop = (retval == NULL
2420 || ! reg_overlap_mentioned_p (xops[1], retval));
2421 }
2422
2423 if (use_pop)
2424 {
2425 xops[0] = gen_rtx_REG (SImode, 2); /* %ecx */
2426
2427 if (do_rtl)
2428 {
2429 /* We have to prevent the two pops here from being scheduled.
2430 GCC otherwise would try in some situation to put other
2431 instructions in between them which has a bad effect. */
2432 emit_insn (gen_blockage ());
2433 emit_insn (gen_pop (xops[0]));
2434 if (tsize == 8)
2435 emit_insn (gen_pop (xops[1]));
2436 }
2437 else
2438 {
2439 output_asm_insn ("pop%L0 %0", xops);
2440 if (tsize == 8)
2441 output_asm_insn ("pop%L1 %1", xops);
2442 }
2443 }
2444 else
2445 {
2446 /* If there is no frame pointer, we must still release the frame. */
2447 xops[0] = GEN_INT (tsize);
2448
2449 if (do_rtl)
2450 emit_insn (gen_rtx (SET, VOIDmode, xops[2],
2451 gen_rtx (PLUS, SImode, xops[2], xops[0])));
2452 else
2453 output_asm_insn (AS2 (add%L2,%0,%2), xops);
2454 }
2455 }
2456
2457 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2458 if (profile_block_flag == 2)
2459 {
2460 FUNCTION_BLOCK_PROFILER_EXIT(file);
2461 }
2462 #endif
2463
2464 if (current_function_pops_args && current_function_args_size)
2465 {
2466 xops[1] = GEN_INT (current_function_pops_args);
2467
2468 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2469 asked to pop more, pop return address, do explicit add, and jump
2470 indirectly to the caller. */
2471
2472 if (current_function_pops_args >= 32768)
2473 {
2474 /* ??? Which register to use here? */
2475 xops[0] = gen_rtx_REG (SImode, 2);
2476
2477 if (do_rtl)
2478 {
2479 emit_insn (gen_pop (xops[0]));
2480 emit_insn (gen_rtx (SET, VOIDmode, xops[2],
2481 gen_rtx (PLUS, SImode, xops[1], xops[2])));
2482 emit_jump_insn (xops[0]);
2483 }
2484 else
2485 {
2486 output_asm_insn ("pop%L0 %0", xops);
2487 output_asm_insn (AS2 (add%L2,%1,%2), xops);
2488 output_asm_insn ("jmp %*%0", xops);
2489 }
2490 }
2491 else
2492 {
2493 if (do_rtl)
2494 emit_jump_insn (gen_return_pop_internal (xops[1]));
2495 else
2496 output_asm_insn ("ret %1", xops);
2497 }
2498 }
2499 else
2500 {
2501 if (do_rtl)
2502 emit_jump_insn (gen_return_internal ());
2503 else
2504 output_asm_insn ("ret", xops);
2505 }
2506 }
2507 \f
2508 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2509 that is a valid memory address for an instruction.
2510 The MODE argument is the machine mode for the MEM expression
2511 that wants to use this address.
2512
2513 On x86, legitimate addresses are:
2514 base movl (base),reg
2515 displacement movl disp,reg
2516 base + displacement movl disp(base),reg
2517 index + base movl (base,index),reg
2518 (index + base) + displacement movl disp(base,index),reg
2519 index*scale movl (,index,scale),reg
2520 index*scale + disp movl disp(,index,scale),reg
2521 index*scale + base movl (base,index,scale),reg
2522 (index*scale + base) + disp movl disp(base,index,scale),reg
2523
2524 In each case, scale can be 1, 2, 4, 8. */
2525
2526 /* This is exactly the same as print_operand_addr, except that
2527 it recognizes addresses instead of printing them.
2528
2529 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2530 convert common non-canonical forms to canonical form so that they will
2531 be recognized. */
2532
/* When address debugging is enabled, report why an address was
   rejected and dump the offending rtx.  MSG is a plain string, not a
   printf format, so write it with fputs: passing it as a format
   string (as the old fprintf call did) would misinterpret any `%'
   it might contain.  */
#define ADDR_INVALID(msg,insn)						\
do {									\
  if (TARGET_DEBUG_ADDR)						\
    {									\
      fputs (msg, stderr);						\
      debug_rtx (insn);							\
    }									\
} while (0)
2541
2542 static int
2543 legitimate_pic_address_disp_p (disp)
2544 register rtx disp;
2545 {
2546 if (GET_CODE (disp) != CONST)
2547 return 0;
2548 disp = XEXP (disp, 0);
2549
2550 if (GET_CODE (disp) == PLUS)
2551 {
2552 if (GET_CODE (XEXP (disp, 1)) != CONST_INT)
2553 return 0;
2554 disp = XEXP (disp, 0);
2555 }
2556
2557 if (GET_CODE (disp) != UNSPEC
2558 || XVECLEN (disp, 0) != 1)
2559 return 0;
2560
2561 /* Must be @GOT or @GOTOFF. */
2562 if (XINT (disp, 1) != 6
2563 && XINT (disp, 1) != 7)
2564 return 0;
2565
2566 if (GET_CODE (XVECEXP (disp, 0, 0)) != SYMBOL_REF
2567 && GET_CODE (XVECEXP (disp, 0, 0)) != LABEL_REF)
2568 return 0;
2569
2570 return 1;
2571 }
2572
/* Return nonzero (TRUE) if ADDR is a valid memory address for MODE.
   The address is decomposed into base + index*scale + displacement
   and each part is validated in turn.  When STRICT is nonzero, base
   and index must satisfy the strict REG_OK_FOR_*_P predicates (hard
   registers only); otherwise the nonstrict variants are used.  Only
   addresses already in canonical form are recognized.  */

int
legitimate_address_p (mode, addr, strict)
     enum machine_mode mode;
     register rtx addr;
     int strict;
{
  /* Decomposed address components; NULL_RTX means "absent".  */
  rtx base = NULL_RTX;
  rtx indx = NULL_RTX;
  rtx scale = NULL_RTX;
  rtx disp = NULL_RTX;

  if (TARGET_DEBUG_ADDR)
    {
      fprintf (stderr,
	       "\n======\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
	       GET_MODE_NAME (mode), strict);

      debug_rtx (addr);
    }

  /* First, pick apart ADDR into base/index/scale/displacement by its
     top-level rtx shape.  */
  if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
    base = addr;

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == SUBREG)
	{
	  if (code1 == REG || code1 == SUBREG)
	    {
	      indx = op0;			/* index + base */
	      base = op1;
	    }

	  else
	    {
	      base = op0;			/* base + displacement */
	      disp = op1;
	    }
	}

      else if (code0 == MULT)
	{
	  indx = XEXP (op0, 0);
	  scale = XEXP (op0, 1);

	  if (code1 == REG || code1 == SUBREG)
	    base = op1;				/* index*scale + base */

	  else
	    disp = op1;				/* index*scale + disp */
	}

      else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
	{
	  indx = XEXP (XEXP (op0, 0), 0);	/* index*scale + base + disp */
	  scale = XEXP (XEXP (op0, 0), 1);
	  base = XEXP (op0, 1);
	  disp = op1;
	}

      else if (code0 == PLUS)
	{
	  indx = XEXP (op0, 0);			/* index + base + disp */
	  base = XEXP (op0, 1);
	  disp = op1;
	}

      else
	{
	  ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
	  return FALSE;
	}
    }

  else if (GET_CODE (addr) == MULT)
    {
      indx = XEXP (addr, 0);			/* index*scale */
      scale = XEXP (addr, 1);
    }

  else
    disp = addr;				/* displacement */

  /* Allow arg pointer and stack pointer as index if there is not scaling */
  /* (%esp cannot be an index in the encoding, so swap it into the base
     position; with no scale the two roles are interchangeable).  */
  if (base && indx && !scale
      && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
    {
      rtx tmp = base;
      base = indx;
      indx = tmp;
    }

  /* Validate base register:

     Don't allow SUBREG's here, it can lead to spill failures when the base
     is one word out of a two word structure, which is represented internally
     as a DImode int. */

  if (base)
    {
      if (GET_CODE (base) != REG)
	{
	  ADDR_INVALID ("Base is not a register.\n", base);
	  return FALSE;
	}

      if ((strict && ! REG_OK_FOR_BASE_STRICT_P (base))
	  || (! strict && ! REG_OK_FOR_BASE_NONSTRICT_P (base)))
	{
	  ADDR_INVALID ("Base is not valid.\n", base);
	  return FALSE;
	}
    }

  /* Validate index register:

     Don't allow SUBREG's here, it can lead to spill failures when the index
     is one word out of a two word structure, which is represented internally
     as a DImode int. */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
	{
	  ADDR_INVALID ("Index is not a register.\n", indx);
	  return FALSE;
	}

      if ((strict && ! REG_OK_FOR_INDEX_STRICT_P (indx))
	  || (! strict && ! REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
	{
	  ADDR_INVALID ("Index is not valid.\n", indx);
	  return FALSE;
	}
    }
  else if (scale)
    abort ();	/* scale w/o index invalid */

  /* Validate scale factor: */
  if (scale)
    {
      HOST_WIDE_INT value;

      if (GET_CODE (scale) != CONST_INT)
	{
	  ADDR_INVALID ("Scale is not valid.\n", scale);
	  return FALSE;
	}

      /* Hardware addressing permits only these four scales.  */
      value = INTVAL (scale);
      if (value != 1 && value != 2 && value != 4 && value != 8)
	{
	  ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
	  return FALSE;
	}
    }

  /* Validate displacement. */
  if (disp)
    {
      if (!CONSTANT_ADDRESS_P (disp))
	{
	  ADDR_INVALID ("Displacement is not valid.\n", disp);
	  return FALSE;
	}

      else if (GET_CODE (disp) == CONST_DOUBLE)
	{
	  ADDR_INVALID ("Displacement is a const_double.\n", disp);
	  return FALSE;
	}

      if (flag_pic && SYMBOLIC_CONST (disp))
	{
	  /* Symbolic displacements under -fpic must be canonical @GOT
	     or @GOTOFF references relative to the PIC register.  */
	  if (! legitimate_pic_address_disp_p (disp))
	    {
	      ADDR_INVALID ("Displacement is an invalid PIC construct.\n",
			    disp);
	      return FALSE;
	    }

	  if (base != pic_offset_table_rtx
	      && (indx != pic_offset_table_rtx || scale != NULL_RTX))
	    {
	      ADDR_INVALID ("PIC displacement against invalid base.\n", disp);
	      return FALSE;
	    }
	}

      else if (HALF_PIC_P ())
	{
	  /* Half-pic (e.g. OSF/rose) references stand alone: no base
	     or index may accompany them.  */
	  if (! HALF_PIC_ADDRESS_P (disp)
	      || (base != NULL_RTX || indx != NULL_RTX))
	    {
	      ADDR_INVALID ("Displacement is an invalid half-pic reference.\n",
			    disp);
	      return FALSE;
	    }
	}
    }

  if (TARGET_DEBUG_ADDR)
    fprintf (stderr, "Address is valid.\n");

  /* Everything looks valid, return true */
  return TRUE;
}
2784 \f
2785 /* Return a legitimate reference for ORIG (an address) using the
2786 register REG. If REG is 0, a new pseudo is generated.
2787
2788 There are two types of references that must be handled:
2789
2790 1. Global data references must load the address from the GOT, via
2791 the PIC reg. An insn is emitted to do this load, and the reg is
2792 returned.
2793
2794 2. Static data references, constant pool addresses, and code labels
2795 compute the address as an offset from the GOT, whose base is in
2796 the PIC reg. Static data objects have SYMBOL_REF_FLAG set to
2797 differentiate them from global data objects. The returned
2798 address is the PIC reg + an unspec constant.
2799
2800 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2801 reg also appears in the address. */
2802
rtx
legitimize_pic_address (orig, reg)
     rtx orig;
     rtx reg;
{
  rtx addr = orig;
  rtx new = orig;	/* the legitimized result, built up below */
  rtx base;

  /* Local symbols and labels: reachable as pic_reg + @GOTOFF offset.  */
  if (GET_CODE (addr) == LABEL_REF
      || (GET_CODE (addr) == SYMBOL_REF
	  && (CONSTANT_POOL_ADDRESS_P (addr)
	      || SYMBOL_REF_FLAG (addr))))
    {
      /* This symbol may be referenced via a displacement from the PIC
	 base address (@GOTOFF). */

      current_function_uses_pic_offset_table = 1;
      /* Unspec 7 tags a @GOTOFF reference.  */
      new = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, addr), 7);
      new = gen_rtx_CONST (VOIDmode, new);
      new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);

      if (reg != 0)
	{
	  emit_move_insn (reg, new);
	  new = reg;
	}
    }
  else if (GET_CODE (addr) == SYMBOL_REF)
    {
      /* This symbol must be referenced via a load from the
	 Global Offset Table (@GOT). */

      current_function_uses_pic_offset_table = 1;
      /* Unspec 6 tags a @GOT reference.  */
      new = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, addr), 6);
      new = gen_rtx_CONST (VOIDmode, new);
      new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
      new = gen_rtx_MEM (Pmode, new);
      /* The GOT entry does not change during execution.  */
      RTX_UNCHANGING_P (new) = 1;

      if (reg == 0)
	reg = gen_reg_rtx (Pmode);
      emit_move_insn (reg, new);
      new = reg;
    }
  else
    {
      /* Anything else: strip an outer CONST and handle PLUS forms by
	 legitimizing each side.  */
      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  if (GET_CODE (addr) == UNSPEC)
	    {
	      /* Check that the unspec is one of the ones we generate? */
	    }
	  else if (GET_CODE (addr) != PLUS)
	    abort();
	}
      if (GET_CODE (addr) == PLUS)
	{
	  rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);

	  /* Check first to see if this is a constant offset from a @GOTOFF
	     symbol reference. */
	  if ((GET_CODE (op0) == LABEL_REF
	       || (GET_CODE (op0) == SYMBOL_REF
		   && (CONSTANT_POOL_ADDRESS_P (op0)
		       || SYMBOL_REF_FLAG (op0))))
	      && GET_CODE (op1) == CONST_INT)
	    {
	      current_function_uses_pic_offset_table = 1;
	      new = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, op0), 7);
	      new = gen_rtx_PLUS (VOIDmode, new, op1);
	      new = gen_rtx_CONST (VOIDmode, new);
	      new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);

	      if (reg != 0)
		{
		  emit_move_insn (reg, new);
		  new = reg;
		}
	    }
	  else
	    {
	      /* Legitimize both halves recursively, then recombine.
		 REG is only reused for the second half if the first
		 half did not claim it.  */
	      base = legitimize_pic_address (XEXP (addr, 0), reg);
	      new = legitimize_pic_address (XEXP (addr, 1),
					    base == reg ? NULL_RTX : reg);

	      if (GET_CODE (new) == CONST_INT)
		new = plus_constant (base, INTVAL (new));
	      else
		{
		  /* Keep any constant part outermost in the sum.  */
		  if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
		    {
		      base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
		      new = XEXP (new, 1);
		    }
		  new = gen_rtx_PLUS (Pmode, base, new);
		}
	    }
	}
    }
  return new;
}
2906 \f
2907 /* Emit insns to move operands[1] into operands[0]. */
2908
2909 void
2910 emit_pic_move (operands, mode)
2911 rtx *operands;
2912 enum machine_mode mode ATTRIBUTE_UNUSED;
2913 {
2914 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
2915
2916 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2917 operands[1] = force_reg (Pmode, operands[1]);
2918 else
2919 operands[1] = legitimize_pic_address (operands[1], temp);
2920 }
2921 \f
2922 /* Try machine-dependent ways of modifying an illegitimate address
2923 to be legitimate. If we find one, return the new, valid address.
2924 This macro is used in only one place: `memory_address' in explow.c.
2925
2926 OLDX is the address as it was before break_out_memory_refs was called.
2927 In some cases it is useful to look at this to decide what needs to be done.
2928
2929 MODE and WIN are passed so that this macro can use
2930 GO_IF_LEGITIMATE_ADDRESS.
2931
2932 It is always safe for this macro to do nothing. It exists to recognize
2933 opportunities to optimize the output.
2934
2935 For the 80386, we handle X+REG by loading X into a register R and
2936 using R+REG. R will go in a general reg and indexing will be used.
2937 However, if REG is a broken-out memory address or multiplication,
2938 nothing needs to be done because REG can certainly go in a general reg.
2939
2940 When -fpic is used, special handling is needed for symbolic references.
2941 See comments by legitimize_pic_address in i386.c for details. */
2942
rtx
legitimize_address (x, oldx, mode)
     register rtx x;
     register rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* Set whenever we rewrite X; gates the early "is it valid now?"
     checks below.  */
  int changed = 0;
  unsigned log;

  if (TARGET_DEBUG_ADDR)
    {
      fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n",
	       GET_MODE_NAME (mode));
      debug_rtx (x);
    }

  if (flag_pic && SYMBOLIC_CONST (x))
    return legitimize_pic_address (x, 0);

  /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
  if (GET_CODE (x) == ASHIFT
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
    {
      changed = 1;
      x = gen_rtx_MULT (Pmode, force_reg (Pmode, XEXP (x, 0)),
			GEN_INT (1 << log));
    }

  if (GET_CODE (x) == PLUS)
    {
      /* Canonicalize shifts by 0, 1, 2, 3 into multiply. */

      if (GET_CODE (XEXP (x, 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
	{
	  changed = 1;
	  XEXP (x, 0) = gen_rtx (MULT, Pmode,
				 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
				 GEN_INT (1 << log));
	}

      if (GET_CODE (XEXP (x, 1)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
	  && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
	{
	  changed = 1;
	  XEXP (x, 1) = gen_rtx (MULT, Pmode,
				 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
				 GEN_INT (1 << log));
	}

      /* Put multiply first if it isn't already. */
      if (GET_CODE (XEXP (x, 1)) == MULT)
	{
	  rtx tmp = XEXP (x, 0);
	  XEXP (x, 0) = XEXP (x, 1);
	  XEXP (x, 1) = tmp;
	  changed = 1;
	}

      /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
	 into (plus (plus (mult (reg) (const)) (reg)) (const)).  This can be
	 created by virtual register instantiation, register elimination, and
	 similar optimizations. */
      if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
	{
	  changed = 1;
	  x = gen_rtx (PLUS, Pmode,
		       gen_rtx (PLUS, Pmode, XEXP (x, 0),
				XEXP (XEXP (x, 1), 0)),
		       XEXP (XEXP (x, 1), 1));
	}

      /* Canonicalize
	 (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
	 into (plus (plus (mult (reg) (const)) (reg)) (const)).  */
      /* NOTE(review): the GET_CODE (x) == PLUS re-test below is always
	 true inside this branch; it appears redundant.  */
      else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
	       && CONSTANT_P (XEXP (x, 1)))
	{
	  rtx constant;
	  rtx other = NULL_RTX;

	  /* The integer constant may be the outer addend or buried in
	     the inner PLUS; find it and remember the other summand.  */
	  if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	    {
	      constant = XEXP (x, 1);
	      other = XEXP (XEXP (XEXP (x, 0), 1), 1);
	    }
	  else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
	    {
	      constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
	      other = XEXP (x, 1);
	    }
	  else
	    constant = 0;

	  if (constant)
	    {
	      changed = 1;
	      x = gen_rtx (PLUS, Pmode,
			   gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
				    XEXP (XEXP (XEXP (x, 0), 1), 0)),
			   plus_constant (other, INTVAL (constant)));
	    }
	}

      if (changed && legitimate_address_p (mode, x, FALSE))
	return x;

      /* Force remaining multiplies out into registers.  */
      if (GET_CODE (XEXP (x, 0)) == MULT)
	{
	  changed = 1;
	  XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
	}

      if (GET_CODE (XEXP (x, 1)) == MULT)
	{
	  changed = 1;
	  XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
	}

      if (changed
	  && GET_CODE (XEXP (x, 1)) == REG
	  && GET_CODE (XEXP (x, 0)) == REG)
	return x;

      if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
	{
	  changed = 1;
	  x = legitimize_pic_address (x, 0);
	}

      if (changed && legitimate_address_p (mode, x, FALSE))
	return x;

      /* Last resort: compute one side into a fresh register so the
	 result is reg + reg.  */
      if (GET_CODE (XEXP (x, 0)) == REG)
	{
	  register rtx temp = gen_reg_rtx (Pmode);
	  register rtx val = force_operand (XEXP (x, 1), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  XEXP (x, 1) = temp;
	  return x;
	}

      else if (GET_CODE (XEXP (x, 1)) == REG)
	{
	  register rtx temp = gen_reg_rtx (Pmode);
	  register rtx val = force_operand (XEXP (x, 0), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  XEXP (x, 0) = temp;
	  return x;
	}
    }

  return x;
}
3106 \f
3107 /* Print an integer constant expression in assembler syntax. Addition
3108 and subtraction are the only arithmetic that may appear in these
3109 expressions. FILE is the stdio stream to write to, X is the rtx, and
3110 CODE is the operand print code from the output string. */
3111
3112 static void
3113 output_pic_addr_const (file, x, code)
3114 FILE *file;
3115 rtx x;
3116 int code;
3117 {
3118 char buf[256];
3119
3120 switch (GET_CODE (x))
3121 {
3122 case PC:
3123 if (flag_pic)
3124 putc ('.', file);
3125 else
3126 abort ();
3127 break;
3128
3129 case SYMBOL_REF:
3130 assemble_name (file, XSTR (x, 0));
3131 if (code == 'P' && ! SYMBOL_REF_FLAG (x))
3132 fputs ("@PLT", file);
3133 break;
3134
3135 case LABEL_REF:
3136 x = XEXP (x, 0);
3137 /* FALLTHRU */
3138 case CODE_LABEL:
3139 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3140 assemble_name (asm_out_file, buf);
3141 break;
3142
3143 case CONST_INT:
3144 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3145 break;
3146
3147 case CONST:
3148 /* This used to output parentheses around the expression,
3149 but that does not work on the 386 (either ATT or BSD assembler). */
3150 output_pic_addr_const (file, XEXP (x, 0), code);
3151 break;
3152
3153 case CONST_DOUBLE:
3154 if (GET_MODE (x) == VOIDmode)
3155 {
3156 /* We can use %d if the number is <32 bits and positive. */
3157 if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
3158 fprintf (file, "0x%lx%08lx",
3159 (unsigned long) CONST_DOUBLE_HIGH (x),
3160 (unsigned long) CONST_DOUBLE_LOW (x));
3161 else
3162 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3163 }
3164 else
3165 /* We can't handle floating point constants;
3166 PRINT_OPERAND must handle them. */
3167 output_operand_lossage ("floating constant misused");
3168 break;
3169
3170 case PLUS:
3171 /* Some assemblers need integer constants to appear first. */
3172 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3173 {
3174 output_pic_addr_const (file, XEXP (x, 0), code);
3175 fprintf (file, "+");
3176 output_pic_addr_const (file, XEXP (x, 1), code);
3177 }
3178 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3179 {
3180 output_pic_addr_const (file, XEXP (x, 1), code);
3181 fprintf (file, "+");
3182 output_pic_addr_const (file, XEXP (x, 0), code);
3183 }
3184 else
3185 abort ();
3186 break;
3187
3188 case MINUS:
3189 output_pic_addr_const (file, XEXP (x, 0), code);
3190 fprintf (file, "-");
3191 output_pic_addr_const (file, XEXP (x, 1), code);
3192 break;
3193
3194 case UNSPEC:
3195 if (XVECLEN (x, 0) != 1)
3196 abort ();
3197 output_pic_addr_const (file, XVECEXP (x, 0, 0), code);
3198 switch (XINT (x, 1))
3199 {
3200 case 6:
3201 fputs ("@GOT", file);
3202 break;
3203 case 7:
3204 fputs ("@GOTOFF", file);
3205 break;
3206 case 8:
3207 fputs ("@PLT", file);
3208 break;
3209 default:
3210 output_operand_lossage ("invalid UNSPEC as operand");
3211 break;
3212 }
3213 break;
3214
3215 default:
3216 output_operand_lossage ("invalid expression as operand");
3217 }
3218 }
3219 \f
/* Write to FILE the one- or two-letter condition suffix for a jump
   testing CODE against the condition codes recorded in
   cc_prev_status; if REVERSE is nonzero, emit the suffix for the
   opposite condition instead.  */

static void
put_jump_code (code, reverse, file)
     enum rtx_code code;
     int reverse;
     FILE *file;
{
  int flags = cc_prev_status.flags;
  /* Nonzero when the flags come from an 80387 compare and unordered
     results must be handled per -mieee-fp.  */
  int ieee = (TARGET_IEEE_FP && (flags & CC_IN_80387));
  const char *suffix;

  /* "Zero in not-carry": equality lives in the carry flag.  */
  if (flags & CC_Z_IN_NOT_C)
    switch (code)
      {
      case EQ:
	fputs (reverse ? "c" : "nc", file);
	return;

      case NE:
	fputs (reverse ? "nc" : "c", file);
	return;

      default:
	abort ();
      }
  if (ieee)
    {
      /* NOTE(review): GT, LT and GE share EQ's e/ne suffix here; this
	 presumably depends on how the 80387 compare result was moved
	 into the flags -- verify against the cc-setting patterns.  */
      switch (code)
	{
	case LE:
	  suffix = reverse ? "ae" : "b";
	  break;
	case GT:
	case LT:
	case GE:
	  suffix = reverse ? "ne" : "e";
	  break;
	case EQ:
	  suffix = reverse ? "ne" : "e";
	  break;
	case NE:
	  suffix = reverse ? "e" : "ne";
	  break;
	default:
	  abort ();
	}
      fputs (suffix, file);
      return;
    }
  if (flags & CC_TEST_AX)
    abort();
  if ((flags & CC_NO_OVERFLOW) && (code == LE || code == GT))
    abort ();
  if (reverse)
    code = reverse_condition (code);
  /* Standard case: signed suffixes for integer compares, unsigned
     suffixes when the flags came from the 80387.  */
  switch (code)
    {
    case EQ:
      suffix = "e";
      break;

    case NE:
      suffix = "ne";
      break;

    case GT:
      suffix = flags & CC_IN_80387 ? "a" : "g";
      break;

    case GTU:
      suffix = "a";
      break;

    case LT:
      if (flags & CC_NO_OVERFLOW)
	suffix = "s";
      else
	suffix = flags & CC_IN_80387 ? "b" : "l";
      break;

    case LTU:
      suffix = "b";
      break;

    case GE:
      if (flags & CC_NO_OVERFLOW)
	suffix = "ns";
      else
	suffix = flags & CC_IN_80387 ? "ae" : "ge";
      break;

    case GEU:
      suffix = "ae";
      break;

    case LE:
      suffix = flags & CC_IN_80387 ? "be" : "le";
      break;

    case LEU:
      suffix = "be";
      break;

    default:
      abort ();
    }
  fputs (suffix, file);
}
3327
3328 /* Append the correct conditional move suffix which corresponds to CODE. */
3329
/* Write to FILE the condition suffix for a set/cmov-style instruction
   corresponding to CODE in mode class MODE (MODE_INT or MODE_FLOAT).
   If REVERSE_CC is nonzero the condition is reversed first -- except
   for IEEE 80387 comparisons, where the float table below encodes the
   reversal itself.  */

static void
put_condition_code (code, reverse_cc, mode, file)
     enum rtx_code code;
     int reverse_cc;
     enum mode_class mode;
     FILE * file;
{
  /* Nonzero for an 80387 compare under -mieee-fp whose result was not
     produced by fcomi (so unordered handling is table-driven).  */
  int ieee = (TARGET_IEEE_FP && (cc_prev_status.flags & CC_IN_80387)
	      && ! (cc_prev_status.flags & CC_FCOMI));
  if (reverse_cc && ! ieee)
    code = reverse_condition (code);

  if (mode == MODE_INT)
    switch (code)
      {
      case NE:
	if (cc_prev_status.flags & CC_Z_IN_NOT_C)
	  fputs ("b", file);
	else
	  fputs ("ne", file);
	return;

      case EQ:
	if (cc_prev_status.flags & CC_Z_IN_NOT_C)
	  fputs ("ae", file);
	else
	  fputs ("e", file);
	return;

      case GE:
	if (cc_prev_status.flags & CC_NO_OVERFLOW)
	  fputs ("ns", file);
	else
	  fputs ("ge", file);
	return;

      case GT:
	fputs ("g", file);
	return;

      case LE:
	fputs ("le", file);
	return;

      case LT:
	if (cc_prev_status.flags & CC_NO_OVERFLOW)
	  fputs ("s", file);
	else
	  fputs ("l", file);
	return;

      case GEU:
	fputs ("ae", file);
	return;

      case GTU:
	fputs ("a", file);
	return;

      case LEU:
	fputs ("be", file);
	return;

      case LTU:
	fputs ("b", file);
	return;

      default:
	output_operand_lossage ("Invalid %%C operand");
      }

  else if (mode == MODE_FLOAT)
    /* Float compares set the flags via the 80387; only unsigned-style
       suffixes apply, and the ieee variants fold in REVERSE_CC.  */
    switch (code)
      {
      case NE:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "ne", file);
	return;
      case EQ:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "e", file);
	return;
      case GE:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file);
	return;
      case GT:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file);
	return;
      case LE:
	fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file);
	return;
      case LT:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file);
	return;
      case GEU:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file);
	return;
      case GTU:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file);
	return;
      case LEU:
	fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file);
	return;
      case LTU:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file);
	return;
      default:
	output_operand_lossage ("Invalid %%C operand");
      }
}
3438
3439 /* Meaning of CODE:
3440 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
3441 C -- print opcode suffix for set/cmov insn.
3442 c -- like C, but print reversed condition
3443 F -- print opcode suffix for fcmov insn.
3444 f -- like F, but print reversed condition
3445 D -- print the opcode suffix for a jump
3446 d -- like D, but print reversed condition
3447 R -- print the prefix for register names.
3448 z -- print the opcode suffix for the size of the current operand.
3449 * -- print a star (in certain assembler syntax)
3450 w -- print the operand as if it's a "word" (HImode) even if it isn't.
3451 J -- print the appropriate jump operand.
   s -- print a shift double count, followed by the assembler's argument
	delimiter.
3454 b -- print the QImode name of the register for the indicated operand.
3455 %b0 would print %al if operands[0] is reg 0.
3456 w -- likewise, print the HImode name of the register.
3457 k -- likewise, print the SImode name of the register.
3458 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
3459 y -- print "st(0)" instead of "st" as a register.
3460 P -- print as a PIC constant */
3461
/* Output operand X to FILE, honoring the output-modifier letter CODE.
   CODE is 0 for a plain operand; otherwise it is one of the letters
   documented in the table above.  */

void
print_operand (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  if (code)
    {
      switch (code)
	{
	case '*':
	  /* Star prefix, emitted only for assembler syntaxes that
	     want one (USE_STAR).  */
	  if (USE_STAR)
	    putc ('*', file);
	  return;

	case 'L':
	  PUT_OP_SIZE (code, 'l', file);
	  return;

	case 'W':
	  PUT_OP_SIZE (code, 'w', file);
	  return;

	case 'B':
	  PUT_OP_SIZE (code, 'b', file);
	  return;

	case 'Q':
	  /* NOTE(review): 'Q' emits an `l' suffix here rather than
	     `q' -- presumably DImode values are handled as register
	     pairs; confirm.  */
	  PUT_OP_SIZE (code, 'l', file);
	  return;

	case 'S':
	  PUT_OP_SIZE (code, 's', file);
	  return;

	case 'T':
	  PUT_OP_SIZE (code, 't', file);
	  return;

	case 'z':
	  /* 387 opcodes don't get size suffixes if the operands are
	     registers. */

	  if (STACK_REG_P (x))
	    return;

	  /* this is the size of op from size of operand */
	  switch (GET_MODE_SIZE (GET_MODE (x)))
	    {
	    case 1:
	      PUT_OP_SIZE ('B', 'b', file);
	      return;

	    case 2:
	      PUT_OP_SIZE ('W', 'w', file);
	      return;

	    case 4:
	      if (GET_MODE (x) == SFmode)
		{
		  PUT_OP_SIZE ('S', 's', file);
		  return;
		}
	      else
		PUT_OP_SIZE ('L', 'l', file);
	      return;

	    case 12:
	      PUT_OP_SIZE ('T', 't', file);
	      return;

	    case 8:
	      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
		{
#ifdef GAS_MNEMONICS
		  PUT_OP_SIZE ('Q', 'q', file);
		  return;
#else
		  /* NOTE(review): in the non-GAS integer case the `l'
		     size is emitted both here and again below -- looks
		     like a double suffix; confirm PUT_OP_SIZE guards
		     against emitting twice.  */
		  PUT_OP_SIZE ('Q', 'l', file); /* Fall through */
#endif
		}

	      PUT_OP_SIZE ('Q', 'l', file);
	      return;
	    }
	  /* NOTE(review): a mode size matching none of the cases above
	     falls through into the register-name cases below -- verify
	     that this is intended.  */

	case 'b':
	case 'w':
	case 'k':
	case 'h':
	case 'y':
	case 'P':
	case 'X':
	  /* These modifiers are handled by PRINT_REG (or, for 'P',
	     by the constant-output code at the bottom).  */
	  break;

	case 'J':
	  switch (GET_CODE (x))
	    {
	      /* These conditions are appropriate for testing the result
		 of an arithmetic operation, not for a compare operation.
		 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
		 CC_Z_IN_NOT_C false and not floating point. */
	    case NE: fputs ("jne", file); return;
	    case EQ: fputs ("je", file); return;
	    case GE: fputs ("jns", file); return;
	    case LT: fputs ("js", file); return;
	    case GEU: fputs ("jmp", file); return;  /* unsigned >= 0: always taken */
	    case GTU: fputs ("jne", file); return;  /* unsigned > 0: iff nonzero */
	    case LEU: fputs ("je", file); return;   /* unsigned <= 0: iff zero */
	    case LTU: fputs ("#branch never", file); return;  /* unsigned < 0: never */

	    /* no matching branches for GT nor LE */

	    default:
	      abort ();
	    }

	case 's':
	  /* Shift-double count: print it (and the argument delimiter)
	     only when the assembler wants an explicit count.  */
	  if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
	    {
	      PRINT_OPERAND (file, x, 0);
	      fputs (AS2C (,) + 1, file);
	    }

	  return;

	case 'D':
	  put_jump_code (GET_CODE (x), 0, file);
	  return;

	case 'd':
	  /* Like 'D', but with the sense of the condition reversed.  */
	  put_jump_code (GET_CODE (x), 1, file);
	  return;

	  /* This is used by the conditional move instructions.  */
	case 'C':
	  put_condition_code (GET_CODE (x), 0, MODE_INT, file);
	  return;

	  /* Like above, but reverse condition */
	case 'c':
	  put_condition_code (GET_CODE (x), 1, MODE_INT, file);
	  return;

	case 'F':
	  put_condition_code (GET_CODE (x), 0, MODE_FLOAT, file);
	  return;

	  /* Like above, but reverse condition */
	case 'f':
	  put_condition_code (GET_CODE (x), 1, MODE_FLOAT, file);
	  return;

	default:
	  {
	    char str[50];

	    sprintf (str, "invalid operand code `%c'", code);
	    output_operand_lossage (str);
	  }
	}
    }

  /* No modifier, or one of the b/w/k/h/y/P/X modifiers handled here:
     print the operand itself.  */
  if (GET_CODE (x) == REG)
    {
      PRINT_REG (x, code, file);
    }

  else if (GET_CODE (x) == MEM)
    {
      PRINT_PTR (x, file);
      if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
	{
	  if (flag_pic)
	    output_pic_addr_const (file, XEXP (x, 0), code);
	  else
	    output_addr_const (file, XEXP (x, 0));
	}
      else
	output_address (XEXP (x, 0));
    }

  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* Print an SFmode constant as its 32-bit target image in hex.  */
      REAL_VALUE_TYPE r;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_TARGET_SINGLE (r, l);
      PRINT_IMMED_PREFIX (file);
      fprintf (file, "0x%lx", l);
    }

  /* These float cases don't actually occur as immediate operands. */
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE r;
      char dstr[30];

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
      fprintf (file, "%s", dstr);
    }

  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
    {
      REAL_VALUE_TYPE r;
      char dstr[30];

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
      fprintf (file, "%s", dstr);
    }
  else
    {
      /* A constant: emit the immediate or offset prefix as appropriate.
	 'P' suppresses the prefix (bare PIC constant).  */
      if (code != 'P')
	{
	  if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	    PRINT_IMMED_PREFIX (file);
	  else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
		   || GET_CODE (x) == LABEL_REF)
	    PRINT_OFFSET_PREFIX (file);
	}
      if (flag_pic)
	output_pic_addr_const (file, x, code);
      else
	output_addr_const (file, x);
    }
}
3690 \f
/* Print a memory operand whose address is ADDR. */

void
print_operand_address (file, addr)
     FILE *file;
     register rtx addr;
{
  register rtx reg1, reg2, breg, ireg;
  rtx offset;

  switch (GET_CODE (addr))
    {
    case REG:
      /* Simple register-indirect: emit the "e"-prefixed 32-bit
	 register name.  */
      ADDR_BEG (file);
      fprintf (file, "%se", RP);
      fputs (hi_reg_name[REGNO (addr)], file);
      ADDR_END (file);
      break;

    case PLUS:
      /* Decompose the PLUS into base register, (possibly scaled)
	 index register, and constant displacement.  */
      reg1 = 0;
      reg2 = 0;
      ireg = 0;
      breg = 0;
      offset = 0;

      /* Strip off a constant displacement, whichever side it is on.  */
      if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
	{
	  offset = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
	{
	  offset = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}

      /* If a PLUS remains, pull out one register or scaled index.  */
      if (GET_CODE (addr) != PLUS)
	;
      else if (GET_CODE (XEXP (addr, 0)) == MULT)
	reg1 = XEXP (addr, 0), addr = XEXP (addr, 1);
      else if (GET_CODE (XEXP (addr, 1)) == MULT)
	reg1 = XEXP (addr, 1), addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 0)) == REG)
	reg1 = XEXP (addr, 0), addr = XEXP (addr, 1);
      else if (GET_CODE (XEXP (addr, 1)) == REG)
	reg1 = XEXP (addr, 1), addr = XEXP (addr, 0);

      /* Whatever is left may be the second register / scaled index.  */
      if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
	{
	  if (reg1 == 0)
	    reg1 = addr;
	  else
	    reg2 = addr;

	  addr = 0;
	}

      if (offset != 0)
	{
	  if (addr != 0)
	    abort ();
	  addr = offset;
	}

      /* Decide which register is the base and which the index;
	 a MULT can only be an index.  */
      if ((reg1 && GET_CODE (reg1) == MULT)
	  || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
	{
	  breg = reg2;
	  ireg = reg1;
	}
      else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
	{
	  breg = reg1;
	  ireg = reg2;
	}

      if (ireg != 0 || breg != 0)
	{
	  int scale = 1;

	  /* Emit the displacement first.  */
	  if (addr != 0)
	    {
	      if (flag_pic)
		output_pic_addr_const (file, addr, 0);
	      else if (GET_CODE (addr) == LABEL_REF)
		output_asm_label (addr);
	      else
		output_addr_const (file, addr);
	    }

	  if (ireg != 0 && GET_CODE (ireg) == MULT)
	    {
	      scale = INTVAL (XEXP (ireg, 1));
	      ireg = XEXP (ireg, 0);
	    }

	  /* The stack pointer can only appear as a base register,
	     never an index register, so exchange the regs if it is wrong. */

	  if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
	    {
	      rtx tmp;

	      tmp = breg;
	      breg = ireg;
	      ireg = tmp;
	    }

	  /* output breg+ireg*scale */
	  PRINT_B_I_S (breg, ireg, scale, file);
	  break;
	}
      /* NOTE(review): when neither a base nor an index register was
	 found, control falls through into the MULT case below even
	 though ADDR need not then be a MULT -- confirm this is
	 unreachable for valid addresses.  */

    case MULT:
      {
	/* Scaled index with no base: index*scale plus an explicit
	   zero displacement.  */
	int scale;

	if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
	  {
	    scale = INTVAL (XEXP (addr, 0));
	    ireg = XEXP (addr, 1);
	  }
	else
	  {
	    scale = INTVAL (XEXP (addr, 1));
	    ireg = XEXP (addr, 0);
	  }

	output_addr_const (file, const0_rtx);
	PRINT_B_I_S (NULL_RTX, ireg, scale, file);
      }
      break;

    default:
      /* A bare constant address.  Small integers are printed
	 directly; everything else goes through the constant
	 printers.  */
      if (GET_CODE (addr) == CONST_INT
	  && INTVAL (addr) < 0x8000
	  && INTVAL (addr) >= -0x8000)
	fprintf (file, "%d", (int) INTVAL (addr));
      else
	{
	  if (flag_pic)
	    output_pic_addr_const (file, addr, 0);
	  else
	    output_addr_const (file, addr);
	}
    }
}
3838 \f
/* Set the cc_status for the results of an insn whose pattern is EXP.
   On the 80386, we assume that only test and compare insns, as well
   as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, BSF, ASHIFT,
   ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
   Also, we assume that jumps, moves and sCOND don't affect the condition
   codes.  All else clobbers the condition codes, by assumption.

   We assume that ALL integer add, minus, etc. instructions affect the
   condition codes.  This MUST be consistent with i386.md.

   We don't record any float test or compare - the redundant test &
   compare check in final.c does not handle stack-like regs correctly. */

void
notice_update_cc (exp)
     rtx exp;
{
  if (GET_CODE (exp) == SET)
    {
      /* Jumps do not alter the cc's. */
      if (SET_DEST (exp) == pc_rtx)
	return;

      /* Moving register or memory into a register:
	 it doesn't alter the cc's, but it might invalidate
	 the RTX's which we remember the cc's came from.
	 (Note that moving a constant 0 or 1 MAY set the cc's). */
      if (REG_P (SET_DEST (exp))
	  && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
	      || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'
	      || (GET_CODE (SET_SRC (exp)) == IF_THEN_ELSE
		  && GET_MODE_CLASS (GET_MODE (SET_DEST (exp))) == MODE_INT)))
	{
	  if (cc_status.value1
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
	    cc_status.value1 = 0;

	  if (cc_status.value2
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
	    cc_status.value2 = 0;

	  return;
	}

      /* Moving register into memory doesn't alter the cc's.
	 It may invalidate the RTX's which we remember the cc's came from. */
      if (GET_CODE (SET_DEST (exp)) == MEM
	  && (REG_P (SET_SRC (exp))
	      || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
	{
	  if (cc_status.value1
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
	    cc_status.value1 = 0;
	  if (cc_status.value2
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
	    cc_status.value2 = 0;

	  return;
	}

      /* Function calls clobber the cc's. */
      else if (GET_CODE (SET_SRC (exp)) == CALL)
	{
	  CC_STATUS_INIT;
	  return;
	}

      /* Tests and compares set the cc's in predictable ways. */
      else if (SET_DEST (exp) == cc0_rtx)
	{
	  CC_STATUS_INIT;
	  cc_status.value1 = SET_SRC (exp);
	  return;
	}

      /* Certain instructions affect the condition codes. */
      else if (GET_MODE (SET_SRC (exp)) == SImode
	       || GET_MODE (SET_SRC (exp)) == HImode
	       || GET_MODE (SET_SRC (exp)) == QImode)
	switch (GET_CODE (SET_SRC (exp)))
	  {
	  case ASHIFTRT: case LSHIFTRT: case ASHIFT:
	    /* Shifts on the 386 don't set the condition codes if the
	       shift count is zero. */
	    if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
	      {
		CC_STATUS_INIT;
		break;
	      }

	    /* We assume that the CONST_INT is non-zero (this rtx would
	       have been deleted if it were zero).  Fall through.  */

	  case PLUS: case MINUS: case NEG:
	  case AND: case IOR: case XOR:
	    cc_status.flags = CC_NO_OVERFLOW;
	    cc_status.value1 = SET_SRC (exp);
	    cc_status.value2 = SET_DEST (exp);
	    break;

	    /* This is the bsf pattern used by ffs. */
	  case UNSPEC:
	    if (XINT (SET_SRC (exp), 1) == 5)
	      {
		/* Only the Z flag is defined after bsf. */
		cc_status.flags
		  = CC_NOT_POSITIVE | CC_NOT_NEGATIVE | CC_NO_OVERFLOW;
		cc_status.value1 = XVECEXP (SET_SRC (exp), 0, 0);
		cc_status.value2 = 0;
		break;
	      }
	    /* FALLTHRU */

	  default:
	    CC_STATUS_INIT;
	  }
      else
	{
	  CC_STATUS_INIT;
	}
    }
  else if (GET_CODE (exp) == PARALLEL
	   && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
    {
      if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
	return;
      if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)

	{
	  CC_STATUS_INIT;
	  if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
	    {
	      /* Result of a 387 compare: the flags live in the FPU
		 status word until transferred.  */
	      cc_status.flags |= CC_IN_80387;
	      /* NOTE(review): the "0 &&" disables the FCOMI path
		 here, apparently deliberately -- confirm before
		 enabling.  */
	      if (0 && TARGET_CMOVE && stack_regs_mentioned_p
		  (XEXP (SET_SRC (XVECEXP (exp, 0, 0)), 1)))
		cc_status.flags |= CC_FCOMI;
	    }
	  else
	    cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
	  return;
	}

      CC_STATUS_INIT;
    }
  else
    {
      CC_STATUS_INIT;
    }
}
3988 \f
3989 /* Split one or more DImode RTL references into pairs of SImode
3990 references. The RTL can be REG, offsettable MEM, integer constant, or
3991 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3992 split and "num" is its length. lo_half and hi_half are output arrays
3993 that parallel "operands". */
3994
3995 void
3996 split_di (operands, num, lo_half, hi_half)
3997 rtx operands[];
3998 int num;
3999 rtx lo_half[], hi_half[];
4000 {
4001 while (num--)
4002 {
4003 rtx op = operands[num];
4004 if (! reload_completed)
4005 {
4006 lo_half[num] = gen_lowpart (SImode, op);
4007 hi_half[num] = gen_highpart (SImode, op);
4008 }
4009 else if (GET_CODE (op) == REG)
4010 {
4011 lo_half[num] = gen_rtx_REG (SImode, REGNO (op));
4012 hi_half[num] = gen_rtx_REG (SImode, REGNO (op) + 1);
4013 }
4014 else if (CONSTANT_P (op))
4015 split_double (op, &lo_half[num], &hi_half[num]);
4016 else if (offsettable_memref_p (op))
4017 {
4018 rtx lo_addr = XEXP (op, 0);
4019 rtx hi_addr = XEXP (adj_offsettable_operand (op, 4), 0);
4020 lo_half[num] = change_address (op, SImode, lo_addr);
4021 hi_half[num] = change_address (op, SImode, hi_addr);
4022 }
4023 else
4024 abort();
4025 }
4026 }
4027 \f
4028 /* Return 1 if this is a valid binary operation on a 387.
4029 OP is the expression matched, and MODE is its mode. */
4030
4031 int
4032 binary_387_op (op, mode)
4033 register rtx op;
4034 enum machine_mode mode;
4035 {
4036 if (mode != VOIDmode && mode != GET_MODE (op))
4037 return 0;
4038
4039 switch (GET_CODE (op))
4040 {
4041 case PLUS:
4042 case MINUS:
4043 case MULT:
4044 case DIV:
4045 return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
4046
4047 default:
4048 return 0;
4049 }
4050 }
4051 \f
4052 /* Return 1 if this is a valid shift or rotate operation on a 386.
4053 OP is the expression matched, and MODE is its mode. */
4054
4055 int
4056 shift_op (op, mode)
4057 register rtx op;
4058 enum machine_mode mode;
4059 {
4060 rtx operand = XEXP (op, 0);
4061
4062 if (mode != VOIDmode && mode != GET_MODE (op))
4063 return 0;
4064
4065 if (GET_MODE (operand) != GET_MODE (op)
4066 || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
4067 return 0;
4068
4069 return (GET_CODE (op) == ASHIFT
4070 || GET_CODE (op) == ASHIFTRT
4071 || GET_CODE (op) == LSHIFTRT
4072 || GET_CODE (op) == ROTATE
4073 || GET_CODE (op) == ROTATERT);
4074 }
4075
4076 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
4077 MODE is not used. */
4078
4079 int
4080 VOIDmode_compare_op (op, mode)
4081 register rtx op;
4082 enum machine_mode mode ATTRIBUTE_UNUSED;
4083 {
4084 return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
4085 }
4086 \f
/* Output code to perform a 387 binary operation in INSN, one of PLUS,
   MINUS, MULT or DIV.  OPERANDS are the insn operands, where operands[3]
   is the expression of the binary operation.  The output may either be
   emitted here, or returned to the caller, like all output_* functions.

   There is no guarantee that the operands are the same mode, as they
   might be within FLOAT or FLOAT_EXTEND expressions. */

char *
output_387_binary_op (insn, operands)
     rtx insn;
     rtx *operands;
{
  rtx temp;
  char *base_op;
  static char buf[100];

  /* Pick the base mnemonic; the "fi" forms take an integer memory
     operand.  */
  switch (GET_CODE (operands[3]))
    {
    case PLUS:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fiadd";
      else
	base_op = "fadd";
      break;

    case MINUS:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fisub";
      else
	base_op = "fsub";
      break;

    case MULT:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fimul";
      else
	base_op = "fmul";
      break;

    case DIV:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fidiv";
      else
	base_op = "fdiv";
      break;

    default:
      abort ();
    }

  strcpy (buf, base_op);

  /* Now choose the operand form.  Commutative operations may swap
     operands; MINUS and DIV may need the reversed ("r") mnemonics.
     The "p" forms pop the 387 stack when a source register dies.  */
  switch (GET_CODE (operands[3]))
    {
    case MULT:
    case PLUS:
      /* Commutative: if the destination coincides with operand 2,
	 swap so the destination is operand 1.  */
      if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
	{
	  temp = operands[2];
	  operands[2] = operands[1];
	  operands[1] = temp;
	}

      if (GET_CODE (operands[2]) == MEM)
	return strcat (buf, AS1 (%z2,%2));

      if (NON_STACK_REG_P (operands[1]))
	{
	  output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
	  return "";
	}

      else if (NON_STACK_REG_P (operands[2]))
	{
	  output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
	  return "";
	}

      /* Use the popping form when the source stack register dies.  */
      if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
	{
	  if (STACK_TOP_P (operands[0]))
	    return strcat (buf, AS2 (p,%0,%2));
	  else
	    return strcat (buf, AS2 (p,%2,%0));
	}

      if (STACK_TOP_P (operands[0]))
	return strcat (buf, AS2C (%y2,%0));
      else
	return strcat (buf, AS2C (%2,%0));

    case MINUS:
    case DIV:
      /* Non-commutative: the reversed forms are used when operand 1
	 is the one in memory or the one being popped.  */
      if (GET_CODE (operands[1]) == MEM)
	return strcat (buf, AS1 (r%z1,%1));

      if (GET_CODE (operands[2]) == MEM)
	return strcat (buf, AS1 (%z2,%2));

      if (NON_STACK_REG_P (operands[1]))
	{
	  output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
	  return "";
	}

      else if (NON_STACK_REG_P (operands[2]))
	{
	  output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
	  return "";
	}

      if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
	abort ();

      if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
	{
	  if (STACK_TOP_P (operands[0]))
	    return strcat (buf, AS2 (p,%0,%2));
	  else
	    return strcat (buf, AS2 (rp,%2,%0));
	}

      if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
	{
	  if (STACK_TOP_P (operands[0]))
	    return strcat (buf, AS2 (rp,%0,%1));
	  else
	    return strcat (buf, AS2 (p,%1,%0));
	}

      if (STACK_TOP_P (operands[0]))
	{
	  if (STACK_TOP_P (operands[1]))
	    return strcat (buf, AS2C (%y2,%0));
	  else
	    return strcat (buf, AS2 (r,%y1,%0));
	}
      else if (STACK_TOP_P (operands[1]))
	return strcat (buf, AS2C (%1,%0));
      else
	return strcat (buf, AS2 (r,%2,%0));

    default:
      abort ();
    }
}
4238 \f
/* Output code for INSN to convert a float to a signed int.  OPERANDS
   are the insn operands.  The output may be SFmode or DFmode and the
   input operand may be SImode or DImode.  As a special case, make sure
   that the 387 stack top dies if the output mode is DImode, because the
   hardware requires this. */

char *
output_fix_trunc (insn, operands)
     rtx insn;
     rtx *operands;
{
  int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
  rtx xops[2];

  if (! STACK_TOP_P (operands[1]))
    abort ();

  /* Save the FPU control word, set rounding-control bits in the high
     byte of a copy (the constant 12 == 0x0c presumably selects
     round-toward-zero -- TODO confirm against the 387 control word
     layout), and load the modified word.  */
  xops[0] = GEN_INT (12);
  xops[1] = operands[4];

  output_asm_insn (AS1 (fnstc%W2,%2), operands);
  output_asm_insn (AS2 (mov%L2,%2,%4), operands);
  output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
  output_asm_insn (AS2 (mov%L4,%4,%3), operands);
  output_asm_insn (AS1 (fldc%W3,%3), operands);

  if (NON_STACK_REG_P (operands[0]))
    output_to_reg (operands[0], stack_top_dies, operands[3]);

  else if (GET_CODE (operands[0]) == MEM)
    {
      if (stack_top_dies)
	output_asm_insn (AS1 (fistp%z0,%0), operands);
      else if (GET_MODE (operands[0]) == DImode && ! stack_top_dies)
	{
	  /* There is no DImode version of this without a stack pop, so
	     we must emulate it.  It doesn't matter much what the second
	     instruction is, because the value being pushed on the FP stack
	     is not used except for the following stack popping store.
	     This case can only happen without optimization, so it doesn't
	     matter that it is inefficient. */
	  output_asm_insn (AS1 (fistp%z0,%0), operands);
	  output_asm_insn (AS1 (fild%z0,%0), operands);
	}
      else
	output_asm_insn (AS1 (fist%z0,%0), operands);
    }
  else
    abort ();

  /* Restore the original control word.  */
  return AS1 (fldc%W2,%2);
}
4291 \f
/* Output code for INSN to compare OPERANDS.  The two operands might
   not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
   expression.  If the compare is in mode CCFPEQmode, use an opcode that
   will not fault if a qNaN is present. */

char *
output_float_compare (insn, operands)
     rtx insn;
     rtx *operands;
{
  int stack_top_dies;
  rtx body = XVECEXP (PATTERN (insn), 0, 0);
  int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
  rtx tmp;

  /* NOTE(review): the "0 &&" disables the FCOMI path, apparently
     deliberately -- confirm before enabling.  */
  if (0 && TARGET_CMOVE && STACK_REG_P (operands[1]))
    {
      cc_status.flags |= CC_FCOMI;
      cc_prev_status.flags &= ~CC_TEST_AX;
    }

  /* The 387 compares against the top of its stack; swap the operands
     (and remember that the sense is reversed) if necessary.  */
  if (! STACK_TOP_P (operands[0]))
    {
      tmp = operands[0];
      operands[0] = operands[1];
      operands[1] = tmp;
      cc_status.flags |= CC_REVERSED;
    }

  if (! STACK_TOP_P (operands[0]))
    abort ();

  stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;

  if (STACK_REG_P (operands[1])
      && stack_top_dies
      && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
      && REGNO (operands[1]) != FIRST_STACK_REG)
    {
      /* If both the top of the 387 stack dies, and the other operand
	 is also a stack register that dies, then this must be a
	 `fcompp' float compare */

      if (unordered_compare)
	{
	  if (cc_status.flags & CC_FCOMI)
	    {
	      output_asm_insn (AS2 (fucomip,%y1,%0), operands);
	      output_asm_insn (AS1 (fstp, %y0), operands);
	      return "";
	    }
	  else
	    output_asm_insn ("fucompp", operands);
	}
      else
	{
	  if (cc_status.flags & CC_FCOMI)
	    {
	      output_asm_insn (AS2 (fcomip, %y1,%0), operands);
	      output_asm_insn (AS1 (fstp, %y0), operands);
	      return "";
	    }
	  else
	    output_asm_insn ("fcompp", operands);
	}
    }
  else
    {
      static char buf[100];

      /* Decide if this is the integer or float compare opcode, or the
	 unordered float compare. */

      if (unordered_compare)
	strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fucomi" : "fucom");
      else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
	strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fcomi" : "fcom");
      else
	strcpy (buf, "ficom");

      /* Modify the opcode if the 387 stack is to be popped. */

      if (stack_top_dies)
	strcat (buf, "p");

      if (NON_STACK_REG_P (operands[1]))
	output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
      else if (cc_status.flags & CC_FCOMI)
	{
	  output_asm_insn (strcat (buf, AS2 (%z1,%y1,%0)), operands);
	  return "";
	}
      else
	output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
    }

  /* Now retrieve the condition code. */

  return output_fp_cc0_set (insn);
}
4392 \f
/* Output opcodes to transfer the results of FP compare or test INSN
   from the FPU to the CPU flags.  If TARGET_IEEE_FP, ensure that if the
   result of the compare or test is unordered, no comparison operator
   succeeds except NE.  Return an output template, if any. */

char *
output_fp_cc0_set (insn)
     rtx insn;
{
  rtx xops[3];
  rtx next;
  enum rtx_code code;

  /* Store the 387 status word into %ax.  */
  xops[0] = gen_rtx_REG (HImode, 0);
  output_asm_insn (AS1 (fnsts%W0,%0), xops);

  if (! TARGET_IEEE_FP)
    {
      if (!(cc_status.flags & CC_REVERSED))
	{
	  /* See what comparison the flags will feed; for the simple
	     orderings the user can test %eax directly rather than
	     going through sahf.  */
	  next = next_cc0_user (insn);

	  if (GET_CODE (next) == JUMP_INSN
	      && GET_CODE (PATTERN (next)) == SET
	      && SET_DEST (PATTERN (next)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
	    code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
	  else if (GET_CODE (PATTERN (next)) == SET)
	    code = GET_CODE (SET_SRC (PATTERN (next)));
	  else
	    return "sahf";

	  if (code == GT || code == LT || code == EQ || code == NE
	      || code == LE || code == GE)
	    {
	      /* We will test eax directly. */
	      cc_status.flags |= CC_TEST_AX;
	      return "";
	    }
	}

      return "sahf";
    }

  /* IEEE mode: emit explicit tests of the 387 condition bits held in
     AH so that an unordered result fails every comparison except NE.
     Find the comparison code from the next cc0 user.  */
  next = next_cc0_user (insn);
  if (next == NULL_RTX)
    abort ();

  if (GET_CODE (next) == JUMP_INSN
      && GET_CODE (PATTERN (next)) == SET
      && SET_DEST (PATTERN (next)) == pc_rtx
      && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
    code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
  else if (GET_CODE (PATTERN (next)) == SET)
    {
      if (GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
	code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
      else
	code = GET_CODE (SET_SRC (PATTERN (next)));
    }

  else if (GET_CODE (PATTERN (next)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (next), 0, 0)) == SET)
    {
      if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0))) == IF_THEN_ELSE)
	code = GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next), 0, 0)), 0));
      else
	code = GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0)));
    }
  else
    abort ();

  xops[0] = gen_rtx_REG (QImode, 0);

  /* The masks below pick 387 condition bits (C0/C2/C3) out of AH;
     0x45 presumably selects C3|C2|C0 -- TODO confirm against the 387
     status word layout in the Intel manual.  */
  switch (code)
    {
    case GT:
      xops[1] = GEN_INT (0x45);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      /* je label */
      break;

    case LT:
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x01);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* je label */
      break;

    case GE:
      xops[1] = GEN_INT (0x05);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      /* je label */
      break;

    case LE:
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS1 (dec%B0,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* jb label */
      break;

    case EQ:
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* je label */
      break;

    case NE:
      xops[1] = GEN_INT (0x44);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
      /* jne label */
      break;

    case GTU:
    case LTU:
    case GEU:
    case LEU:
      /* Unsigned comparisons are not expected for FP results.  */
    default:
      abort ();
    }

  return "";
}
4524 \f
/* Number of reusable stack slots cached per machine mode.  */
#define MAX_386_STACK_LOCALS 2

/* Cache of stack slots for the current function, indexed by machine
   mode and slot number; cleared by clear_386_stack_locals and filled
   lazily by assign_386_stack_local.  */
static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];

/* Define the structure for the machine field in struct function. */
struct machine_function
{
  /* Saved copy of the i386_stack_locals cache above.  */
  rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
  /* PIC label rtx and its name, saved/restored across nested
     functions (see save/restore_386_machine_status below).  */
  rtx pic_label_rtx;
  char pic_label_name[256];
};
4536
4537 /* Functions to save and restore i386_stack_locals.
4538 These will be called, via pointer variables,
4539 from push_function_context and pop_function_context. */
4540
4541 void
4542 save_386_machine_status (p)
4543 struct function *p;
4544 {
4545 p->machine
4546 = (struct machine_function *) xmalloc (sizeof (struct machine_function));
4547 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
4548 sizeof i386_stack_locals);
4549 p->machine->pic_label_rtx = pic_label_rtx;
4550 bcopy (pic_label_name, p->machine->pic_label_name, 256);
4551 }
4552
4553 void
4554 restore_386_machine_status (p)
4555 struct function *p;
4556 {
4557 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
4558 sizeof i386_stack_locals);
4559 pic_label_rtx = p->machine->pic_label_rtx;
4560 bcopy (p->machine->pic_label_name, pic_label_name, 256);
4561 free (p->machine);
4562 p->machine = NULL;
4563 }
4564
4565 /* Clear stack slot assignments remembered from previous functions.
4566 This is called from INIT_EXPANDERS once before RTL is emitted for each
4567 function. */
4568
4569 void
4570 clear_386_stack_locals ()
4571 {
4572 enum machine_mode mode;
4573 int n;
4574
4575 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
4576 mode = (enum machine_mode) ((int) mode + 1))
4577 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
4578 i386_stack_locals[(int) mode][n] = NULL_RTX;
4579
4580 pic_label_rtx = NULL_RTX;
4581 bzero (pic_label_name, 256);
4582 /* Arrange to save and restore i386_stack_locals around nested functions. */
4583 save_machine_status = save_386_machine_status;
4584 restore_machine_status = restore_386_machine_status;
4585 }
4586
4587 /* Return a MEM corresponding to a stack slot with mode MODE.
4588 Allocate a new slot if necessary.
4589
4590 The RTL for a function can have several slots available: N is
4591 which slot to use. */
4592
4593 rtx
4594 assign_386_stack_local (mode, n)
4595 enum machine_mode mode;
4596 int n;
4597 {
4598 if (n < 0 || n >= MAX_386_STACK_LOCALS)
4599 abort ();
4600
4601 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
4602 i386_stack_locals[(int) mode][n]
4603 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
4604
4605 return i386_stack_locals[(int) mode][n];
4606 }
4607 \f
4608 int is_mul(op,mode)
4609 register rtx op;
4610 enum machine_mode mode ATTRIBUTE_UNUSED;
4611 {
4612 return (GET_CODE (op) == MULT);
4613 }
4614
4615 int is_div(op,mode)
4616 register rtx op;
4617 enum machine_mode mode ATTRIBUTE_UNUSED;
4618 {
4619 return (GET_CODE (op) == DIV);
4620 }
4621 \f
4622 #ifdef NOTYET
/* Create a new copy of an rtx.
   Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.
   Doesn't share CONST.

   NOTE(review): only the top level is copied by this function; operand
   subexpressions go through the ordinary copy_rtx, which *does* share
   CONSTs -- confirm this asymmetry is intentional.  */

rtx
copy_all_rtx (orig)
     register rtx orig;
{
  register rtx copy;
  register int i, j;
  register RTX_CODE code;
  register char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    /* These codes are always shared, never copied.  */
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values. */
      return orig;

#if 0
    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
	 a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
	return orig;
      break;
#endif
      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */
    }

  /* Allocate a fresh rtx of the same code and duplicate the mode and
     the per-rtx flag bits.  */
  copy = rtx_alloc (code);
  PUT_MODE (copy, GET_MODE (orig));
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;
  copy->integrated = orig->integrated;
  /* intel1 */
  copy->is_spill_rtx = orig->is_spill_rtx;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  /* Copy each operand according to its format letter.  */
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  /* An expression operand: deep-copy it (via copy_rtx, which
	     shares the sharable codes).  */
	  XEXP (copy, i) = XEXP (orig, i);
	  if (XEXP (orig, i) != NULL)
	    XEXP (copy, i) = copy_rtx (XEXP (orig, i));
	  break;

	case '0':
	case 'u':
	  /* '0' (machine-specific slot) and 'u' (insn reference)
	     operands are shared, not copied.  */
	  XEXP (copy, i) = XEXP (orig, i);
	  break;

	case 'E':
	case 'V':
	  /* A vector of expressions: allocate a fresh rtvec and
	     deep-copy every element.  */
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
	    }
	  break;

	case 'w':
	  /* Wide integer.  */
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  /* Plain integer.  */
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	case 'S':
	  /* Strings are shared.  */
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	default:
	  abort ();
	}
    }
  return copy;
}
4725
4726 \f
/* Try to rewrite a memory address to make it valid.

   MEM_RTX is a MEM whose address may not satisfy the machine's
   addressing constraints.  If a valid equivalent form can be built,
   XEXP (MEM_RTX, 0) is replaced in place; otherwise the MEM is left
   untouched.  The function returns no value and gives no indication
   of whether it succeeded.

   NOTE(review): `in_struct' is computed but never used below, and the
   obstack storage obtained from oballoc is only released (obfree) on
   failure paths -- the successful-rewrite paths appear to leak it.
   Both look like pre-existing oddities; confirm before changing.  */

void
rewrite_address (mem_rtx)
     rtx mem_rtx;
{
  rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
  int scale = 1;
  int offset_adjust = 0;		/* accumulated constant displacement */
  int was_only_offset = 0;		/* address reduced to a bare offset */
  rtx mem_addr = XEXP (mem_rtx, 0);
  char *storage = oballoc (0);		/* obstack mark for rollback */
  int in_struct = 0;
  int is_spill_rtx = 0;

  in_struct = MEM_IN_STRUCT_P (mem_rtx);
  is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);

  /* First special case: (plus A (plus (reg) B)) can sometimes be
     re-associated to (plus (plus A (reg)) B).  */
  if (GET_CODE (mem_addr) == PLUS
      && GET_CODE (XEXP (mem_addr, 1)) == PLUS
      && GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
    {
      /* This part is utilized by the combiner. */
      ret_rtx
	= gen_rtx (PLUS, GET_MODE (mem_addr),
		   gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
			    XEXP (mem_addr, 0), XEXP (XEXP (mem_addr, 1), 0)),
		   XEXP (XEXP (mem_addr, 1), 1));

      if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
	{
	  /* Re-associated form is valid: install it.  */
	  XEXP (mem_rtx, 0) = ret_rtx;
	  RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
	  return;
	}

      /* Roll back the rtx built above.  */
      obfree (storage);
    }

  /* This part is utilized by loop.c.
     If the address contains PLUS (reg,const) and this pattern is invalid
     in this case - try to rewrite the address to make it valid.  */
  storage = oballoc (0);
  index_rtx = base_rtx = offset_rtx = NULL;

  /* Find the base index and offset elements of the memory address.  */
  if (GET_CODE (mem_addr) == PLUS)
    {
      if (GET_CODE (XEXP (mem_addr, 0)) == REG)
	{
	  /* (plus reg reg) -> base + index; (plus reg X) -> base + offset.  */
	  if (GET_CODE (XEXP (mem_addr, 1)) == REG)
	    base_rtx = XEXP (mem_addr, 1), index_rtx = XEXP (mem_addr, 0);
	  else
	    base_rtx = XEXP (mem_addr, 0), offset_rtx = XEXP (mem_addr, 1);
	}

      else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
	{
	  /* (plus (mult ...) X): the MULT is the scaled index.  */
	  index_rtx = XEXP (mem_addr, 0);
	  if (GET_CODE (XEXP (mem_addr, 1)) == REG)
	    base_rtx = XEXP (mem_addr, 1);
	  else
	    offset_rtx = XEXP (mem_addr, 1);
	}

      else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
	{
	  /* Deeply nested form:
	     (plus (plus (plus (mult reg const) const) reg) symbol).  */
	  if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT
	      && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0))
		  == REG)
	      && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1))
		  == CONST_INT)
	      && (GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1))
		  == CONST_INT)
	      && GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG
	      && GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF)
	    {
	      index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
	      offset_rtx = XEXP (mem_addr, 1);
	      base_rtx = XEXP (XEXP (mem_addr, 0), 1);
	      offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
	    }
	  else
	    {
	      /* Generic (plus (plus index base) offset).  */
	      offset_rtx = XEXP (mem_addr, 1);
	      index_rtx = XEXP (XEXP (mem_addr, 0), 0);
	      base_rtx = XEXP (XEXP (mem_addr, 0), 1);
	    }
	}

      else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT)
	{
	  /* (plus const X): fold the constant into offset_adjust.  */
	  was_only_offset = 1;
	  index_rtx = NULL;
	  base_rtx = NULL;
	  offset_rtx = XEXP (mem_addr, 1);
	  offset_adjust = INTVAL (XEXP (mem_addr, 0));
	  if (offset_adjust == 0)
	    {
	      /* Adding zero: the other operand is the whole address.  */
	      XEXP (mem_rtx, 0) = offset_rtx;
	      RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
	      return;
	    }
	}
      else
	{
	  /* Unrecognized shape: give up.  */
	  obfree (storage);
	  return;
	}
    }
  else if (GET_CODE (mem_addr) == MULT)
    index_rtx = mem_addr;
  else
    {
      obfree (storage);
      return;
    }

  /* Separate a MULT index into index register and scale factor.  */
  if (index_rtx != 0 && GET_CODE (index_rtx) == MULT)
    {
      if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
	{
	  /* Non-constant scale cannot be encoded.  */
	  obfree (storage);
	  return;
	}

      scale_rtx = XEXP (index_rtx, 1);
      scale = INTVAL (scale_rtx);
      index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
    }

  /* Now find which of the elements are invalid and try to fix them. */
  if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
    {
      /* Constant "index": fold index*scale into the offset.  */
      offset_adjust = INTVAL (index_rtx) * scale;

      if (offset_rtx != 0 && CONSTANT_P (offset_rtx))
	offset_rtx = plus_constant (offset_rtx, offset_adjust);
      else if (offset_rtx == 0)
	offset_rtx = const0_rtx;

      RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
      XEXP (mem_rtx, 0) = offset_rtx;
      return;
    }

  /* A base of (plus reg const): move the constant into offset_adjust.  */
  if (base_rtx && GET_CODE (base_rtx) == PLUS
      && GET_CODE (XEXP (base_rtx, 0)) == REG
      && GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
    {
      offset_adjust += INTVAL (XEXP (base_rtx, 1));
      base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
    }

  else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
    {
      /* A purely constant base is just more offset.  */
      offset_adjust += INTVAL (base_rtx);
      base_rtx = NULL;
    }

  /* Likewise an index of (plus reg const); the constant is scaled.  */
  if (index_rtx && GET_CODE (index_rtx) == PLUS
      && GET_CODE (XEXP (index_rtx, 0)) == REG
      && GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
    {
      offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
      index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
    }

  /* The index must be encodable (esp is allowed only unscaled with no
     base, where it can serve as the base instead).  */
  if (index_rtx)
    {
      if (! LEGITIMATE_INDEX_P (index_rtx)
	  && ! (index_rtx == stack_pointer_rtx && scale == 1
		&& base_rtx == NULL))
	{
	  obfree (storage);
	  return;
	}
    }

  if (base_rtx)
    {
      if (! LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
	{
	  obfree (storage);
	  return;
	}
    }

  /* Rebuild the address from the fixed-up pieces, but only when some
     constant displacement was actually folded in.  */
  if (offset_adjust != 0)
    {
      /* NOTE(review): a non-constant offset_rtx is silently replaced by
	 const0_rtx here -- verify no caller can reach this with a
	 non-constant offset.  */
      if (offset_rtx != 0 && CONSTANT_P (offset_rtx))
	offset_rtx = plus_constant (offset_rtx, offset_adjust);
      else
	offset_rtx = const0_rtx;

      if (index_rtx)
	{
	  if (base_rtx)
	    {
	      if (scale != 1)
		{
		  /* base + index*scale [+ offset].  */
		  ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
				     gen_rtx (MULT, GET_MODE (index_rtx),
					      index_rtx, scale_rtx),
				     base_rtx);

		  if (GET_CODE (offset_rtx) != CONST_INT
		      || INTVAL (offset_rtx) != 0)
		    ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
				       ret_rtx, offset_rtx);
		}
	      else
		{
		  /* base + index [+ offset].  */
		  ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx),
				     index_rtx, base_rtx);

		  if (GET_CODE (offset_rtx) != CONST_INT
		      || INTVAL (offset_rtx) != 0)
		    ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
				       ret_rtx, offset_rtx);
		}
	    }
	  else
	    {
	      if (scale != 1)
		{
		  /* index*scale [+ offset].  */
		  ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx),
				     index_rtx, scale_rtx);

		  if (GET_CODE (offset_rtx) != CONST_INT
		      || INTVAL (offset_rtx) != 0)
		    ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
				       ret_rtx, offset_rtx);
		}
	      else
		{
		  /* index [+ offset].  */
		  if (GET_CODE (offset_rtx) == CONST_INT
		      && INTVAL (offset_rtx) == 0)
		    ret_rtx = index_rtx;
		  else
		    ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx),
				       index_rtx, offset_rtx);
		}
	    }
	}
      else
	{
	  if (base_rtx)
	    {
	      /* base [+ offset].  */
	      if (GET_CODE (offset_rtx) == CONST_INT
		  && INTVAL (offset_rtx) == 0)
		ret_rtx = base_rtx;
	      else
		ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx,
				   offset_rtx);
	    }
	  else if (was_only_offset)
	    ret_rtx = offset_rtx;
	  else
	    {
	      /* Nothing left to build an address from.  */
	      obfree (storage);
	      return;
	    }
	}

      XEXP (mem_rtx, 0) = ret_rtx;
      RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
      return;
    }
  else
    {
      /* No adjustment was needed; discard scratch rtx and leave the
	 MEM unchanged.  */
      obfree (storage);
      return;
    }
}
5003 #endif /* NOTYET */
5004 \f
/* Return 1 if the first insn to set cc before INSN also sets the register
   REG_RTX; otherwise return 0.  Scans backwards from INSN, skipping
   NOTEs; the scan gives up (returning 0) at the first insn that is not
   a single SET, and at any jump, call, or label.  */
int
last_to_set_cc (reg_rtx, insn)
     rtx reg_rtx, insn;
{
  rtx prev_insn = PREV_INSN (insn);

  while (prev_insn)
    {
      /* Notes carry no code; skip them.  */
      if (GET_CODE (prev_insn) == NOTE)
	;

      else if (GET_CODE (prev_insn) == INSN)
	{
	  /* Only single-SET insns are analyzed.  */
	  if (GET_CODE (PATTERN (prev_insn)) != SET)
	    return (0);

	  if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
	    {
	      /* Found the most recent setter of REG_RTX: report whether
		 its source expression also sets the condition code.  */
	      if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
		return (1);

	      return (0);
	    }

	  /* An intervening insn may have clobbered cc; unless we can
	     prove it left cc alone, give up.  */
	  else if (! doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
	    return (0);
	}

      /* Any other kind of insn (jump, call, label...) ends the scan.  */
      else
	return (0);

      prev_insn = PREV_INSN (prev_insn);
    }

  return (0);
}
5043 \f
5044 int
5045 doesnt_set_condition_code (pat)
5046 rtx pat;
5047 {
5048 switch (GET_CODE (pat))
5049 {
5050 case MEM:
5051 case REG:
5052 return 1;
5053
5054 default:
5055 return 0;
5056
5057 }
5058 }
5059 \f
5060 int
5061 sets_condition_code (pat)
5062 rtx pat;
5063 {
5064 switch (GET_CODE (pat))
5065 {
5066 case PLUS:
5067 case MINUS:
5068 case AND:
5069 case IOR:
5070 case XOR:
5071 case NOT:
5072 case NEG:
5073 case MULT:
5074 case DIV:
5075 case MOD:
5076 case UDIV:
5077 case UMOD:
5078 return 1;
5079
5080 default:
5081 return (0);
5082 }
5083 }
5084 \f
5085 int
5086 str_immediate_operand (op, mode)
5087 register rtx op;
5088 enum machine_mode mode ATTRIBUTE_UNUSED;
5089 {
5090 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
5091 return 1;
5092
5093 return 0;
5094 }
5095 \f
5096 int
5097 is_fp_insn (insn)
5098 rtx insn;
5099 {
5100 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
5101 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
5102 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
5103 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
5104 return 1;
5105
5106 return 0;
5107 }
5108
5109 /* Return 1 if the mode of the SET_DEST of insn is floating point
5110 and it is not an fld or a move from memory to memory.
5111 Otherwise return 0 */
5112
5113 int
5114 is_fp_dest (insn)
5115 rtx insn;
5116 {
5117 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
5118 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
5119 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
5120 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
5121 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
5122 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
5123 && GET_CODE (SET_SRC (PATTERN (insn))) != MEM)
5124 return 1;
5125
5126 return 0;
5127 }
5128
5129 /* Return 1 if the mode of the SET_DEST of INSN is floating point and is
5130 memory and the source is a register. */
5131
5132 int
5133 is_fp_store (insn)
5134 rtx insn;
5135 {
5136 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
5137 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
5138 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
5139 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
5140 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
5141 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
5142 return 1;
5143
5144 return 0;
5145 }
5146 \f
/* Return 1 if DEP_INSN sets a register which INSN uses as a base
   or index to reference memory.
   otherwise return 0.  Used to detect address-generation-interlock
   (AGI) stalls on Pentium.  */

int
agi_dependent (insn, dep_insn)
     rtx insn, dep_insn;
{
  /* DEP_INSN writes a register: stall if INSN uses that register
     inside a memory address.  */
  if (GET_CODE (dep_insn) == INSN
      && GET_CODE (PATTERN (dep_insn)) == SET
      && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
    return reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn);

  /* A push modifies the stack pointer, so an address in INSN that uses
     the stack pointer depends on DEP_INSN as well.  */
  if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
      && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
      && push_operand (SET_DEST (PATTERN (dep_insn)),
		       GET_MODE (SET_DEST (PATTERN (dep_insn)))))
    return reg_mentioned_in_mem (stack_pointer_rtx, insn);

  return 0;
}
5168 \f
/* Return 1 if reg is used in rtl as a base or index for a memory ref
   otherwise return 0.  Walks RTL recursively, but only reports REG
   when it occurs somewhere underneath a MEM.  */

int
reg_mentioned_in_mem (reg, rtl)
     rtx reg, rtl;
{
  register char *fmt;
  register int i, j;
  register enum rtx_code code;

  if (rtl == NULL)
    return 0;

  code = GET_CODE (rtl);

  switch (code)
    {
    /* These codes cannot contain a memory reference below them (note
       that SUBREGs are deliberately not searched either), so cut the
       recursion off here.  */
    case HIGH:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case PC:
    case CC0:
    case SUBREG:
      return 0;
    default:
      break;
    }

  /* Inside a MEM, any mention of REG counts as an address use.  */
  if (code == MEM && reg_mentioned_p (reg, rtl))
    return 1;

  /* Otherwise recurse into every expression ('e') and vector ('E')
     operand, following the rtx format string.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
	    if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
	      return 1;
	}

      else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
	return 1;
    }

  return 0;
}
5220 \f
5221 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
5222
5223 operands[0] = result, initialized with the startaddress
5224 operands[1] = alignment of the address.
5225 operands[2] = scratch register, initialized with the startaddress when
5226 not aligned, otherwise undefined
5227
5228 This is just the body. It needs the initialisations mentioned above and
5229 some address computing at the end. These things are done in i386.md. */
5230
5231 char *
5232 output_strlen_unroll (operands)
5233 rtx operands[];
5234 {
5235 rtx xops[18];
5236
5237 xops[0] = operands[0]; /* Result */
5238 /* operands[1]; * Alignment */
5239 xops[1] = operands[2]; /* Scratch */
5240 xops[2] = GEN_INT (0);
5241 xops[3] = GEN_INT (2);
5242 xops[4] = GEN_INT (3);
5243 xops[5] = GEN_INT (4);
5244 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
5245 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
5246 xops[8] = gen_label_rtx (); /* label of main loop */
5247
5248 if (TARGET_USE_Q_REG && QI_REG_P (xops[1]))
5249 xops[9] = gen_label_rtx (); /* pentium optimisation */
5250
5251 xops[10] = gen_label_rtx (); /* end label 2 */
5252 xops[11] = gen_label_rtx (); /* end label 1 */
5253 xops[12] = gen_label_rtx (); /* end label */
5254 /* xops[13] * Temporary used */
5255 xops[14] = GEN_INT (0xff);
5256 xops[15] = GEN_INT (0xff00);
5257 xops[16] = GEN_INT (0xff0000);
5258 xops[17] = GEN_INT (0xff000000);
5259
5260 /* Loop to check 1..3 bytes for null to get an aligned pointer. */
5261
5262 /* Is there a known alignment and is it less than 4? */
5263 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
5264 {
5265 /* Is there a known alignment and is it not 2? */
5266 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
5267 {
5268 xops[6] = gen_label_rtx (); /* Label when aligned to 3-byte */
5269 xops[7] = gen_label_rtx (); /* Label when aligned to 2-byte */
5270
5271 /* Leave just the 3 lower bits.
5272 If this is a q-register, then the high part is used later
5273 therefore use andl rather than andb. */
5274 output_asm_insn (AS2 (and%L1,%4,%1), xops);
5275
5276 /* Is aligned to 4-byte address when zero */
5277 output_asm_insn (AS1 (je,%l8), xops);
5278
5279 /* Side-effect even Parity when %eax == 3 */
5280 output_asm_insn (AS1 (jp,%6), xops);
5281
5282 /* Is it aligned to 2 bytes ? */
5283 if (QI_REG_P (xops[1]))
5284 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
5285 else
5286 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
5287
5288 output_asm_insn (AS1 (je,%7), xops);
5289 }
5290 else
5291 {
5292 /* Since the alignment is 2, we have to check 2 or 0 bytes;
5293 check if is aligned to 4 - byte. */
5294 output_asm_insn (AS2 (and%L1,%3,%1), xops);
5295
5296 /* Is aligned to 4-byte address when zero */
5297 output_asm_insn (AS1 (je,%l8), xops);
5298 }
5299
5300 xops[13] = gen_rtx_MEM (QImode, xops[0]);
5301
5302 /* Now compare the bytes; compare with the high part of a q-reg
5303 gives shorter code. */
5304 if (QI_REG_P (xops[1]))
5305 {
5306 /* Compare the first n unaligned byte on a byte per byte basis. */
5307 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5308
5309 /* When zero we reached the end. */
5310 output_asm_insn (AS1 (je,%l12), xops);
5311
5312 /* Increment the address. */
5313 output_asm_insn (AS1 (inc%L0,%0), xops);
5314
5315 /* Not needed with an alignment of 2 */
5316 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
5317 {
5318 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5319 CODE_LABEL_NUMBER (xops[7]));
5320 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5321 output_asm_insn (AS1 (je,%l12), xops);
5322 output_asm_insn (AS1 (inc%L0,%0), xops);
5323
5324 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5325 CODE_LABEL_NUMBER (xops[6]));
5326 }
5327
5328 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5329 }
5330 else
5331 {
5332 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5333 output_asm_insn (AS1 (je,%l12), xops);
5334 output_asm_insn (AS1 (inc%L0,%0), xops);
5335
5336 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5337 CODE_LABEL_NUMBER (xops[7]));
5338 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5339 output_asm_insn (AS1 (je,%l12), xops);
5340 output_asm_insn (AS1 (inc%L0,%0), xops);
5341
5342 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5343 CODE_LABEL_NUMBER (xops[6]));
5344 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5345 }
5346
5347 output_asm_insn (AS1 (je,%l12), xops);
5348 output_asm_insn (AS1 (inc%L0,%0), xops);
5349 }
5350
5351 /* Generate loop to check 4 bytes at a time. It is not a good idea to
5352 align this loop. It gives only huge programs, but does not help to
5353 speed up. */
5354 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
5355
5356 xops[13] = gen_rtx_MEM (SImode, xops[0]);
5357 output_asm_insn (AS2 (mov%L1,%13,%1), xops);
5358
5359 if (QI_REG_P (xops[1]))
5360 {
5361 /* On i586 it is faster to combine the hi- and lo- part as
5362 a kind of lookahead. If anding both yields zero, then one
5363 of both *could* be zero, otherwise none of both is zero;
5364 this saves one instruction, on i486 this is slower
5365 tested with P-90, i486DX2-66, AMD486DX2-66 */
5366 if (TARGET_PENTIUM)
5367 {
5368 output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
5369 output_asm_insn (AS1 (jne,%l9), xops);
5370 }
5371
5372 /* Check first byte. */
5373 output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
5374 output_asm_insn (AS1 (je,%l12), xops);
5375
5376 /* Check second byte. */
5377 output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
5378 output_asm_insn (AS1 (je,%l11), xops);
5379
5380 if (TARGET_PENTIUM)
5381 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5382 CODE_LABEL_NUMBER (xops[9]));
5383 }
5384
5385 else
5386 {
5387 /* Check first byte. */
5388 output_asm_insn (AS2 (test%L1,%14,%1), xops);
5389 output_asm_insn (AS1 (je,%l12), xops);
5390
5391 /* Check second byte. */
5392 output_asm_insn (AS2 (test%L1,%15,%1), xops);
5393 output_asm_insn (AS1 (je,%l11), xops);
5394 }
5395
5396 /* Check third byte. */
5397 output_asm_insn (AS2 (test%L1,%16,%1), xops);
5398 output_asm_insn (AS1 (je,%l10), xops);
5399
5400 /* Check fourth byte and increment address. */
5401 output_asm_insn (AS2 (add%L0,%5,%0), xops);
5402 output_asm_insn (AS2 (test%L1,%17,%1), xops);
5403 output_asm_insn (AS1 (jne,%l8), xops);
5404
5405 /* Now generate fixups when the compare stops within a 4-byte word. */
5406 output_asm_insn (AS2 (sub%L0,%4,%0), xops);
5407
5408 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
5409 output_asm_insn (AS1 (inc%L0,%0), xops);
5410
5411 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
5412 output_asm_insn (AS1 (inc%L0,%0), xops);
5413
5414 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));
5415
5416 return "";
5417 }
5418
/* Output the fcmov sequence for a floating point conditional move.
   WHICH_ALTERNATIVE selects which constraint alternative matched;
   operands[1] is the comparison, operands[2]/operands[3] the two arms.
   Returns "" because the insns are emitted directly.  */

char *
output_fp_conditional_move (which_alternative, operands)
     int which_alternative;
     rtx operands[];
{
  switch (which_alternative)
    {
    case 0:
      /* r <- cond ? arg : r */
      output_asm_insn (AS2 (fcmov%F1,%2,%0), operands);
      break;

    case 1:
      /* r <- cond ? r : arg */
      output_asm_insn (AS2 (fcmov%f1,%3,%0), operands);
      break;

    case 2:
      /* r <- cond ? arg1 : arg2 -- one fcmov for each arm.
	 (The old comment here said "r <- cond ? r : arg", which
	 described alternative 1, not this one.)  */
      output_asm_insn (AS2 (fcmov%F1,%2,%0), operands);
      output_asm_insn (AS2 (fcmov%f1,%3,%0), operands);
      break;

    default:
      abort ();
    }

  return "";
}
5448
/* Output the cmov sequence for an integer conditional move.
   WHICH_ALTERNATIVE selects which constraint alternative matched;
   operands[1] is the comparison, operands[2]/operands[3] the two arms.
   For DImode a second cmov is emitted for the upper word.  Returns ""
   on success, or NULL_PTR to make final reinsert the test insn.  */

char *
output_int_conditional_move (which_alternative, operands)
     int which_alternative;
     rtx operands[];
{
  int code = GET_CODE (operands[1]);
  enum machine_mode mode;
  rtx xops[4];

  /* This is very tricky.  We have to do it right.  For a code segment
     like:

	int foo, bar;
	....
	foo = foo - x;
	if (foo >= 0)
	  bar = y;

     final_scan_insn () may delete the insn which sets CC.  We have to
     tell final_scan_insn () if it should be reinserted.  When CODE is
     GT or LE, we have to check the CC_NO_OVERFLOW bit and return
     NULL_PTR to tell final to reinsert the test insn because the
     conditional move cannot be handled properly without it. */
  if ((code == GT || code == LE)
      && (cc_prev_status.flags & CC_NO_OVERFLOW))
    return NULL_PTR;

  mode = GET_MODE (operands [0]);
  if (mode == DImode)
    {
      /* Build operand copies referring to word 1 of each DImode
	 operand, so a second cmov can handle the other half.
	 NOTE(review): assumes %0/%2/%3 on the DImode operands print
	 the word-0 registers -- confirm against PRINT_OPERAND.  */
      xops [0] = gen_rtx_SUBREG (SImode, operands [0], 1);
      xops [1] = operands [1];
      xops [2] = gen_rtx_SUBREG (SImode, operands [2], 1);
      xops [3] = gen_rtx_SUBREG (SImode, operands [3], 1);
    }

  switch (which_alternative)
    {
    case 0:
      /* r <- cond ? arg : r */
      output_asm_insn (AS2 (cmov%C1,%2,%0), operands);
      if (mode == DImode)
	output_asm_insn (AS2 (cmov%C1,%2,%0), xops);
      break;

    case 1:
      /* r <- cond ? r : arg */
      output_asm_insn (AS2 (cmov%c1,%3,%0), operands);
      if (mode == DImode)
	output_asm_insn (AS2 (cmov%c1,%3,%0), xops);
      break;

    case 2:
      /* rm <- cond ? arg1 : arg2 */
      output_asm_insn (AS2 (cmov%C1,%2,%0), operands);
      output_asm_insn (AS2 (cmov%c1,%3,%0), operands);
      if (mode == DImode)
	{
	  output_asm_insn (AS2 (cmov%C1,%2,%0), xops);
	  output_asm_insn (AS2 (cmov%c1,%3,%0), xops);
	}
      break;

    default:
      abort ();
    }

  return "";
}
5518
/* Scheduler hook: adjust COST, the latency between DEP_INSN and the
   dependent INSN (linked by LINK), for the selected processor.
   Returns the adjusted cost.  */

int
x86_adjust_cost (insn, link, dep_insn, cost)
     rtx insn, link, dep_insn;
     int cost;
{
  rtx next_inst;

  /* Dependencies involving calls and jumps are not adjusted.  */
  if (GET_CODE (dep_insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
    return 0;

  /* If DEP_INSN's result register is not actually read by INSN's
     source, there is no true data dependence to pay for.  */
  if (GET_CODE (dep_insn) == INSN
      && GET_CODE (PATTERN (dep_insn)) == SET
      && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG
      && GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SET
      && !reg_overlap_mentioned_p (SET_DEST (PATTERN (dep_insn)),
				   SET_SRC (PATTERN (insn))))
    return 0;	/* ??? */


  switch (ix86_cpu)
    {
    case PROCESSOR_PENTIUM:
      /* Two FP insns can overlap unless the first one produces an FP
	 stack result the second consumes.  */
      if (cost != 0 && is_fp_insn (insn) && is_fp_insn (dep_insn)
	  && !is_fp_dest (dep_insn))
	return 0;

      /* Address generation interlock costs 3 cycles on Pentium.  */
      if (agi_dependent (insn, dep_insn))
	return 3;

      if (GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SET
	  && SET_DEST (PATTERN (insn)) == cc0_rtx
	  && (next_inst = next_nonnote_insn (insn))
	  && GET_CODE (next_inst) == JUMP_INSN)
	/* compare probably paired with jump */
	return 0;
      break;

    case PROCESSOR_K6:
    default:
      if (!is_fp_dest (dep_insn))
	{
	  /* Without an AGI and without an FP stack result to wait on,
	     the insns can issue back to back.  */
	  if(!agi_dependent (insn, dep_insn))
	    return 0;
	  if (TARGET_486)
	    return 2;
	}
      else
	/* FP computation followed by a store of the result, where the
	   next two insns are a conditional jump closing a loop
	   (INSN, JUMP_INSN, NOTE_INSN_LOOP_END): charge 3 cycles.  */
	if (is_fp_store (insn) && is_fp_insn (dep_insn)
	    && NEXT_INSN (insn) && NEXT_INSN (NEXT_INSN (insn))
	    && NEXT_INSN (NEXT_INSN (NEXT_INSN (insn)))
	    && (GET_CODE (NEXT_INSN (insn)) == INSN)
	    && (GET_CODE (NEXT_INSN (NEXT_INSN (insn))) == JUMP_INSN)
	    && (GET_CODE (NEXT_INSN (NEXT_INSN (NEXT_INSN (insn)))) == NOTE)
	    && (NOTE_LINE_NUMBER (NEXT_INSN (NEXT_INSN (NEXT_INSN (insn))))
		== NOTE_INSN_LOOP_END))
	  return 3;
      break;
    }

  return cost;
}
This page took 0.301031 seconds and 5 git commands to generate.