]> gcc.gnu.org Git - gcc.git/blob - gcc/config/i386/i386.c
(override_options): Set defaults for -march and -mcpu.
[gcc.git] / gcc / config / i386 / i386.c
1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 1992, 1994, 1995, 1996 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include <stdio.h>
22 #include <setjmp.h>
23 #include <ctype.h>
24 #include "config.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "real.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "tree.h"
35 #include "flags.h"
36 #include "except.h"
37 #include "function.h"
38
39 #ifdef EXTRA_CONSTRAINT
40 /* If EXTRA_CONSTRAINT is defined, then the 'S'
41 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
42 asm statements that need 'S' for class SIREG will break. */
43 error EXTRA_CONSTRAINT conflicts with S constraint letter
44 /* The previous line used to be #error, but some compilers barf
45 even if the conditional was untrue. */
46 #endif
47
48 #ifndef CHECK_STACK_LIMIT
49 #define CHECK_STACK_LIMIT -1
50 #endif
51
/* Classification of a single operand, used by ix86_binary_operator_ok
   and ix86_unary_operator_ok to describe operand combinations.  */
enum reg_mem			/* Type of an operand for ix86_{binary,unary}_operator_ok */
{
  reg_p,			/* operand is a register */
  mem_p,			/* operand is a memory reference */
  imm_p				/* operand is an immediate constant */
};
58
/* Processor costs (relative to an add).  One table per processor;
   ix86_cost points at the table for the processor in effect.  */
struct processor_costs i386_cost = {	/* 386 specific costs */
  1,				/* cost of an add instruction (2 cycles) */
  1,				/* cost of a lea instruction */
  3,				/* variable shift costs */
  2,				/* constant shift costs */
  6,				/* cost of starting a multiply */
  1,				/* cost of multiply per each bit set */
  23				/* cost of a divide/mod */
};
69
/* Cost table for the 486; same layout as i386_cost above.  */
struct processor_costs i486_cost = {	/* 486 specific costs */
  1,				/* cost of an add instruction */
  1,				/* cost of a lea instruction */
  3,				/* variable shift costs */
  2,				/* constant shift costs */
  12,				/* cost of starting a multiply */
  1,				/* cost of multiply per each bit set */
  40				/* cost of a divide/mod */
};
79
/* Cost table for the Pentium; same layout as i386_cost above.  */
struct processor_costs pentium_cost = {
  1,				/* cost of an add instruction */
  1,				/* cost of a lea instruction */
  3,				/* variable shift costs */
  1,				/* constant shift costs */
  12,				/* cost of starting a multiply */
  1,				/* cost of multiply per each bit set */
  25				/* cost of a divide/mod */
};
89
/* Cost table currently in effect.  Statically defaulted to the Pentium
   table.  NOTE(review): the code visible in this file never reassigns
   this pointer, even though override_options' table carries per-entry
   cost pointers -- confirm whether -mcpu is meant to switch it.  */
struct processor_costs *ix86_cost = &pentium_cost;

/* A memory reference in mode MODE to the word at the frame pointer.  */
#define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))

extern FILE *asm_out_file;
extern char *strcat ();

/* Forward declarations for output routines defined later in this file.  */
char *singlemove_string ();
char *output_move_const_single ();
char *output_fp_cc0_set ();

/* Register name tables (macros from the tm header, presumably i386.h),
   indexed by hard register number.  */
char *hi_reg_name[] = HI_REGISTER_NAMES;
char *qi_reg_name[] = QI_REGISTER_NAMES;
char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
104
/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in i386.h.  The first two FP
   registers get their own classes (presumably to express the 387's
   top-of-stack discipline -- confirm against i386.h).  */

enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{
  /* ax, dx, cx, bx */
  AREG, DREG, CREG, BREG,
  /* si, di, bp, sp */
  SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
  /* FP registers */
  FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
  FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
  /* arg pointer */
  INDEX_REGS
};
120
/* Test and compare insns in i386.md store the information needed to
   generate branch and scc insns here. */

struct rtx_def *i386_compare_op0 = NULL_RTX;	/* first compare operand */
struct rtx_def *i386_compare_op1 = NULL_RTX;	/* second compare operand */
/* Generators for the comparison pattern and its equality variant.  */
struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();

/* which cpu are we scheduling for */
enum processor_type ix86_cpu;

/* which instruction set architecture to use. */
int ix86_arch;

/* Strings to hold which cpu and instruction set architecture to use.
   The string forms come from the command line; override_options parses
   and validates them.  */
char *ix86_cpu_string;		/* for -mcpu=<xxx> */
char *ix86_arch_string;		/* for -march=<xxx> */

/* Register allocation order, as a string of register letters;
   validated in override_options.  */
char *i386_reg_alloc_order;
static char regs_allocated[FIRST_PSEUDO_REGISTER]; /* which regs the order names */

/* # of registers to use to pass arguments. */
char *i386_regparm_string;	/* # registers to use to pass args */
int i386_regparm;		/* i386_regparm_string as a number */

/* Alignment to use for loops and jumps; the int values are parsed from
   the corresponding strings in override_options.  */
char *i386_align_loops_string;	/* power of two alignment for loops */
char *i386_align_jumps_string;	/* power of two alignment for non-loop jumps */
char *i386_align_funcs_string;	/* power of two alignment for functions */
char *i386_branch_cost_string;	/* values 1-5: see jump.c */

int i386_align_loops;		/* power of two alignment for loops */
int i386_align_jumps;		/* power of two alignment for non-loop jumps */
int i386_align_funcs;		/* power of two alignment for functions */
int i386_branch_cost;		/* values 1-5: see jump.c */
156
157 /* Sometimes certain combinations of command options do not make
158 sense on a particular target machine. You can define a macro
159 `OVERRIDE_OPTIONS' to take account of this. This macro, if
160 defined, is executed once just after all the command options have
161 been parsed.
162
163 Don't use this macro to turn on various extra optimizations for
164 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
165
166 void
167 override_options ()
168 {
169 int ch, i, j, regno;
170 char *p;
171 int def_align;
172
173 static struct ptt
174 {
175 char *name; /* Canonical processor name. */
176 enum processor_type processor; /* Processor type enum value. */
177 struct processor_costs *cost; /* Processor costs */
178 int target_enable; /* Target flags to enable. */
179 int target_disable; /* Target flags to disable. */
180 } processor_target_table[]
181 = {{PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
182 {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
183 {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
184 {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
185 {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0},
186 {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0}};
187
188 int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
189
190 #ifdef SUBTARGET_OVERRIDE_OPTIONS
191 SUBTARGET_OVERRIDE_OPTIONS;
192 #endif
193
194 /* Validate registers in register allocation order */
195 if (i386_reg_alloc_order)
196 {
197 for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
198 {
199 switch (ch)
200 {
201 case 'a': regno = 0; break;
202 case 'd': regno = 1; break;
203 case 'c': regno = 2; break;
204 case 'b': regno = 3; break;
205 case 'S': regno = 4; break;
206 case 'D': regno = 5; break;
207 case 'B': regno = 6; break;
208
209 default: fatal ("Register '%c' is unknown", ch);
210 }
211
212 if (regs_allocated[regno])
213 fatal ("Register '%c' was already specified in the allocation order", ch);
214
215 regs_allocated[regno] = 1;
216 }
217 }
218
219 /* Get the architectural level. */
220 if (ix86_arch_string == (char *)0)
221 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
222
223 for (i = 0; i < ptt_size; i++)
224 if (! strcmp (ix86_arch_string, processor_target_table[i].name))
225 {
226 ix86_arch = processor_target_table[i].processor;
227 if (ix86_cpu_string == (char *)0)
228 ix86_cpu_string = processor_target_table[i].name;
229 break;
230 }
231
232 if (i == ptt_size)
233 {
234 error ("bad value (%s) for -march= switch", ix86_arch_string);
235 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
236 ix86_arch = PROCESSOR_DEFAULT;
237 }
238
239 if (ix86_cpu_string == (char *)0)
240 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
241
242 for (j = 0; j < ptt_size; j++)
243 if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
244 {
245 ix86_cpu = processor_target_table[j].processor;
246 if (i > j && (int)ix86_arch >= (int)PROCESSOR_PENTIUMPRO)
247 error ("-mcpu=%s does not support -march=%s", ix86_cpu_string, ix86_arch_string);
248
249 target_flags |= processor_target_table[j].target_enable;
250 target_flags &= ~processor_target_table[j].target_disable;
251 break;
252 }
253
254 if (j == ptt_size)
255 {
256 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
257 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
258 ix86_cpu = PROCESSOR_DEFAULT;
259 }
260
261 /* Validate -mregparm= value */
262 if (i386_regparm_string)
263 {
264 i386_regparm = atoi (i386_regparm_string);
265 if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
266 fatal ("-mregparm=%d is not between 0 and %d", i386_regparm, REGPARM_MAX);
267 }
268
269 def_align = (TARGET_386) ? 2 : 4;
270
271 /* Validate -malign-loops= value, or provide default */
272 if (i386_align_loops_string)
273 {
274 i386_align_loops = atoi (i386_align_loops_string);
275 if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
276 fatal ("-malign-loops=%d is not between 0 and %d",
277 i386_align_loops, MAX_CODE_ALIGN);
278 }
279 else
280 i386_align_loops = 2;
281
282 /* Validate -malign-jumps= value, or provide default */
283 if (i386_align_jumps_string)
284 {
285 i386_align_jumps = atoi (i386_align_jumps_string);
286 if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
287 fatal ("-malign-jumps=%d is not between 0 and %d",
288 i386_align_jumps, MAX_CODE_ALIGN);
289 }
290 else
291 i386_align_jumps = def_align;
292
293 /* Validate -malign-functions= value, or provide default */
294 if (i386_align_funcs_string)
295 {
296 i386_align_funcs = atoi (i386_align_funcs_string);
297 if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
298 fatal ("-malign-functions=%d is not between 0 and %d",
299 i386_align_funcs, MAX_CODE_ALIGN);
300 }
301 else
302 i386_align_funcs = def_align;
303
304 /* Validate -mbranch-cost= value, or provide default */
305 if (i386_branch_cost_string)
306 {
307 i386_branch_cost = atoi (i386_branch_cost_string);
308 if (i386_branch_cost < 0 || i386_branch_cost > 5)
309 fatal ("-mbranch-cost=%d is not between 0 and 5",
310 i386_branch_cost);
311 }
312 else
313 i386_branch_cost = 1;
314
315 if (TARGET_OMIT_LEAF_FRAME_POINTER) /* keep nonleaf frame pointers */
316 flag_omit_frame_pointer = 1;
317
318 /* pic references don't explicitly mention pic_offset_table_rtx */
319 /* code threaded into the prologue may conflict with profiling */
320 if (flag_pic || profile_flag || profile_block_flag)
321 target_flags &= ~MASK_SCHEDULE_PROLOGUE;
322 }
323 \f
324 /* A C statement (sans semicolon) to choose the order in which to
325 allocate hard registers for pseudo-registers local to a basic
326 block.
327
328 Store the desired register order in the array `reg_alloc_order'.
329 Element 0 should be the register to allocate first; element 1, the
330 next register; and so on.
331
332 The macro body should not assume anything about the contents of
333 `reg_alloc_order' before execution of the macro.
334
335 On most machines, it is not necessary to define this macro. */
336
337 void
338 order_regs_for_local_alloc ()
339 {
340 int i, ch, order, regno;
341
342 /* User specified the register allocation order */
343 if (i386_reg_alloc_order)
344 {
345 for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
346 {
347 switch (ch)
348 {
349 case 'a': regno = 0; break;
350 case 'd': regno = 1; break;
351 case 'c': regno = 2; break;
352 case 'b': regno = 3; break;
353 case 'S': regno = 4; break;
354 case 'D': regno = 5; break;
355 case 'B': regno = 6; break;
356 }
357
358 reg_alloc_order[order++] = regno;
359 }
360
361 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
362 {
363 if (!regs_allocated[i])
364 reg_alloc_order[order++] = i;
365 }
366 }
367
368 /* If users did not specify a register allocation order, use natural order */
369 else
370 {
371 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
372 reg_alloc_order[i] = i;
373 }
374 }
375
376 \f
/* Adjust optimization defaults once LEVEL (the -O level) is known.  */

void
optimization_options (level)
     int level;
{
  /* At -O2 and beyond, turn off pre-reload instruction scheduling by
     default: on the 386 it tends to make the register shortage worse.  */
#ifdef INSN_SCHEDULING
  if (level >= 2)
    flag_schedule_insns = 0;
#endif
}
388 \f
389 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
390 attribute for DECL. The attributes in ATTRIBUTES have previously been
391 assigned to DECL. */
392
393 int
394 i386_valid_decl_attribute_p (decl, attributes, identifier, args)
395 tree decl;
396 tree attributes;
397 tree identifier;
398 tree args;
399 {
400 return 0;
401 }
402
403 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
404 attribute for TYPE. The attributes in ATTRIBUTES have previously been
405 assigned to TYPE. */
406
int
i386_valid_type_attribute_p (type, attributes, identifier, args)
     tree type;
     tree attributes;
     tree identifier;
     tree args;
{
  /* Only function types (and the two DECL codes below) can carry these
     attributes.  NOTE(review): FIELD_DECL and TYPE_DECL are decl codes,
     not type codes -- confirm this test matches what callers pass.  */
  if (TREE_CODE (type) != FUNCTION_TYPE
      && TREE_CODE (type) != FIELD_DECL
      && TREE_CODE (type) != TYPE_DECL)
    return 0;

  /* Stdcall attribute says callee is responsible for popping arguments
     if they are not variable.  Valid only with no arguments.  */
  if (is_attribute_p ("stdcall", identifier))
    return (args == NULL_TREE);

  /* Cdecl attribute says the callee is a normal C declaration.
     Valid only with no arguments.  */
  if (is_attribute_p ("cdecl", identifier))
    return (args == NULL_TREE);

  /* Regparm attribute specifies how many integer arguments are to be
     passed in registers.  Requires exactly one INTEGER_CST argument in
     the range 0 .. REGPARM_MAX.  */
  if (is_attribute_p ("regparm", identifier))
    {
      tree cst;

      /* Must be a one-element TREE_LIST with a non-null value.  */
      if (!args || TREE_CODE (args) != TREE_LIST
	  || TREE_CHAIN (args) != NULL_TREE
	  || TREE_VALUE (args) == NULL_TREE)
	return 0;

      cst = TREE_VALUE (args);
      if (TREE_CODE (cst) != INTEGER_CST)
	return 0;

      /* NOTE(review): if TREE_INT_CST_LOW yields an unsigned type, the
	 "< 0" test is always false -- confirm against this tree's
	 tree.h.  */
      if (TREE_INT_CST_HIGH (cst) != 0
	  || TREE_INT_CST_LOW (cst) < 0
	  || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
	return 0;

      return 1;
    }

  /* Unrecognized attribute.  */
  return 0;
}
453
454 /* Return 0 if the attributes for two types are incompatible, 1 if they
455 are compatible, and 2 if they are nearly compatible (which causes a
456 warning to be generated). */
457
458 int
459 i386_comp_type_attributes (type1, type2)
460 tree type1;
461 tree type2;
462 {
463 return 1;
464 }
465
466 \f
467 /* Value is the number of bytes of arguments automatically
468 popped when returning from a subroutine call.
469 FUNDECL is the declaration node of the function (as a tree),
470 FUNTYPE is the data type of the function (as a tree),
471 or for a library call it is an identifier node for the subroutine name.
472 SIZE is the number of bytes of arguments passed on the stack.
473
474 On the 80386, the RTD insn may be used to pop them if the number
475 of args is fixed, but if the number is variable then the caller
476 must pop them all. RTD can't be used for library calls now
477 because the library is compiled with the Unix compiler.
478 Use of RTD is a selectable option, since it is incompatible with
479 standard Unix calling sequences. If the option is not selected,
480 the caller must always pop the args.
481
482 The attribute stdcall is equivalent to RTD on a per module basis. */
483
484 int
485 i386_return_pops_args (fundecl, funtype, size)
486 tree fundecl;
487 tree funtype;
488 int size;
489 {
490 int rtd = TARGET_RTD;
491
492 if (TREE_CODE (funtype) == IDENTIFIER_NODE)
493 return 0;
494
495 /* Cdecl functions override -mrtd, and never pop the stack */
496 if (!lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {
497
498 /* Stdcall functions will pop the stack if not variable args */
499 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
500 rtd = 1;
501
502 if (rtd
503 && (TYPE_ARG_TYPES (funtype) == NULL_TREE
504 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype))) == void_type_node)))
505 return size;
506 }
507
508 /* Lose any fake structure return argument */
509 if (aggregate_value_p (TREE_TYPE (funtype)))
510 return GET_MODE_SIZE (Pmode);
511
512 return 0;
513 }
514
515 \f
516 /* Argument support functions. */
517
518 /* Initialize a variable CUM of type CUMULATIVE_ARGS
519 for a call to a function whose data type is FNTYPE.
520 For a library call, FNTYPE is 0. */
521
void
init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;	/* argument info to initialize */
     tree fntype;		/* tree ptr for function decl */
     rtx libname;		/* SYMBOL_REF of library name or 0 */
{
  static CUMULATIVE_ARGS zero_cum; /* all-zero template for *cum */
  tree param, next_param;

  /* Trace entry when argument-passing debugging is enabled.  */
  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\ninit_cumulative_args (");
      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);
	  fprintf (stderr, "fntype code = %s, ret code = %s",
		   tree_code_name[ (int)TREE_CODE (fntype) ],
		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
	}
      else
	fprintf (stderr, "no fntype");

      if (libname)
	fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  *cum = zero_cum;

  /* Set up the number of registers to use for passing arguments.  The
     global -mregparm value may be overridden per function by a regparm
     attribute on the function type (validated earlier by
     i386_valid_type_attribute_p).  */
  cum->nregs = i386_regparm;
  if (fntype)
    {
      tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));
      if (attr)
	cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
    }

  /* Determine if this function has variable arguments.  This is
     indicated by the last argument being 'void_type_mode' if there
     are no variable arguments.  If there are variable arguments, then
     we won't pass anything in registers */

  if (cum->nregs)
    {
      for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
	   param != (tree)0;
	   param = next_param)
	{
	  next_param = TREE_CHAIN (param);
	  /* Last list node carrying a non-void type => varargs.  */
	  if (next_param == (tree)0 && TREE_VALUE (param) != void_type_node)
	    cum->nregs = 0;
	}
    }

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, ", nregs=%d )\n", cum->nregs);

  return;
}
581
582 /* Update the data in CUM to advance over an argument
583 of mode MODE and data type TYPE.
584 (TYPE is null for libcalls where that information may not be available.) */
585
586 void
587 function_arg_advance (cum, mode, type, named)
588 CUMULATIVE_ARGS *cum; /* current arg information */
589 enum machine_mode mode; /* current arg mode */
590 tree type; /* type of the argument or 0 if lib support */
591 int named; /* whether or not the argument was named */
592 {
593 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
594 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
595
596 if (TARGET_DEBUG_ARG)
597 fprintf (stderr,
598 "function_adv( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d )\n\n",
599 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
600
601 cum->words += words;
602 cum->nregs -= words;
603 cum->regno += words;
604
605 if (cum->nregs <= 0)
606 {
607 cum->nregs = 0;
608 cum->regno = 0;
609 }
610
611 return;
612 }
613
614 /* Define where to put the arguments to a function.
615 Value is zero to push the argument on the stack,
616 or a hard register in which to store the argument.
617
618 MODE is the argument's machine mode.
619 TYPE is the data type of the argument (as a tree).
620 This is null for libcalls where that information may
621 not be available.
622 CUM is a variable of type CUMULATIVE_ARGS which gives info about
623 the preceding args and about the function being called.
624 NAMED is nonzero if this argument is a named parameter
625 (otherwise it is an extra parameter matching an ellipsis). */
626
627 struct rtx_def *
628 function_arg (cum, mode, type, named)
629 CUMULATIVE_ARGS *cum; /* current arg information */
630 enum machine_mode mode; /* current arg mode */
631 tree type; /* type of the argument or 0 if lib support */
632 int named; /* != 0 for normal args, == 0 for ... args */
633 {
634 rtx ret = NULL_RTX;
635 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
636 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
637
638 switch (mode)
639 {
640 default: /* for now, pass fp/complex values on the stack */
641 break;
642
643 case BLKmode:
644 case DImode:
645 case SImode:
646 case HImode:
647 case QImode:
648 if (words <= cum->nregs)
649 ret = gen_rtx (REG, mode, cum->regno);
650 break;
651 }
652
653 if (TARGET_DEBUG_ARG)
654 {
655 fprintf (stderr,
656 "function_arg( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d",
657 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
658
659 if (ret)
660 fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
661 else
662 fprintf (stderr, ", stack");
663
664 fprintf (stderr, " )\n");
665 }
666
667 return ret;
668 }
669
670 /* For an arg passed partly in registers and partly in memory,
671 this is the number of registers used.
672 For args passed entirely in registers or entirely in memory, zero. */
673
674 int
675 function_arg_partial_nregs (cum, mode, type, named)
676 CUMULATIVE_ARGS *cum; /* current arg information */
677 enum machine_mode mode; /* current arg mode */
678 tree type; /* type of the argument or 0 if lib support */
679 int named; /* != 0 for normal args, == 0 for ... args */
680 {
681 return 0;
682 }
683
684 \f
685 /* Output an insn whose source is a 386 integer register. SRC is the
686 rtx for the register, and TEMPLATE is the op-code template. SRC may
687 be either SImode or DImode.
688
689 The template will be output with operands[0] as SRC, and operands[1]
690 as a pointer to the top of the 386 stack. So a call from floatsidf2
691 would look like this:
692
693 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
694
695 where %z0 corresponds to the caller's operands[1], and is used to
696 emit the proper size suffix.
697
698 ??? Extend this to handle HImode - a 387 can load and store HImode
699 values directly. */
700
void
output_op_from_reg (src, template)
     rtx src;
     char *template;
{
  rtx xops[4];
  int size = GET_MODE_SIZE (GET_MODE (src));

  xops[0] = src;
  xops[1] = AT_SP (Pmode);	/* memory operand at the 386 stack top */
  xops[2] = GEN_INT (size);	/* bytes pushed, removed again below */
  xops[3] = stack_pointer_rtx;

  /* Push SRC onto the 386 stack, highest-numbered word first, so the
     value ends up in memory in its natural word order.  */
  if (size > UNITS_PER_WORD)
    {
      rtx high;
      if (size > 2 * UNITS_PER_WORD)
	{
	  high = gen_rtx (REG, SImode, REGNO (src) + 2);
	  output_asm_insn (AS1 (push%L0,%0), &high);
	}
      high = gen_rtx (REG, SImode, REGNO (src) + 1);
      output_asm_insn (AS1 (push%L0,%0), &high);
    }
  output_asm_insn (AS1 (push%L0,%0), &src);

  /* Emit the caller's template with the pushed value as %1.  */
  output_asm_insn (template, xops);

  /* Deallocate the temporary stack slot.  */
  output_asm_insn (AS2 (add%L3,%2,%3), xops);
}
731 \f
732 /* Output an insn to pop an value from the 387 top-of-stack to 386
733 register DEST. The 387 register stack is popped if DIES is true. If
734 the mode of DEST is an integer mode, a `fist' integer store is done,
735 otherwise a `fst' float store is done. */
736
void
output_to_reg (dest, dies)
     rtx dest;
     int dies;
{
  rtx xops[4];
  int size = GET_MODE_SIZE (GET_MODE (dest));

  xops[0] = AT_SP (Pmode);	/* memory operand at the 386 stack top */
  xops[1] = stack_pointer_rtx;
  xops[2] = GEN_INT (size);	/* bytes of temporary stack space */
  xops[3] = dest;

  /* Make room on the 386 stack for the stored value.  */
  output_asm_insn (AS2 (sub%L1,%2,%1), xops);

  if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
    {
      /* Integer destination: use a fist/fistp integer store.  */
      if (dies)
	output_asm_insn (AS1 (fistp%z3,%y0), xops);
      else
	output_asm_insn (AS1 (fist%z3,%y0), xops);
    }
  else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
    {
      if (dies)
	output_asm_insn (AS1 (fstp%z3,%y0), xops);
      else
	{
	  if (GET_MODE (dest) == XFmode)
	    {
	      /* The 387 has no non-popping extended-precision store, so
		 pop with fstp and reload the value with fld.  */
	      output_asm_insn (AS1 (fstp%z3,%y0), xops);
	      output_asm_insn (AS1 (fld%z3,%y0), xops);
	    }
	  else
	    output_asm_insn (AS1 (fst%z3,%y0), xops);
	}
    }
  else
    abort ();

  /* Pop the stored value into DEST, one word at a time, lowest-numbered
     register first.  */
  output_asm_insn (AS1 (pop%L0,%0), &dest);

  if (size > UNITS_PER_WORD)
    {
      dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
      output_asm_insn (AS1 (pop%L0,%0), &dest);
      if (size > 2 * UNITS_PER_WORD)
	{
	  dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
	  output_asm_insn (AS1 (pop%L0,%0), &dest);
	}
    }
}
790 \f
/* Return the assembler template for moving operands[1] into
   operands[0], each at most one word wide.  A memory-to-memory move
   is done through the stack: the push is emitted here and the pop
   template is returned.  */
char *
singlemove_string (operands)
     rtx *operands;
{
  rtx x;
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
    {
      /* Pre-decrement destination is only valid as a push onto the
	 processor stack.  */
      if (XEXP (x, 0) != stack_pointer_rtx)
	abort ();
      return "push%L1 %1";
    }
  else if (GET_CODE (operands[1]) == CONST_DOUBLE)
    {
      return output_move_const_single (operands);
    }
  else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
    return AS2 (mov%L0,%1,%0);
  else if (CONSTANT_P (operands[1]))
    return AS2 (mov%L0,%1,%0);
  else
    {
      /* Memory to memory: the 386 has no such move insn, so go through
	 the stack.  */
      output_asm_insn ("push%L1 %1", operands);
      return "pop%L0 %0";
    }
}
817 \f
818 /* Return a REG that occurs in ADDR with coefficient 1.
819 ADDR can be effectively incremented by incrementing REG. */
820
821 static rtx
822 find_addr_reg (addr)
823 rtx addr;
824 {
825 while (GET_CODE (addr) == PLUS)
826 {
827 if (GET_CODE (XEXP (addr, 0)) == REG)
828 addr = XEXP (addr, 0);
829 else if (GET_CODE (XEXP (addr, 1)) == REG)
830 addr = XEXP (addr, 1);
831 else if (CONSTANT_P (XEXP (addr, 0)))
832 addr = XEXP (addr, 1);
833 else if (CONSTANT_P (XEXP (addr, 1)))
834 addr = XEXP (addr, 0);
835 else
836 abort ();
837 }
838 if (GET_CODE (addr) == REG)
839 return addr;
840 abort ();
841 }
842
843 \f
844 /* Output an insn to add the constant N to the register X. */
845
846 static void
847 asm_add (n, x)
848 int n;
849 rtx x;
850 {
851 rtx xops[2];
852 xops[0] = x;
853
854 if (n == -1)
855 output_asm_insn (AS1 (dec%L0,%0), xops);
856 else if (n == 1)
857 output_asm_insn (AS1 (inc%L0,%0), xops);
858 else if (n < 0)
859 {
860 xops[1] = GEN_INT (-n);
861 output_asm_insn (AS2 (sub%L0,%1,%0), xops);
862 }
863 else if (n > 0)
864 {
865 xops[1] = GEN_INT (n);
866 output_asm_insn (AS2 (add%L0,%1,%0), xops);
867 }
868 }
869
870 \f
871 /* Output assembler code to perform a doubleword move insn
872 with operands OPERANDS. */
873
874 char *
875 output_move_double (operands)
876 rtx *operands;
877 {
878 enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
879 rtx latehalf[2];
880 rtx middlehalf[2];
881 rtx xops[2];
882 rtx addreg0 = 0, addreg1 = 0;
883 int dest_overlapped_low = 0;
884 int size = GET_MODE_SIZE (GET_MODE (operands[0]));
885
886 middlehalf[0] = 0;
887 middlehalf[1] = 0;
888
889 /* First classify both operands. */
890
891 if (REG_P (operands[0]))
892 optype0 = REGOP;
893 else if (offsettable_memref_p (operands[0]))
894 optype0 = OFFSOP;
895 else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
896 optype0 = POPOP;
897 else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
898 optype0 = PUSHOP;
899 else if (GET_CODE (operands[0]) == MEM)
900 optype0 = MEMOP;
901 else
902 optype0 = RNDOP;
903
904 if (REG_P (operands[1]))
905 optype1 = REGOP;
906 else if (CONSTANT_P (operands[1]))
907 optype1 = CNSTOP;
908 else if (offsettable_memref_p (operands[1]))
909 optype1 = OFFSOP;
910 else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
911 optype1 = POPOP;
912 else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
913 optype1 = PUSHOP;
914 else if (GET_CODE (operands[1]) == MEM)
915 optype1 = MEMOP;
916 else
917 optype1 = RNDOP;
918
919 /* Check for the cases that the operand constraints are not
920 supposed to allow to happen. Abort if we get one,
921 because generating code for these cases is painful. */
922
923 if (optype0 == RNDOP || optype1 == RNDOP)
924 abort ();
925
926 /* If one operand is decrementing and one is incrementing
927 decrement the former register explicitly
928 and change that operand into ordinary indexing. */
929
930 if (optype0 == PUSHOP && optype1 == POPOP)
931 {
932 /* ??? Can this ever happen on i386? */
933 operands[0] = XEXP (XEXP (operands[0], 0), 0);
934 asm_add (-size, operands[0]);
935 if (GET_MODE (operands[1]) == XFmode)
936 operands[0] = gen_rtx (MEM, XFmode, operands[0]);
937 else if (GET_MODE (operands[0]) == DFmode)
938 operands[0] = gen_rtx (MEM, DFmode, operands[0]);
939 else
940 operands[0] = gen_rtx (MEM, DImode, operands[0]);
941 optype0 = OFFSOP;
942 }
943
944 if (optype0 == POPOP && optype1 == PUSHOP)
945 {
946 /* ??? Can this ever happen on i386? */
947 operands[1] = XEXP (XEXP (operands[1], 0), 0);
948 asm_add (-size, operands[1]);
949 if (GET_MODE (operands[1]) == XFmode)
950 operands[1] = gen_rtx (MEM, XFmode, operands[1]);
951 else if (GET_MODE (operands[1]) == DFmode)
952 operands[1] = gen_rtx (MEM, DFmode, operands[1]);
953 else
954 operands[1] = gen_rtx (MEM, DImode, operands[1]);
955 optype1 = OFFSOP;
956 }
957
958 /* If an operand is an unoffsettable memory ref, find a register
959 we can increment temporarily to make it refer to the second word. */
960
961 if (optype0 == MEMOP)
962 addreg0 = find_addr_reg (XEXP (operands[0], 0));
963
964 if (optype1 == MEMOP)
965 addreg1 = find_addr_reg (XEXP (operands[1], 0));
966
967 /* Ok, we can do one word at a time.
968 Normally we do the low-numbered word first,
969 but if either operand is autodecrementing then we
970 do the high-numbered word first.
971
972 In either case, set up in LATEHALF the operands to use
973 for the high-numbered word and in some cases alter the
974 operands in OPERANDS to be suitable for the low-numbered word. */
975
976 if (size == 12)
977 {
978 if (optype0 == REGOP)
979 {
980 middlehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
981 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 2);
982 }
983 else if (optype0 == OFFSOP)
984 {
985 middlehalf[0] = adj_offsettable_operand (operands[0], 4);
986 latehalf[0] = adj_offsettable_operand (operands[0], 8);
987 }
988 else
989 {
990 middlehalf[0] = operands[0];
991 latehalf[0] = operands[0];
992 }
993
994 if (optype1 == REGOP)
995 {
996 middlehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
997 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 2);
998 }
999 else if (optype1 == OFFSOP)
1000 {
1001 middlehalf[1] = adj_offsettable_operand (operands[1], 4);
1002 latehalf[1] = adj_offsettable_operand (operands[1], 8);
1003 }
1004 else if (optype1 == CNSTOP)
1005 {
1006 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1007 {
1008 REAL_VALUE_TYPE r; long l[3];
1009
1010 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1011 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
1012 operands[1] = GEN_INT (l[0]);
1013 middlehalf[1] = GEN_INT (l[1]);
1014 latehalf[1] = GEN_INT (l[2]);
1015 }
1016 else if (CONSTANT_P (operands[1]))
1017 /* No non-CONST_DOUBLE constant should ever appear here. */
1018 abort ();
1019 }
1020 else
1021 {
1022 middlehalf[1] = operands[1];
1023 latehalf[1] = operands[1];
1024 }
1025 }
1026 else /* size is not 12: */
1027 {
1028 if (optype0 == REGOP)
1029 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
1030 else if (optype0 == OFFSOP)
1031 latehalf[0] = adj_offsettable_operand (operands[0], 4);
1032 else
1033 latehalf[0] = operands[0];
1034
1035 if (optype1 == REGOP)
1036 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
1037 else if (optype1 == OFFSOP)
1038 latehalf[1] = adj_offsettable_operand (operands[1], 4);
1039 else if (optype1 == CNSTOP)
1040 split_double (operands[1], &operands[1], &latehalf[1]);
1041 else
1042 latehalf[1] = operands[1];
1043 }
1044
1045 /* If insn is effectively movd N (sp),-(sp) then we will do the
1046 high word first. We should use the adjusted operand 1
1047 (which is N+4 (sp) or N+8 (sp))
1048 for the low word and middle word as well,
1049 to compensate for the first decrement of sp. */
1050 if (optype0 == PUSHOP
1051 && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
1052 && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
1053 middlehalf[1] = operands[1] = latehalf[1];
1054
1055 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1056 if the upper part of reg N does not appear in the MEM, arrange to
1057 emit the move late-half first. Otherwise, compute the MEM address
1058 into the upper part of N and use that as a pointer to the memory
1059 operand. */
1060 if (optype0 == REGOP
1061 && (optype1 == OFFSOP || optype1 == MEMOP))
1062 {
1063 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1064 && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1065 {
1066 /* If both halves of dest are used in the src memory address,
1067 compute the address into latehalf of dest. */
1068 compadr:
1069 xops[0] = latehalf[0];
1070 xops[1] = XEXP (operands[1], 0);
1071 output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
1072 if( GET_MODE (operands[1]) == XFmode )
1073 {
1074 /* abort (); */
1075 operands[1] = gen_rtx (MEM, XFmode, latehalf[0]);
1076 middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
1077 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1078 }
1079 else
1080 {
1081 operands[1] = gen_rtx (MEM, DImode, latehalf[0]);
1082 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1083 }
1084 }
1085 else if (size == 12
1086 && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
1087 {
1088 /* Check for two regs used by both source and dest. */
1089 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1090 || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1091 goto compadr;
1092
1093 /* JRV says this can't happen: */
1094 if (addreg0 || addreg1)
1095 abort();
1096
1097 /* Only the middle reg conflicts; simply put it last. */
1098 output_asm_insn (singlemove_string (operands), operands);
1099 output_asm_insn (singlemove_string (latehalf), latehalf);
1100 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1101 return "";
1102 }
1103 else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
1104 /* If the low half of dest is mentioned in the source memory
1105 address, the arrange to emit the move late half first. */
1106 dest_overlapped_low = 1;
1107 }
1108
1109 /* If one or both operands autodecrementing,
1110 do the two words, high-numbered first. */
1111
1112 /* Likewise, the first move would clobber the source of the second one,
1113 do them in the other order. This happens only for registers;
1114 such overlap can't happen in memory unless the user explicitly
1115 sets it up, and that is an undefined circumstance. */
1116
1117 /*
1118 if (optype0 == PUSHOP || optype1 == PUSHOP
1119 || (optype0 == REGOP && optype1 == REGOP
1120 && REGNO (operands[0]) == REGNO (latehalf[1]))
1121 || dest_overlapped_low)
1122 */
1123 if (optype0 == PUSHOP || optype1 == PUSHOP
1124 || (optype0 == REGOP && optype1 == REGOP
1125 && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
1126 || REGNO (operands[0]) == REGNO (latehalf[1])))
1127 || dest_overlapped_low)
1128 {
1129 /* Make any unoffsettable addresses point at high-numbered word. */
1130 if (addreg0)
1131 asm_add (size-4, addreg0);
1132 if (addreg1)
1133 asm_add (size-4, addreg1);
1134
1135 /* Do that word. */
1136 output_asm_insn (singlemove_string (latehalf), latehalf);
1137
1138 /* Undo the adds we just did. */
1139 if (addreg0)
1140 asm_add (-4, addreg0);
1141 if (addreg1)
1142 asm_add (-4, addreg1);
1143
1144 if (size == 12)
1145 {
1146 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1147 if (addreg0)
1148 asm_add (-4, addreg0);
1149 if (addreg1)
1150 asm_add (-4, addreg1);
1151 }
1152
1153 /* Do low-numbered word. */
1154 return singlemove_string (operands);
1155 }
1156
1157 /* Normal case: do the two words, low-numbered first. */
1158
1159 output_asm_insn (singlemove_string (operands), operands);
1160
1161 /* Do the middle one of the three words for long double */
1162 if (size == 12)
1163 {
1164 if (addreg0)
1165 asm_add (4, addreg0);
1166 if (addreg1)
1167 asm_add (4, addreg1);
1168
1169 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1170 }
1171
1172 /* Make any unoffsettable addresses point at high-numbered word. */
1173 if (addreg0)
1174 asm_add (4, addreg0);
1175 if (addreg1)
1176 asm_add (4, addreg1);
1177
1178 /* Do that word. */
1179 output_asm_insn (singlemove_string (latehalf), latehalf);
1180
1181 /* Undo the adds we just did. */
1182 if (addreg0)
1183 asm_add (4-size, addreg0);
1184 if (addreg1)
1185 asm_add (4-size, addreg1);
1186
1187 return "";
1188 }
1189
1190 \f
1191 #define MAX_TMPS 2 /* max temporary registers used */
1192
/* Output the appropriate code to push LENGTH bytes of memory operand
   OPERANDS[1] onto the stack, four bytes at a time.

   OPERANDS[TMP_START..N_OPERANDS-1] may contain scratch registers; up
   to MAX_TMPS of them that do not overlap the source are used to batch
   load/push pairs.  With no usable scratch register, the words are
   pushed directly from memory.  INSN is used only for error reporting.
   Returns "" (all assembly is emitted via output_asm_insn).  */

char *
output_move_pushmem (operands, insn, length, tmp_start, n_operands)
     rtx operands[];
     rtx insn;
     int length;
     int tmp_start;
     int n_operands;
{

  struct {
    char *load;			/* template to load a word into a scratch reg */
    char *push;			/* template to push that scratch reg */
    rtx xops[2];		/* [0] = memory word, [1] = scratch reg */
  } tmp_info[MAX_TMPS];

  rtx src = operands[1];
  int max_tmps = 0;		/* number of usable scratch regs found */
  int offset = 0;
  int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
  int stack_offset = 0;		/* compensation when SRC is sp-relative */
  int i, num_tmps;
  rtx xops[1];

  if (!offsettable_memref_p (src))
    fatal_insn ("Source is not offsettable", insn);

  if ((length & 3) != 0)
    fatal_insn ("Pushing non-word aligned size", insn);

  /* Figure out which temporary registers we have available */
  for (i = tmp_start; i < n_operands; i++)
    {
      if (GET_CODE (operands[i]) == REG)
	{
	  /* A scratch overlapping the source would be clobbered before
	     the source is fully read.  */
	  if (reg_overlap_mentioned_p (operands[i], src))
	    continue;

	  tmp_info[ max_tmps++ ].xops[1] = operands[i];
	  if (max_tmps == MAX_TMPS)
	    break;
	}
    }

  if (max_tmps == 0)
    /* No scratch registers: push highest word first, straight from
       memory.  Each push moves sp down 4, so an sp-relative source
       address must be bumped to keep addressing the same word.  */
    for (offset = length - 4; offset >= 0; offset -= 4)
      {
	xops[0] = adj_offsettable_operand (src, offset + stack_offset);
	output_asm_insn (AS1(push%L0,%0), xops);
	if (stack_p)
	  stack_offset += 4;
      }

  else
    /* Load up to MAX_TMPS words into scratch regs, then push them;
       grouping loads ahead of pushes in the output.  */
    for (offset = length - 4; offset >= 0; )
      {
	for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
	  {
	    tmp_info[num_tmps].load = AS2(mov%L0,%0,%1);
	    tmp_info[num_tmps].push = AS1(push%L0,%1);
	    tmp_info[num_tmps].xops[0] = adj_offsettable_operand (src, offset + stack_offset);
	    offset -= 4;
	  }

	for (i = 0; i < num_tmps; i++)
	  output_asm_insn (tmp_info[i].load, tmp_info[i].xops);

	for (i = 0; i < num_tmps; i++)
	  output_asm_insn (tmp_info[i].push, tmp_info[i].xops);

	/* The pushes above moved sp down 4*num_tmps.  */
	if (stack_p)
	  stack_offset += 4*num_tmps;
      }

  return "";
}
1270
1271 \f
1272
/* Output the appropriate code to copy LENGTH bytes from memory operand
   OPERANDS[1] to memory operand OPERANDS[0], using the scratch
   registers supplied in OPERANDS[TMP_START..N_OPERANDS-1].  A
   predecrement store through sp is delegated to output_move_pushmem.
   INSN is used only for error reporting.  Returns "".  */

char *
output_move_memory (operands, insn, length, tmp_start, n_operands)
     rtx operands[];
     rtx insn;
     int length;
     int tmp_start;
     int n_operands;
{
  struct {
    char *load;			/* template to load into a scratch reg */
    char *store;		/* template to store from that reg */
    rtx xops[3];		/* [0] = dest word, [1] = src word, [2] = scratch */
  } tmp_info[MAX_TMPS];

  rtx dest = operands[0];
  rtx src = operands[1];
  rtx qi_tmp = NULL_RTX;	/* byte-capable scratch for a trailing odd byte */
  int max_tmps = 0;
  int offset = 0;
  int i, num_tmps;
  rtx xops[3];

  /* (mem (pre_inc sp)) as destination is really a push.  */
  if (GET_CODE (dest) == MEM
      && GET_CODE (XEXP (dest, 0)) == PRE_INC
      && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
    return output_move_pushmem (operands, insn, length, tmp_start, n_operands);

  if (!offsettable_memref_p (src))
    fatal_insn ("Source is not offsettable", insn);

  if (!offsettable_memref_p (dest))
    fatal_insn ("Destination is not offsettable", insn);

  /* Figure out which temporary registers we have available */
  for (i = tmp_start; i < n_operands; i++)
    {
      if (GET_CODE (operands[i]) == REG)
	{
	  /* Remember one QImode-capable reg in case LENGTH is odd.  */
	  if ((length & 1) != 0 && !qi_tmp && QI_REG_P (operands[i]))
	    qi_tmp = operands[i];

	  if (reg_overlap_mentioned_p (operands[i], dest))
	    fatal_insn ("Temporary register overlaps the destination", insn);

	  if (reg_overlap_mentioned_p (operands[i], src))
	    fatal_insn ("Temporary register overlaps the source", insn);

	  tmp_info[ max_tmps++ ].xops[2] = operands[i];
	  if (max_tmps == MAX_TMPS)
	    break;
	}
    }

  if (max_tmps == 0)
    fatal_insn ("No scratch registers were found to do memory->memory moves", insn);

  if ((length & 1) != 0)
    {
      if (!qi_tmp)
	fatal_insn ("No byte register found when moving odd # of bytes.", insn);
    }

  /* Copy in word (then halfword) chunks, batching all loads before the
     matching stores in the assembler output.  */
  while (length > 1)
    {
      for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
	{
	  if (length >= 4)
	    {
	      tmp_info[num_tmps].load = AS2(mov%L0,%1,%2);
	      tmp_info[num_tmps].store = AS2(mov%L0,%2,%0);
	      tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
	      tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
	      offset += 4;
	      length -= 4;
	    }
	  else if (length >= 2)
	    {
	      tmp_info[num_tmps].load = AS2(mov%W0,%1,%2);
	      tmp_info[num_tmps].store = AS2(mov%W0,%2,%0);
	      tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
	      tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
	      offset += 2;
	      length -= 2;
	    }
	  else
	    break;
	}

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].load, tmp_info[i].xops);

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
    }

  /* Move the remaining odd byte, if any, via the byte scratch reg.  */
  if (length == 1)
    {
      xops[0] = adj_offsettable_operand (dest, offset);
      xops[1] = adj_offsettable_operand (src, offset);
      xops[2] = qi_tmp;
      output_asm_insn (AS2(mov%B0,%1,%2), xops);
      output_asm_insn (AS2(mov%B0,%2,%0), xops);
    }

  return "";
}
1381
1382 \f
/* Return 1 if X is a CONST_DOUBLE equal to +0.0 (loadable with fldz),
   2 if it equals 1.0 (loadable with fld1), 0 otherwise (including when
   real arithmetic support is not compiled in).  */

int
standard_80387_constant_p (x)
     rtx x;
{
#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  REAL_VALUE_TYPE d;
  jmp_buf handler;
  int is0, is1;

  /* If the REAL_VALUE comparisons below trap, the float handler longjmps
     back here and we simply report "not a standard constant".  */
  if (setjmp (handler))
    return 0;

  set_float_handler (handler);
  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
  /* fldz produces +0.0 only, so reject -0.0 explicitly.  */
  is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
  is1 = REAL_VALUES_EQUAL (d, dconst1);
  set_float_handler (NULL_PTR);

  if (is0)
    return 1;

  if (is1)
    return 2;

  /* Note that on the 80387, other constants, such as pi,
     are much slower to load as standard constants
     than to load from doubles in memory!  */
#endif

  return 0;
}
1414
/* Output code to move constant OPERANDS[1] into OPERANDS[0].
   A 387 register destination uses fldz/fld1 for the standard constants
   0.0 and 1.0; otherwise a CONST_DOUBLE is converted to its target
   single-precision bit pattern and moved as an integer via
   singlemove_string.  */

char *
output_move_const_single (operands)
     rtx *operands;
{
  if (FP_REG_P (operands[0]))
    {
      int conval = standard_80387_constant_p (operands[1]);

      if (conval == 1)
	return "fldz";

      if (conval == 2)
	return "fld1";
    }
  if (GET_CODE (operands[1]) == CONST_DOUBLE)
    {
      REAL_VALUE_TYPE r; long l;

      /* An XFmode constant cannot be expressed as one 32-bit move.  */
      if (GET_MODE (operands[1]) == XFmode)
	abort ();

      REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
      REAL_VALUE_TO_TARGET_SINGLE (r, l);
      operands[1] = GEN_INT (l);
    }
  return singlemove_string (operands);
}
1442 \f
1443 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1444 reference and a constant. */
1445
1446 int
1447 symbolic_operand (op, mode)
1448 register rtx op;
1449 enum machine_mode mode;
1450 {
1451 switch (GET_CODE (op))
1452 {
1453 case SYMBOL_REF:
1454 case LABEL_REF:
1455 return 1;
1456 case CONST:
1457 op = XEXP (op, 0);
1458 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1459 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1460 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1461 default:
1462 return 0;
1463 }
1464 }
1465
1466 /* Test for a valid operand for a call instruction.
1467 Don't allow the arg pointer register or virtual regs
1468 since they may change into reg + const, which the patterns
1469 can't handle yet. */
1470
1471 int
1472 call_insn_operand (op, mode)
1473 rtx op;
1474 enum machine_mode mode;
1475 {
1476 if (GET_CODE (op) == MEM
1477 && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1478 /* This makes a difference for PIC. */
1479 && general_operand (XEXP (op, 0), Pmode))
1480 || (GET_CODE (XEXP (op, 0)) == REG
1481 && XEXP (op, 0) != arg_pointer_rtx
1482 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1483 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1484 return 1;
1485 return 0;
1486 }
1487
1488 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1489 even if pic. */
1490
1491 int
1492 expander_call_insn_operand (op, mode)
1493 rtx op;
1494 enum machine_mode mode;
1495 {
1496 if (GET_CODE (op) == MEM
1497 && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1498 || (GET_CODE (XEXP (op, 0)) == REG
1499 && XEXP (op, 0) != arg_pointer_rtx
1500 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1501 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1502 return 1;
1503 return 0;
1504 }
1505
1506 /* Return 1 if OP is a comparison operator that can use the condition code
1507 generated by an arithmetic operation. */
1508
1509 int
1510 arithmetic_comparison_operator (op, mode)
1511 register rtx op;
1512 enum machine_mode mode;
1513 {
1514 enum rtx_code code;
1515
1516 if (mode != VOIDmode && mode != GET_MODE (op))
1517 return 0;
1518 code = GET_CODE (op);
1519 if (GET_RTX_CLASS (code) != '<')
1520 return 0;
1521
1522 return (code != GT && code != LE);
1523 }
1524 \f
1525 /* Returns 1 if OP contains a symbol reference */
1526
1527 int
1528 symbolic_reference_mentioned_p (op)
1529 rtx op;
1530 {
1531 register char *fmt;
1532 register int i;
1533
1534 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1535 return 1;
1536
1537 fmt = GET_RTX_FORMAT (GET_CODE (op));
1538 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1539 {
1540 if (fmt[i] == 'E')
1541 {
1542 register int j;
1543
1544 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1545 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1546 return 1;
1547 }
1548 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1549 return 1;
1550 }
1551
1552 return 0;
1553 }
1554 \f
1555 /* Attempt to expand a binary operator. Make the expansion closer to the
1556 actual machine, then just general_operand, which will allow 3 separate
1557 memory references (one output, two input) in a single insn. Return
1558 whether the insn fails, or succeeds. */
1559
1560 int
1561 ix86_expand_binary_operator (code, mode, operands)
1562 enum rtx_code code;
1563 enum machine_mode mode;
1564 rtx operands[];
1565 {
1566 rtx insn;
1567 int i;
1568 int modified;
1569
1570 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1571 if (GET_RTX_CLASS (code) == 'c'
1572 && (rtx_equal_p (operands[0], operands[2])
1573 || immediate_operand (operands[1], mode)))
1574 {
1575 rtx temp = operands[1];
1576 operands[1] = operands[2];
1577 operands[2] = temp;
1578 }
1579
1580 /* If optimizing, copy to regs to improve CSE */
1581 if (TARGET_PSEUDO && optimize && ((reload_in_progress | reload_completed) == 0))
1582 {
1583 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1584 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1585
1586 if (GET_CODE (operands[2]) == MEM)
1587 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1588
1589 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1590 {
1591 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1592 emit_move_insn (temp, operands[1]);
1593 operands[1] = temp;
1594 return TRUE;
1595 }
1596 }
1597
1598 if (!ix86_binary_operator_ok (code, mode, operands))
1599 {
1600 /* If not optimizing, try to make a valid insn (optimize code previously did
1601 this above to improve chances of CSE) */
1602
1603 if ((!TARGET_PSEUDO || !optimize)
1604 && ((reload_in_progress | reload_completed) == 0)
1605 && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
1606 {
1607 modified = FALSE;
1608 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1609 {
1610 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1611 modified = TRUE;
1612 }
1613
1614 if (GET_CODE (operands[2]) == MEM)
1615 {
1616 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1617 modified = TRUE;
1618 }
1619
1620 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1621 {
1622 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1623 emit_move_insn (temp, operands[1]);
1624 operands[1] = temp;
1625 return TRUE;
1626 }
1627
1628 if (modified && !ix86_binary_operator_ok (code, mode, operands))
1629 return FALSE;
1630 }
1631 else
1632 return FALSE;
1633 }
1634
1635 return TRUE;
1636 }
1637 \f
1638 /* Return TRUE or FALSE depending on whether the binary operator meets the
1639 appropriate constraints. */
1640
1641 int
1642 ix86_binary_operator_ok (code, mode, operands)
1643 enum rtx_code code;
1644 enum machine_mode mode;
1645 rtx operands[3];
1646 {
1647 return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1648 && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1649 }
1650 \f
1651 /* Attempt to expand a unary operator. Make the expansion closer to the
1652 actual machine, then just general_operand, which will allow 2 separate
1653 memory references (one output, one input) in a single insn. Return
1654 whether the insn fails, or succeeds. */
1655
1656 int
1657 ix86_expand_unary_operator (code, mode, operands)
1658 enum rtx_code code;
1659 enum machine_mode mode;
1660 rtx operands[];
1661 {
1662 rtx insn;
1663
1664 /* If optimizing, copy to regs to improve CSE */
1665 if (TARGET_PSEUDO
1666 && optimize
1667 && ((reload_in_progress | reload_completed) == 0)
1668 && GET_CODE (operands[1]) == MEM)
1669 {
1670 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1671 }
1672
1673 if (!ix86_unary_operator_ok (code, mode, operands))
1674 {
1675 if ((!TARGET_PSEUDO || !optimize)
1676 && ((reload_in_progress | reload_completed) == 0)
1677 && GET_CODE (operands[1]) == MEM)
1678 {
1679 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1680 if (!ix86_unary_operator_ok (code, mode, operands))
1681 return FALSE;
1682 }
1683 else
1684 return FALSE;
1685 }
1686
1687 return TRUE;
1688 }
1689 \f
1690 /* Return TRUE or FALSE depending on whether the unary operator meets the
1691 appropriate constraints. */
1692
int
ix86_unary_operator_ok (code, mode, operands)
     enum rtx_code code;
     enum machine_mode mode;
     rtx operands[2];
{
  /* Every operand combination that reaches here is acceptable; this
     predicate imposes no constraints beyond those of the expander.  */
  return TRUE;
}
1701
1702 \f
1703
/* Label rtx and assembler name for the shared PIC thunk that loads the
   return address; created lazily by asm_output_function_prefix and
   ix86_expand_prologue.  */
static rtx pic_label_rtx;
static char pic_label_name [256];
/* Counter used to make each "LPR%d" thunk name unique.  */
static int pic_label_no = 0;
1707
1708 /* This function generates code for -fpic that loads %ebx with
1709 with the return address of the caller and then returns. */
void
asm_output_function_prefix (file, name)
     FILE * file;
     char * name;
{
  rtx xops[2];
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  xops[0] = pic_offset_table_rtx;
  xops[1] = stack_pointer_rtx;

  /* deep branch prediction favors having a return for every call */
  if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
    {
      tree prologue_node;

      /* Create the (shared) thunk label the first time through; the
	 prologue code emits calls to it by name.  */
      if (pic_label_rtx == 0)
	{
	  pic_label_rtx = (rtx) gen_label_rtx ();
	  sprintf (pic_label_name, "LPR%d", pic_label_no++);
	  LABEL_NAME (pic_label_rtx) = pic_label_name;
	}
      /* Emit the thunk as a function of its own so the assembler/linker
	 treat it like one.  */
      prologue_node = make_node (FUNCTION_DECL);
      DECL_RESULT (prologue_node) = 0;
#ifdef ASM_DECLARE_FUNCTION_NAME
      ASM_DECLARE_FUNCTION_NAME (file, pic_label_name, prologue_node);
#endif
      /* Thunk body: load the caller's return address (at (%esp)) into
	 the PIC register, then return.  */
      output_asm_insn ("movl (%1),%0", xops);
      output_asm_insn ("ret", xops);
    }
}
1741
1742 /* Set up the stack and frame (if desired) for the function. */
1743
void
function_prologue (file, size)
     FILE *file;
     int size;
{
  register int regno;
  int limit;
  rtx xops[4];
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  long tsize = get_frame_size ();

  /* pic references don't explicitly mention pic_offset_table_rtx */
  /* When prologue scheduling is on, ix86_expand_prologue emits the
     prologue as RTL instead; nothing to output as text here.  */
  if (TARGET_SCHEDULE_PROLOGUE)
    {
      pic_label_rtx = 0;
      return;
    }

  xops[0] = stack_pointer_rtx;
  xops[1] = frame_pointer_rtx;
  xops[2] = GEN_INT (tsize);

  /* Save the old frame pointer and establish the new one.  */
  if (frame_pointer_needed)
    {
      output_asm_insn ("push%L1 %1", xops);
      output_asm_insn (AS2 (mov%L0,%0,%1), xops);
    }

  /* Allocate the local frame.  When stack probing is enabled, frames
     of CHECK_STACK_LIMIT bytes or more are allocated through _alloca
     (size passed in %eax).  */
  if (tsize == 0)
    ;
  else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
    output_asm_insn (AS2 (sub%L0,%2,%0), xops);
  else
    {
      xops[3] = gen_rtx (REG, SImode, 0);
      output_asm_insn (AS2 (mov%L0,%2,%3), xops);

      xops[3] = gen_rtx (SYMBOL_REF, Pmode, "_alloca");
      output_asm_insn (AS1 (call,%P3), xops);
    }

  /* Note If use enter it is NOT reversed args.
     This one is not reversed from intel!!
     I think enter is slower. Also sdb doesn't like it.
     But if you want it the code is:
     {
     xops[3] = const0_rtx;
     output_asm_insn ("enter %2,%3", xops);
     }
  */
  /* Push every live call-saved register, plus the PIC register if this
     function needs it.  */
  limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
  for (regno = limit - 1; regno >= 0; regno--)
    if ((regs_ever_live[regno] && ! call_used_regs[regno])
	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
      {
	xops[0] = gen_rtx (REG, SImode, regno);
	output_asm_insn ("push%L0 %0", xops);
      }

  if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
    {
      /* Load the PIC register by calling the thunk emitted in
	 asm_output_function_prefix (paired call/ret favors the
	 branch predictor), then add the GOT offset.  */
      xops[0] = pic_offset_table_rtx;
      xops[1] = gen_rtx (SYMBOL_REF, Pmode, LABEL_NAME (pic_label_rtx));

      output_asm_insn (AS1 (call,%P1), xops);
      output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
      pic_label_rtx = 0;
    }
  else if (pic_reg_used)
    {
      /* Classic call-next-instruction / pop sequence to obtain the pc,
	 then add the GOT displacement relative to that label.  */
      xops[0] = pic_offset_table_rtx;
      xops[1] = (rtx) gen_label_rtx ();

      output_asm_insn (AS1 (call,%P1), xops);
      ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (xops[1]));
      output_asm_insn (AS1 (pop%L0,%0), xops);
      output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
    }
}
1824
/* Expand the function prologue as RTL insns rather than text, so the
   scheduler can interleave them with the function body.  Only used
   when TARGET_SCHEDULE_PROLOGUE (function_prologue above is the
   text-emitting variant).  Takes no arguments; the frame size comes
   from get_frame_size (). */
1828
void
ix86_expand_prologue ()
{
  register int regno;
  int limit;
  rtx xops[4];
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  long tsize = get_frame_size ();

  /* When not scheduling the prologue, function_prologue emits it as
     text instead.  */
  if (!TARGET_SCHEDULE_PROLOGUE)
    return;

  xops[0] = stack_pointer_rtx;
  xops[1] = frame_pointer_rtx;
  xops[2] = GEN_INT (tsize);
  /* Save the old frame pointer (push through (pre_dec sp)) and set up
     the new one.  */
  if (frame_pointer_needed)
    {
      emit_insn (gen_rtx (SET, 0,
			  gen_rtx (MEM, SImode,
				   gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
			  frame_pointer_rtx));
      emit_move_insn (xops[1], xops[0]);
    }

  /* Allocate the local frame; with stack probing enabled, frames of
     CHECK_STACK_LIMIT bytes or more go through _alloca (size in reg 0,
     i.e. %eax).  */
  if (tsize == 0)
    ;
  else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
    emit_insn (gen_subsi3 (xops[0], xops[0], xops[2]));
  else
    {
      xops[3] = gen_rtx (REG, SImode, 0);
      emit_move_insn (xops[3], xops[2]);
      xops[3] = gen_rtx (MEM, FUNCTION_MODE,
			 gen_rtx (SYMBOL_REF, Pmode, "_alloca"));
      emit_call_insn (gen_rtx (CALL, VOIDmode,
			       xops[3], const0_rtx));
    }

  /* Note If use enter it is NOT reversed args.
     This one is not reversed from intel!!
     I think enter is slower. Also sdb doesn't like it.
     But if you want it the code is:
     {
     xops[3] = const0_rtx;
     output_asm_insn ("enter %2,%3", xops);
     }
  */
  /* Push every live call-saved register, plus the PIC register when
     needed.  */
  limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
  for (regno = limit - 1; regno >= 0; regno--)
    if ((regs_ever_live[regno] && ! call_used_regs[regno])
	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
      {
	xops[0] = gen_rtx (REG, SImode, regno);
	emit_insn (gen_rtx (SET, 0,
			    gen_rtx (MEM, SImode,
				     gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
			    xops[0]));
      }

  if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
    {
      xops[0] = pic_offset_table_rtx;
      /* Lazily create the shared thunk label, also used by
	 asm_output_function_prefix.  */
      if (pic_label_rtx == 0)
	{
	  pic_label_rtx = (rtx) gen_label_rtx ();
	  sprintf (pic_label_name, "LPR%d", pic_label_no++);
	  LABEL_NAME (pic_label_rtx) = pic_label_name;
	}
      xops[1] = gen_rtx (MEM, QImode, gen_rtx (SYMBOL_REF, Pmode, LABEL_NAME (pic_label_rtx)));

      emit_insn (gen_prologue_get_pc (xops[0], xops[1]));
      /* NOTE(review): xops[1] is a MEM at this point, so applying
	 CODE_LABEL_NUMBER to it looks suspect — confirm against the
	 prologue_set_got pattern in the machine description.  */
      emit_insn (gen_prologue_set_got (xops[0],
				       gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
				       gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
    }
  else if (pic_reg_used)
    {
      /* get-pc / pop / add-GOT-displacement sequence.  */
      xops[0] = pic_offset_table_rtx;
      xops[1] = (rtx) gen_label_rtx ();

      emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
      emit_insn (gen_pop (xops[0]));
      emit_insn (gen_prologue_set_got (xops[0],
				       gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
				       gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER (xops[1]))));
    }
}
1917
1918 /* Restore function stack, frame, and registers. */
1919
void
function_epilogue (file, size)
     FILE *file;
     int size;
{
  /* Intentionally empty: the epilogue is emitted as RTL by
     ix86_expand_epilogue, not as text here.  FILE and SIZE are
     unused.  */
}
1926
1927 /* Return 1 if it is appropriate to emit `ret' instructions in the
1928 body of a function. Do this only if the epilogue is simple, needing a
1929 couple of insns. Prior to reloading, we can't tell how many registers
1930 must be saved, so return 0 then. Return 0 if there is no frame
1931 marker to de-allocate.
1932
1933 If NON_SAVING_SETJMP is defined and true, then it is not possible
1934 for the epilogue to be simple, so return 0. This is a special case
1935 since NON_SAVING_SETJMP will not cause regs_ever_live to change
1936 until final, but jump_optimize may need to know sooner if a
1937 `return' is OK. */
1938
1939 int
1940 ix86_can_use_return_insn_p ()
1941 {
1942 int regno;
1943 int nregs = 0;
1944 int reglimit = (frame_pointer_needed
1945 ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1946 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1947 || current_function_uses_const_pool);
1948
1949 #ifdef NON_SAVING_SETJMP
1950 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
1951 return 0;
1952 #endif
1953
1954 if (! reload_completed)
1955 return 0;
1956
1957 for (regno = reglimit - 1; regno >= 0; regno--)
1958 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1959 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1960 nregs++;
1961
1962 return nregs == 0 || ! frame_pointer_needed;
1963 }
1964
1965 \f
/* Expand the function epilogue as RTL insns so they can be scheduled.
   Takes no arguments; the frame size comes from get_frame_size (). */
1969
void
ix86_expand_epilogue ()
{
  register int regno;
  register int nregs, limit;
  int offset;
  rtx xops[3];
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);
  long tsize = get_frame_size ();

  /* Compute the number of registers to pop */

  limit = (frame_pointer_needed
	   ? FRAME_POINTER_REGNUM
	   : STACK_POINTER_REGNUM);

  nregs = 0;

  for (regno = limit - 1; regno >= 0; regno--)
    if ((regs_ever_live[regno] && ! call_used_regs[regno])
	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
      nregs++;

  /* sp is often unreliable so we must go off the frame pointer.  */

  /* In reality, we may not care if sp is unreliable, because we can
     restore the register relative to the frame pointer.  In theory,
     since each move is the same speed as a pop, and we don't need the
     leal, this is faster.  For now restore multiple registers the old
     way. */

  /* Frame-pointer-relative offset of the register save area: below the
     locals (tsize bytes) and the saved registers themselves.  */
  offset = -tsize - (nregs * UNITS_PER_WORD);

  xops[2] = stack_pointer_rtx;

  if (nregs > 1 || ! frame_pointer_needed)
    {
      /* Point sp at the register save area (when a frame pointer
	 exists), then pop the saved registers in ascending regno
	 order — the reverse of the prologue's pushes.  */
      if (frame_pointer_needed)
	{
	  xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
	  emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
	  /* output_asm_insn (AS2 (lea%L2,%0,%2), xops);*/
	}

      for (regno = 0; regno < limit; regno++)
	if ((regs_ever_live[regno] && ! call_used_regs[regno])
	    || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
	  {
	    xops[0] = gen_rtx (REG, SImode, regno);
	    emit_insn (gen_pop (xops[0]));
	    /* output_asm_insn ("pop%L0 %0", xops);*/
	  }
    }
  else
    /* At most one saved register and a frame pointer: restore it with
       a frame-pointer-relative move instead of adjusting sp.  */
    for (regno = 0; regno < limit; regno++)
      if ((regs_ever_live[regno] && ! call_used_regs[regno])
	  || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
	{
	  xops[0] = gen_rtx (REG, SImode, regno);
	  xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);
	  emit_move_insn (xops[0], xops[1]);
	  /* output_asm_insn (AS2 (mov%L0,%1,%0), xops);*/
	  offset += 4;
	}

  if (frame_pointer_needed)
    {
      /* If not an i386, mov & pop is faster than "leave". */

      if (TARGET_USE_LEAVE)
	emit_insn (gen_leave());
	/* output_asm_insn ("leave", xops);*/
      else
	{
	  xops[0] = frame_pointer_rtx;
	  xops[1] = stack_pointer_rtx;
	  /* mov %ebp,%esp then pop %ebp, i.e. an open-coded "leave".  */
	  emit_insn (gen_epilogue_set_stack_ptr());
	  /* output_asm_insn (AS2 (mov%L2,%0,%2), xops);*/
	  emit_insn (gen_pop (xops[0]));
	  /* output_asm_insn ("pop%L0 %0", xops);*/
	}
    }
  else if (tsize)
    {
      /* If there is no frame pointer, we must still release the frame. */

      xops[0] = GEN_INT (tsize);
      emit_insn (gen_rtx (SET, SImode,
			  xops[2],
			  gen_rtx (PLUS, SImode,
				   xops[2],
				   xops[0])));
      /* output_asm_insn (AS2 (add%L2,%0,%2), xops);*/
    }

#ifdef FUNCTION_BLOCK_PROFILER_EXIT
  if (profile_block_flag == 2)
    {
      FUNCTION_BLOCK_PROFILER_EXIT(file);
    }
#endif

  /* Return, popping the callee-popped argument bytes if any.  */
  if (current_function_pops_args && current_function_args_size)
    {
      xops[1] = GEN_INT (current_function_pops_args);

      /* i386 can only pop 32K bytes (maybe 64K?  Is it signed?).  If
	 asked to pop more, pop return address, do explicit add, and jump
	 indirectly to the caller. */

      if (current_function_pops_args >= 32768)
	{
	  /* ??? Which register to use here? */
	  xops[0] = gen_rtx (REG, SImode, 2);
	  emit_insn (gen_pop (xops[0]));
	  /* output_asm_insn ("pop%L0 %0", xops);*/
	  emit_insn (gen_rtx (SET, SImode,
			      xops[2],
			      gen_rtx (PLUS, SImode,
				       xops[1],
				       xops[2])));
	  /* output_asm_insn (AS2 (add%L2,%1,%2), xops);*/
	  emit_jump_insn (xops[0]);
	  /* output_asm_insn ("jmp %*%0", xops);*/
	}
      else
	emit_jump_insn (gen_return_pop_internal (xops[1]));
	/* output_asm_insn ("ret %1", xops);*/
    }
  else
    /* output_asm_insn ("ret", xops);*/
    emit_jump_insn (gen_return_internal ());
}
2105
2106 \f
2107 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2108 that is a valid memory address for an instruction.
2109 The MODE argument is the machine mode for the MEM expression
2110 that wants to use this address.
2111
2112 On x86, legitimate addresses are:
2113 base movl (base),reg
2114 displacement movl disp,reg
2115 base + displacement movl disp(base),reg
2116 index + base movl (base,index),reg
2117 (index + base) + displacement movl disp(base,index),reg
2118 index*scale movl (,index,scale),reg
2119 index*scale + disp movl disp(,index,scale),reg
2120 index*scale + base movl (base,index,scale),reg
2121 (index*scale + base) + disp movl disp(base,index,scale),reg
2122
2123 In each case, scale can be 1, 2, 4, 8. */
2124
2125 /* This is exactly the same as print_operand_addr, except that
2126 it recognizes addresses instead of printing them.
2127
2128 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2129 convert common non-canonical forms to canonical form so that they will
2130 be recognized. */
2131
/* Report an invalid-address diagnostic MSG for INSN on stderr when
   address debugging (TARGET_DEBUG_ADDR) is enabled; otherwise a no-op.
   MSG is written verbatim with fputs, deliberately NOT used as a
   printf format string, so a stray `%' in a message cannot corrupt
   the output or crash the compiler.  */
#define ADDR_INVALID(msg,insn)						\
do {									\
  if (TARGET_DEBUG_ADDR)						\
    {									\
      fputs (msg, stderr);						\
      debug_rtx (insn);							\
    }									\
} while (0)
2140
/* Return nonzero (TRUE) if ADDR is a valid memory address for MODE.

   ADDR is decomposed into up to four components:
       base  -- a register
       indx  -- an index register, optionally multiplied by scale
       scale -- a CONST_INT which must be 1, 2, 4 or 8
       disp  -- a constant displacement
   and each component is then validated separately.  STRICT nonzero
   means apply the strict (post-reload) register checks; zero also
   accepts pseudo registers.  */
int
legitimate_address_p (mode, addr, strict)
     enum machine_mode mode;
     register rtx addr;
     int strict;
{
  rtx base = NULL_RTX;
  rtx indx = NULL_RTX;
  rtx scale = NULL_RTX;
  rtx disp = NULL_RTX;

  if (TARGET_DEBUG_ADDR)
    {
      fprintf (stderr,
	       "\n==========\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
	       GET_MODE_NAME (mode), strict);
      debug_rtx (addr);
    }

  /* Split ADDR into base/indx/scale/disp by its outermost rtx code.
     Only canonical forms are recognized here; LEGITIMIZE_ADDRESS is
     expected to have canonicalized the rest (see comment above).  */
  if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
    base = addr;			/* base reg */

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == SUBREG)
	{
	  if (code1 == REG || code1 == SUBREG)
	    {
	      indx = op0;		/* index + base */
	      base = op1;
	    }

	  else
	    {
	      base = op0;		/* base + displacement */
	      disp = op1;
	    }
	}

      else if (code0 == MULT)
	{
	  indx = XEXP (op0, 0);
	  scale = XEXP (op0, 1);

	  if (code1 == REG || code1 == SUBREG)
	    base = op1;			/* index*scale + base */

	  else
	    disp = op1;			/* index*scale + disp */
	}

      else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
	{
	  indx = XEXP (XEXP (op0, 0), 0);	/* index*scale + base + disp */
	  scale = XEXP (XEXP (op0, 0), 1);
	  base = XEXP (op0, 1);
	  disp = op1;
	}

      else if (code0 == PLUS)
	{
	  indx = XEXP (op0, 0);		/* index + base + disp */
	  base = XEXP (op0, 1);
	  disp = op1;
	}

      else
	{
	  ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
	  return FALSE;
	}
    }

  else if (GET_CODE (addr) == MULT)
    {
      indx = XEXP (addr, 0);		/* index*scale */
      scale = XEXP (addr, 1);
    }

  else
    disp = addr;			/* displacement */

  /* Allow arg pointer and stack pointer as index if there is not scaling;
     these can only serve as a base register in hardware, so swap them
     into the base slot.  */
  if (base && indx && !scale
      && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
    {
      rtx tmp = base;
      base = indx;
      indx = tmp;
    }

  /* Validate base register */
  /* Don't allow SUBREG's here, it can lead to spill failures when the base
     is one word out of a two word structure, which is represented internally
     as a DImode int. */
  if (base)
    {
      if (GET_CODE (base) != REG)
	{
	  ADDR_INVALID ("Base is not a register.\n", base);
	  return FALSE;
	}

      if ((strict && !REG_OK_FOR_BASE_STRICT_P (base))
	  || (!strict && !REG_OK_FOR_BASE_NONSTRICT_P (base)))
	{
	  ADDR_INVALID ("Base is not valid.\n", base);
	  return FALSE;
	}
    }

  /* Validate index register */
  /* Don't allow SUBREG's here, it can lead to spill failures when the index
     is one word out of a two word structure, which is represented internally
     as a DImode int. */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
	{
	  ADDR_INVALID ("Index is not a register.\n", indx);
	  return FALSE;
	}

      if ((strict && !REG_OK_FOR_INDEX_STRICT_P (indx))
	  || (!strict && !REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
	{
	  ADDR_INVALID ("Index is not valid.\n", indx);
	  return FALSE;
	}
    }
  else if (scale)
    abort ();			/* scale w/o index invalid */

  /* Validate scale factor */
  if (scale)
    {
      HOST_WIDE_INT value;

      if (GET_CODE (scale) != CONST_INT)
	{
	  ADDR_INVALID ("Scale is not valid.\n", scale);
	  return FALSE;
	}

      value = INTVAL (scale);
      /* Only the hardware-supported scale factors are accepted.  */
      if (value != 1 && value != 2 && value != 4 && value != 8)
	{
	  ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
	  return FALSE;
	}
    }

  /* Validate displacement
     Constant pool addresses must be handled special.  They are
     considered legitimate addresses, but only if not used with regs.
     When printed, the output routines know to print the reference with the
     PIC reg, even though the PIC reg doesn't appear in the RTL. */
  if (disp)
    {
      if (GET_CODE (disp) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (disp)
	  && !base
	  && !indx)
	;

      else if (!CONSTANT_ADDRESS_P (disp))
	{
	  ADDR_INVALID ("Displacement is not valid.\n", disp);
	  return FALSE;
	}

      else if (GET_CODE (disp) == CONST_DOUBLE)
	{
	  ADDR_INVALID ("Displacement is a const_double.\n", disp);
	  return FALSE;
	}

      /* In PIC code, a symbolic displacement is only valid when it is
	 combined with the (unscaled) PIC register.  */
      else if (flag_pic && SYMBOLIC_CONST (disp)
	       && base != pic_offset_table_rtx
	       && (indx != pic_offset_table_rtx || scale != NULL_RTX))
	{
	  ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp);
	  return FALSE;
	}

      else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp)
	       && (base != NULL_RTX || indx != NULL_RTX))
	{
	  ADDR_INVALID ("Displacement is an invalid half-pic reference.\n", disp);
	  return FALSE;
	}
    }

  if (TARGET_DEBUG_ADDR)
    fprintf (stderr, "Address is valid.\n");

  /* Everything looks valid, return true */
  return TRUE;
}
2346
2347 \f
2348 /* Return a legitimate reference for ORIG (an address) using the
2349 register REG. If REG is 0, a new pseudo is generated.
2350
2351 There are three types of references that must be handled:
2352
2353 1. Global data references must load the address from the GOT, via
2354 the PIC reg. An insn is emitted to do this load, and the reg is
2355 returned.
2356
2357 2. Static data references must compute the address as an offset
2358 from the GOT, whose base is in the PIC reg. An insn is emitted to
2359 compute the address into a reg, and the reg is returned. Static
2360 data objects have SYMBOL_REF_FLAG set to differentiate them from
2361 global data objects.
2362
2363 3. Constant pool addresses must be handled special. They are
2364 considered legitimate addresses, but only if not used with regs.
2365 When printed, the output routines know to print the reference with the
2366 PIC reg, even though the PIC reg doesn't appear in the RTL.
2367
2368 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2369 reg also appears in the address (except for constant pool references,
2370 noted above).
2371
2372 "switch" statements also require special handling when generating
2373 PIC code. See comments by the `casesi' insn in i386.md for details. */
2374
/* See the block comment above: return a legitimate PIC reference for
   ORIG, using REG as scratch (a new pseudo when REG is 0).  */
rtx
legitimize_pic_address (orig, reg)
     rtx orig;
     rtx reg;
{
  rtx addr = orig;
  rtx new = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      /* Constant pool references are legitimate as-is; the output
	 routines supply the PIC register when printing them.  */
      if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
	reg = new = orig;
      else
	{
	  if (reg == 0)
	    reg = gen_reg_rtx (Pmode);

	  /* Static data (SYMBOL_REF_FLAG set) and labels are addressed
	     as an offset from the GOT base; other symbols are loaded
	     through a GOT entry (a memory reference off the PIC reg).  */
	  if ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FLAG (addr))
	      || GET_CODE (addr) == LABEL_REF)
	    new = gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig);
	  else
	    new = gen_rtx (MEM, Pmode,
			   gen_rtx (PLUS, Pmode,
				    pic_offset_table_rtx, orig));

	  emit_move_insn (reg, new);
	}
      current_function_uses_pic_offset_table = 1;
      return reg;
    }
  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  if (GET_CODE (addr) != PLUS)
	    abort ();
	}

      /* Already expressed relative to the PIC register: nothing to do.  */
      if (XEXP (addr, 0) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	reg = gen_reg_rtx (Pmode);

      /* Legitimize both halves of the PLUS; reuse REG for the second
	 half only if the first half did not consume it.  */
      base = legitimize_pic_address (XEXP (addr, 0), reg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg);

      if (GET_CODE (addr) == CONST_INT)
	return plus_constant (base, INTVAL (addr));

      /* Keep any constant term outermost so the result stays in
	 canonical address form.  */
      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx (PLUS, Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}
      return gen_rtx (PLUS, Pmode, base, addr);
    }
  return new;
}
2438 \f
2439
2440 /* Emit insns to move operands[1] into operands[0]. */
2441
2442 void
2443 emit_pic_move (operands, mode)
2444 rtx *operands;
2445 enum machine_mode mode;
2446 {
2447 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
2448
2449 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2450 operands[1] = (rtx) force_reg (SImode, operands[1]);
2451 else
2452 operands[1] = legitimize_pic_address (operands[1], temp);
2453 }
2454
2455 \f
2456 /* Try machine-dependent ways of modifying an illegitimate address
2457 to be legitimate. If we find one, return the new, valid address.
2458 This macro is used in only one place: `memory_address' in explow.c.
2459
2460 OLDX is the address as it was before break_out_memory_refs was called.
2461 In some cases it is useful to look at this to decide what needs to be done.
2462
2463 MODE and WIN are passed so that this macro can use
2464 GO_IF_LEGITIMATE_ADDRESS.
2465
2466 It is always safe for this macro to do nothing. It exists to recognize
2467 opportunities to optimize the output.
2468
2469 For the 80386, we handle X+REG by loading X into a register R and
2470 using R+REG. R will go in a general reg and indexing will be used.
2471 However, if REG is a broken-out memory address or multiplication,
2472 nothing needs to be done because REG can certainly go in a general reg.
2473
2474 When -fpic is used, special handling is needed for symbolic references.
2475 See comments by legitimize_pic_address in i386.c for details. */
2476
2477 rtx
2478 legitimize_address (x, oldx, mode)
2479 register rtx x;
2480 register rtx oldx;
2481 enum machine_mode mode;
2482 {
2483 int changed = 0;
2484 unsigned log;
2485
2486 if (TARGET_DEBUG_ADDR)
2487 {
2488 fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n", GET_MODE_NAME (mode));
2489 debug_rtx (x);
2490 }
2491
2492 if (flag_pic && SYMBOLIC_CONST (x))
2493 return legitimize_pic_address (x, 0);
2494
2495 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2496 if (GET_CODE (x) == ASHIFT
2497 && GET_CODE (XEXP (x, 1)) == CONST_INT
2498 && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
2499 {
2500 changed = 1;
2501 x = gen_rtx (MULT, Pmode,
2502 force_reg (Pmode, XEXP (x, 0)),
2503 GEN_INT (1 << log));
2504 }
2505
2506 if (GET_CODE (x) == PLUS)
2507 {
2508 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2509 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2510 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2511 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
2512 {
2513 changed = 1;
2514 XEXP (x, 0) = gen_rtx (MULT, Pmode,
2515 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
2516 GEN_INT (1 << log));
2517 }
2518
2519 if (GET_CODE (XEXP (x, 1)) == ASHIFT
2520 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2521 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
2522 {
2523 changed = 1;
2524 XEXP (x, 1) = gen_rtx (MULT, Pmode,
2525 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
2526 GEN_INT (1 << log));
2527 }
2528
2529 /* Put multiply first if it isn't already */
2530 if (GET_CODE (XEXP (x, 1)) == MULT)
2531 {
2532 rtx tmp = XEXP (x, 0);
2533 XEXP (x, 0) = XEXP (x, 1);
2534 XEXP (x, 1) = tmp;
2535 changed = 1;
2536 }
2537
2538 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2539 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2540 created by virtual register instantiation, register elimination, and
2541 similar optimizations. */
2542 if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
2543 {
2544 changed = 1;
2545 x = gen_rtx (PLUS, Pmode,
2546 gen_rtx (PLUS, Pmode, XEXP (x, 0), XEXP (XEXP (x, 1), 0)),
2547 XEXP (XEXP (x, 1), 1));
2548 }
2549
2550 /* Canonicalize (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2551 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2552 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
2553 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2554 && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
2555 && CONSTANT_P (XEXP (x, 1)))
2556 {
2557 rtx constant, other;
2558
2559 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2560 {
2561 constant = XEXP (x, 1);
2562 other = XEXP (XEXP (XEXP (x, 0), 1), 1);
2563 }
2564 else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
2565 {
2566 constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
2567 other = XEXP (x, 1);
2568 }
2569 else
2570 constant = 0;
2571
2572 if (constant)
2573 {
2574 changed = 1;
2575 x = gen_rtx (PLUS, Pmode,
2576 gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
2577 XEXP (XEXP (XEXP (x, 0), 1), 0)),
2578 plus_constant (other, INTVAL (constant)));
2579 }
2580 }
2581
2582 if (changed && legitimate_address_p (mode, x, FALSE))
2583 return x;
2584
2585 if (GET_CODE (XEXP (x, 0)) == MULT)
2586 {
2587 changed = 1;
2588 XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
2589 }
2590
2591 if (GET_CODE (XEXP (x, 1)) == MULT)
2592 {
2593 changed = 1;
2594 XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
2595 }
2596
2597 if (changed
2598 && GET_CODE (XEXP (x, 1)) == REG
2599 && GET_CODE (XEXP (x, 0)) == REG)
2600 return x;
2601
2602 if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
2603 {
2604 changed = 1;
2605 x = legitimize_pic_address (x, 0);
2606 }
2607
2608 if (changed && legitimate_address_p (mode, x, FALSE))
2609 return x;
2610
2611 if (GET_CODE (XEXP (x, 0)) == REG)
2612 {
2613 register rtx temp = gen_reg_rtx (Pmode);
2614 register rtx val = force_operand (XEXP (x, 1), temp);
2615 if (val != temp)
2616 emit_move_insn (temp, val);
2617
2618 XEXP (x, 1) = temp;
2619 return x;
2620 }
2621
2622 else if (GET_CODE (XEXP (x, 1)) == REG)
2623 {
2624 register rtx temp = gen_reg_rtx (Pmode);
2625 register rtx val = force_operand (XEXP (x, 0), temp);
2626 if (val != temp)
2627 emit_move_insn (temp, val);
2628
2629 XEXP (x, 0) = temp;
2630 return x;
2631 }
2632 }
2633
2634 return x;
2635 }
2636
2637 \f
2638 /* Print an integer constant expression in assembler syntax. Addition
2639 and subtraction are the only arithmetic that may appear in these
2640 expressions. FILE is the stdio stream to write to, X is the rtx, and
2641 CODE is the operand print code from the output string. */
2642
/* Implementation notes: X must be a constant expression built from
   symbols, labels and integers with PLUS/MINUS only (see comment
   above); anything else is an operand error.  */
static void
output_pic_addr_const (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  char buf[256];

  switch (GET_CODE (x))
    {
    case PC:
      /* `.' (the current location) only makes sense in PIC output.  */
      if (flag_pic)
	putc ('.', file);
      else
	abort ();
      break;

    case SYMBOL_REF:
    case LABEL_REF:
      if (GET_CODE (x) == SYMBOL_REF)
	assemble_name (file, XSTR (x, 0));
      else
	{
	  ASM_GENERATE_INTERNAL_LABEL (buf, "L",
				       CODE_LABEL_NUMBER (XEXP (x, 0)));
	  assemble_name (asm_out_file, buf);
	}

      /* Pick the relocation suffix: constant pool entries, labels and
	 SYMBOL_REF_FLAG (static) symbols are GOT-relative (@GOTOFF),
	 other symbols go through the GOT (@GOT); operand code 'P'
	 forces a PLT reference.  */
      if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
	fprintf (file, "@GOTOFF(%%ebx)");
      else if (code == 'P')
	fprintf (file, "@PLT");
      else if (GET_CODE (x) == LABEL_REF)
	fprintf (file, "@GOTOFF");
      else if (! SYMBOL_REF_FLAG (x))
	fprintf (file, "@GOT");
      else
	fprintf (file, "@GOTOFF");

      break;

    case CODE_LABEL:
      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
      assemble_name (asm_out_file, buf);
      break;

    case CONST_INT:
      /* NOTE(review): "%d" assumes INTVAL fits in an int; on hosts
	 where HOST_WIDE_INT is wider this may misprint -- confirm.  */
      fprintf (file, "%d", INTVAL (x));
      break;

    case CONST:
      /* This used to output parentheses around the expression,
	 but that does not work on the 386 (either ATT or BSD assembler). */
      output_pic_addr_const (file, XEXP (x, 0), code);
      break;

    case CONST_DOUBLE:
      if (GET_MODE (x) == VOIDmode)
	{
	  /* We can use %d if the number is <32 bits and positive. */
	  if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
	    fprintf (file, "0x%x%08x",
		     CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
	  else
	    fprintf (file, "%d", CONST_DOUBLE_LOW (x));
	}
      else
	/* We can't handle floating point constants;
	   PRINT_OPERAND must handle them. */
	output_operand_lossage ("floating constant misused");
      break;

    case PLUS:
      /* Some assemblers need integer constants to appear last (eg masm). */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
	{
	  output_pic_addr_const (file, XEXP (x, 1), code);
	  if (INTVAL (XEXP (x, 0)) >= 0)
	    fprintf (file, "+");
	  output_pic_addr_const (file, XEXP (x, 0), code);
	}
      else
	{
	  output_pic_addr_const (file, XEXP (x, 0), code);
	  /* NOTE(review): INTVAL is applied here even when XEXP (x, 1)
	     is not a CONST_INT -- looks suspicious; confirm that
	     operand 1 is always the integer term in canonical form.  */
	  if (INTVAL (XEXP (x, 1)) >= 0)
	    fprintf (file, "+");
	  output_pic_addr_const (file, XEXP (x, 1), code);
	}
      break;

    case MINUS:
      output_pic_addr_const (file, XEXP (x, 0), code);
      fprintf (file, "-");
      output_pic_addr_const (file, XEXP (x, 1), code);
      break;

    default:
      output_operand_lossage ("invalid expression as operand");
    }
}
2743 \f
2744 /* Append the correct conditional move suffix which corresponds to CODE */
2745
2746 static void
2747 put_condition_code (code, mode, file)
2748 enum rtx_code code;
2749 enum mode_class mode;
2750 FILE * file;
2751 {
2752 if (mode == MODE_INT)
2753 switch (code)
2754 {
2755 case NE:
2756 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2757 fputs ("b", file);
2758 else
2759 fputs ("ne", file);
2760 return;
2761 case EQ:
2762 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2763 fputs ("ae", file);
2764 else
2765 fputs ("e", file);
2766 return;
2767 case GE:
2768 fputs ("ge", file); return;
2769 case GT:
2770 fputs ("g", file); return;
2771 case LE:
2772 fputs ("le", file); return;
2773 case LT:
2774 fputs ("l", file); return;
2775 case GEU:
2776 fputs ("ae", file); return;
2777 case GTU:
2778 fputs ("a", file); return;
2779 case LEU:
2780 fputs ("be", file); return;
2781 case LTU:
2782 fputs ("b", file); return;
2783 default: output_operand_lossage ("Invalid %%C operand");
2784 }
2785 else if (mode == MODE_FLOAT)
2786 switch (code)
2787 {
2788 case NE:
2789 fputs ("ne", file); return;
2790 case EQ:
2791 fputs ("e", file); return;
2792 case GE:
2793 fputs ("nb", file); return;
2794 case GT:
2795 fputs ("nbe", file); return;
2796 case LE:
2797 fputs ("be", file); return;
2798 case LT:
2799 fputs ("b", file); return;
2800 case GEU:
2801 fputs ("nb", file); return;
2802 case GTU:
2803 fputs ("nbe", file); return;
2804 case LEU:
2805 fputs ("be", file); return;
2806 case LTU:
2807 fputs ("b", file); return;
2808 default: output_operand_lossage ("Invalid %%C operand");
2809 }
2810 }
2811
2812 /* Meaning of CODE:
2813 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
2814 C -- print opcode suffix for set/cmov insn.
2815 c -- like C, but print reversed condition
2816 F -- print opcode suffix for fcmov insn.
   f -- like F, but print reversed condition
2818 R -- print the prefix for register names.
2819 z -- print the opcode suffix for the size of the current operand.
2820 * -- print a star (in certain assembler syntax)
2821 w -- print the operand as if it's a "word" (HImode) even if it isn't.
2822 c -- don't print special prefixes before constant operands.
2823 J -- print the appropriate jump operand.
2824 s -- print a shift double count, followed by the assemblers argument
2825 delimiter.
2826 b -- print the QImode name of the register for the indicated operand.
2827 %b0 would print %al if operands[0] is reg 0.
2828 w -- likewise, print the HImode name of the register.
2829 k -- likewise, print the SImode name of the register.
2830 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
2831 y -- print "st(0)" instead of "st" as a register.
2832 P -- print as a PIC constant
2833 */
2834
/* Print operand X (an rtx) to FILE in assembler syntax.  CODE is the
   operand-code letter from the output template (see the table above),
   or 0 for a plain operand.  */
void
print_operand (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  if (code)
    {
      switch (code)
	{
	case '*':
	  if (USE_STAR)
	    putc ('*', file);
	  return;

	case 'L':
	  PUT_OP_SIZE (code, 'l', file);
	  return;

	case 'W':
	  PUT_OP_SIZE (code, 'w', file);
	  return;

	case 'B':
	  PUT_OP_SIZE (code, 'b', file);
	  return;

	case 'Q':
	  PUT_OP_SIZE (code, 'l', file);
	  return;

	case 'S':
	  PUT_OP_SIZE (code, 's', file);
	  return;

	case 'T':
	  PUT_OP_SIZE (code, 't', file);
	  return;

	case 'z':
	  /* 387 opcodes don't get size suffixes if the operands are
	     registers. */

	  if (STACK_REG_P (x))
	    return;

	  /* this is the size of op from size of operand */
	  switch (GET_MODE_SIZE (GET_MODE (x)))
	    {
	    case 1:
	      PUT_OP_SIZE ('B', 'b', file);
	      return;

	    case 2:
	      PUT_OP_SIZE ('W', 'w', file);
	      return;

	    case 4:
	      if (GET_MODE (x) == SFmode)
		{
		  PUT_OP_SIZE ('S', 's', file);
		  return;
		}
	      else
		PUT_OP_SIZE ('L', 'l', file);
	      return;

	    case 12:
	      PUT_OP_SIZE ('T', 't', file);
	      return;

	    case 8:
	      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
		{
#ifdef GAS_MNEMONICS
		  PUT_OP_SIZE ('Q', 'q', file);
		  return;
#else
		  /* NOTE(review): without GAS_MNEMONICS this emits the
		     'l' suffix here AND again via the PUT_OP_SIZE just
		     below -- looks like a double emission; confirm
		     whether PUT_OP_SIZE guards against repeating.  */
		  PUT_OP_SIZE ('Q', 'l', file);	/* Fall through */
#endif
		}

	      PUT_OP_SIZE ('Q', 'l', file);
	      return;
	    }
	  /* Unrecognized size: fall through to the register-name
	     cases below, which just break out and print X itself.  */

	case 'b':
	case 'w':
	case 'k':
	case 'h':
	case 'y':
	case 'P':
	  /* These only select a register-name variant; the actual
	     printing happens in PRINT_REG below.  */
	  break;

	case 'J':
	  switch (GET_CODE (x))
	    {
	      /* These conditions are appropriate for testing the result
		 of an arithmetic operation, not for a compare operation.
	         Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
		 CC_Z_IN_NOT_C false and not floating point. */
	    case NE:  fputs ("jne", file); return;
	    case EQ:  fputs ("je",  file); return;
	    case GE:  fputs ("jns", file); return;
	    case LT:  fputs ("js",  file); return;
	    case GEU: fputs ("jmp", file); return;
	    case GTU: fputs ("jne",  file); return;
	    case LEU: fputs ("je", file); return;
	    case LTU: fputs ("#branch never",  file); return;

	    /* no matching branches for GT nor LE */
	    }
	  abort ();

	case 's':
	  /* Shift-double count: print the count followed by the
	     assembler's operand separator, unless the assembler omits
	     a register (%cl) count.  */
	  if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
	    {
	      PRINT_OPERAND (file, x, 0);
	      fputs (AS2C (,) + 1, file);
	    }
	  return;

	  /* This is used by the conditional move instructions.  */
	case 'C':
	  put_condition_code (GET_CODE (x), MODE_INT, file);
	  return;

	  /* like above, but reverse condition */
	case 'c':
	  put_condition_code (reverse_condition (GET_CODE (x)), MODE_INT, file);
	  return;

	case 'F':
	  put_condition_code (GET_CODE (x), MODE_FLOAT, file);
	  return;

	  /* like above, but reverse condition */
	case 'f':
	  put_condition_code (reverse_condition (GET_CODE (x)),
			      MODE_FLOAT, file);
	  return;

	default:
	  {
	    char str[50];

	    sprintf (str, "invalid operand code `%c'", code);
	    output_operand_lossage (str);
	  }
	}
    }
  /* No (remaining) operand code: print X itself.  */
  if (GET_CODE (x) == REG)
    {
      PRINT_REG (x, code, file);
    }
  else if (GET_CODE (x) == MEM)
    {
      PRINT_PTR (x, file);
      if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
	{
	  if (flag_pic)
	    output_pic_addr_const (file, XEXP (x, 0), code);
	  else
	    output_addr_const (file, XEXP (x, 0));
	}
      else
	output_address (XEXP (x, 0));
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* SFmode immediate: print its target bit pattern in hex.  */
      REAL_VALUE_TYPE r; long l;
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_TARGET_SINGLE (r, l);
      PRINT_IMMED_PREFIX (file);
      fprintf (file, "0x%x", l);
    }
  /* These float cases don't actually occur as immediate operands. */
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE r; char dstr[30];
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
      fprintf (file, "%s", dstr);
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
    {
      REAL_VALUE_TYPE r; char dstr[30];
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
      fprintf (file, "%s", dstr);
    }
  else
    {
      /* Constant or symbolic immediate.  Code 'P' (PIC constant)
	 suppresses the immediate/offset prefixes.  */
      if (code != 'P')
	{
	  if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	    PRINT_IMMED_PREFIX (file);
	  else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
		   || GET_CODE (x) == LABEL_REF)
	    PRINT_OFFSET_PREFIX (file);
	}
      if (flag_pic)
	output_pic_addr_const (file, x, code);
      else
	output_addr_const (file, x);
    }
}
3042 \f
3043 /* Print a memory operand whose address is ADDR. */
3044
/* Print to FILE the memory address ADDR in assembler syntax,
   decomposing a PLUS into base register, scaled index and
   displacement.  */
void
print_operand_address (file, addr)
     FILE *file;
     register rtx addr;
{
  register rtx reg1, reg2, breg, ireg;
  rtx offset;

  switch (GET_CODE (addr))
    {
    case REG:
      /* Plain register indirect.  */
      ADDR_BEG (file);
      fprintf (file, "%se", RP);
      fputs (hi_reg_name[REGNO (addr)], file);
      ADDR_END (file);
      break;

    case PLUS:
      reg1 = 0;
      reg2 = 0;
      ireg = 0;
      breg = 0;
      offset = 0;
      /* Peel off a constant displacement, if any.  */
      if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
	{
	  offset = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
	{
	  offset = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      /* Pull out up to two register (or reg*scale MULT) terms into
	 reg1/reg2.  */
      if (GET_CODE (addr) != PLUS) ;
      else if (GET_CODE (XEXP (addr, 0)) == MULT)
	{
	  reg1 = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (GET_CODE (XEXP (addr, 1)) == MULT)
	{
	  reg1 = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 0)) == REG)
	{
	  reg1 = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (GET_CODE (XEXP (addr, 1)) == REG)
	{
	  reg1 = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
	{
	  if (reg1 == 0) reg1 = addr;
	  else reg2 = addr;
	  addr = 0;
	}
      if (offset != 0)
	{
	  if (addr != 0) abort ();
	  addr = offset;
	}
      /* Assign base and index roles: a MULT term is always the index;
	 otherwise prefer a register usable as a base.  */
      if ((reg1 && GET_CODE (reg1) == MULT)
	  || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
	{
	  breg = reg2;
	  ireg = reg1;
	}
      else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
	{
	  breg = reg1;
	  ireg = reg2;
	}

      if (ireg != 0 || breg != 0)
	{
	  int scale = 1;

	  /* Print the displacement part first.  */
	  if (addr != 0)
	    {
	      if (flag_pic)
		output_pic_addr_const (file, addr, 0);

	      else if (GET_CODE (addr) == LABEL_REF)
		output_asm_label (addr);

	      else
		output_addr_const (file, addr);
	    }

	  if (ireg != 0 && GET_CODE (ireg) == MULT)
	    {
	      scale = INTVAL (XEXP (ireg, 1));
	      ireg = XEXP (ireg, 0);
	    }

	  /* The stack pointer can only appear as a base register,
	     never an index register, so exchange the regs if it is wrong. */

	  if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
	    {
	      rtx tmp;

	      tmp = breg;
	      breg = ireg;
	      ireg = tmp;
	    }

	  /* output breg+ireg*scale */
	  PRINT_B_I_S (breg, ireg, scale, file);
	  break;
	}
      /* NOTE(review): when neither base nor index was found above,
	 control falls through into the MULT case below with ADDR not
	 necessarily a MULT -- confirm this path is unreachable for
	 legitimate addresses.  */

    case MULT:
      {
	int scale;
	/* Bare scaled index, e.g. 0(,%eax,4).  */
	if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
	  {
	    scale = INTVAL (XEXP (addr, 0));
	    ireg = XEXP (addr, 1);
	  }
	else
	  {
	    scale = INTVAL (XEXP (addr, 1));
	    ireg = XEXP (addr, 0);
	  }
	output_addr_const (file, const0_rtx);
	PRINT_B_I_S ((rtx) 0, ireg, scale, file);
      }
      break;

    default:
      /* Constant (or symbolic) absolute address.  */
      if (GET_CODE (addr) == CONST_INT
	  && INTVAL (addr) < 0x8000
	  && INTVAL (addr) >= -0x8000)
	fprintf (file, "%d", INTVAL (addr));
      else
	{
	  if (flag_pic)
	    output_pic_addr_const (file, addr, 0);
	  else
	    output_addr_const (file, addr);
	}
    }
}
3193 \f
3194 /* Set the cc_status for the results of an insn whose pattern is EXP.
3195 On the 80386, we assume that only test and compare insns, as well
3196 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3197 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3198 Also, we assume that jumps, moves and sCOND don't affect the condition
3199 codes. All else clobbers the condition codes, by assumption.
3200
3201 We assume that ALL integer add, minus, etc. instructions effect the
3202 condition codes. This MUST be consistent with i386.md.
3203
3204 We don't record any float test or compare - the redundant test &
3205 compare check in final.c does not handle stack-like regs correctly. */
3206
3207 void
3208 notice_update_cc (exp)
3209 rtx exp;
3210 {
3211 if (GET_CODE (exp) == SET)
3212 {
3213 /* Jumps do not alter the cc's. */
3214 if (SET_DEST (exp) == pc_rtx)
3215 return;
3216 #ifdef IS_STACK_MODE
3217 /* Moving into a memory of stack_mode may have been moved
3218 in between the use and set of cc0 by loop_spl(). So
3219 old value of cc.status must be retained */
3220 if(GET_CODE(SET_DEST(exp))==MEM
3221 && IS_STACK_MODE(GET_MODE(SET_DEST(exp))))
3222 {
3223 return;
3224 }
3225 #endif
3226 /* Moving register or memory into a register:
3227 it doesn't alter the cc's, but it might invalidate
3228 the RTX's which we remember the cc's came from.
3229 (Note that moving a constant 0 or 1 MAY set the cc's). */
3230 if (REG_P (SET_DEST (exp))
3231 && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
3232 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3233 {
3234 if (cc_status.value1
3235 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
3236 cc_status.value1 = 0;
3237 if (cc_status.value2
3238 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
3239 cc_status.value2 = 0;
3240 return;
3241 }
3242 /* Moving register into memory doesn't alter the cc's.
3243 It may invalidate the RTX's which we remember the cc's came from. */
3244 if (GET_CODE (SET_DEST (exp)) == MEM
3245 && (REG_P (SET_SRC (exp))
3246 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3247 {
3248 if (cc_status.value1 && GET_CODE (cc_status.value1) == MEM
3249 || reg_mentioned_p (SET_DEST (exp), cc_status.value1))
3250 cc_status.value1 = 0;
3251 if (cc_status.value2 && GET_CODE (cc_status.value2) == MEM
3252 || reg_mentioned_p (SET_DEST (exp), cc_status.value2))
3253 cc_status.value2 = 0;
3254 return;
3255 }
3256 /* Function calls clobber the cc's. */
3257 else if (GET_CODE (SET_SRC (exp)) == CALL)
3258 {
3259 CC_STATUS_INIT;
3260 return;
3261 }
3262 /* Tests and compares set the cc's in predictable ways. */
3263 else if (SET_DEST (exp) == cc0_rtx)
3264 {
3265 CC_STATUS_INIT;
3266 cc_status.value1 = SET_SRC (exp);
3267 return;
3268 }
3269 /* Certain instructions effect the condition codes. */
3270 else if (GET_MODE (SET_SRC (exp)) == SImode
3271 || GET_MODE (SET_SRC (exp)) == HImode
3272 || GET_MODE (SET_SRC (exp)) == QImode)
3273 switch (GET_CODE (SET_SRC (exp)))
3274 {
3275 case ASHIFTRT: case LSHIFTRT:
3276 case ASHIFT:
3277 /* Shifts on the 386 don't set the condition codes if the
3278 shift count is zero. */
3279 if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
3280 {
3281 CC_STATUS_INIT;
3282 break;
3283 }
3284 /* We assume that the CONST_INT is non-zero (this rtx would
3285 have been deleted if it were zero. */
3286
3287 case PLUS: case MINUS: case NEG:
3288 case AND: case IOR: case XOR:
3289 cc_status.flags = CC_NO_OVERFLOW;
3290 cc_status.value1 = SET_SRC (exp);
3291 cc_status.value2 = SET_DEST (exp);
3292 break;
3293
3294 default:
3295 CC_STATUS_INIT;
3296 }
3297 else
3298 {
3299 CC_STATUS_INIT;
3300 }
3301 }
3302 else if (GET_CODE (exp) == PARALLEL
3303 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
3304 {
3305 if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
3306 return;
3307 if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)
3308 {
3309 CC_STATUS_INIT;
3310 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
3311 cc_status.flags |= CC_IN_80387;
3312 else
3313 cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
3314 return;
3315 }
3316 CC_STATUS_INIT;
3317 }
3318 else
3319 {
3320 CC_STATUS_INIT;
3321 }
3322 }
3323 \f
3324 /* Split one or more DImode RTL references into pairs of SImode
3325 references. The RTL can be REG, offsettable MEM, integer constant, or
3326 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3327 split and "num" is its length. lo_half and hi_half are output arrays
3328 that parallel "operands". */
3329
3330 void
3331 split_di (operands, num, lo_half, hi_half)
3332 rtx operands[];
3333 int num;
3334 rtx lo_half[], hi_half[];
3335 {
3336 while (num--)
3337 {
3338 if (GET_CODE (operands[num]) == REG)
3339 {
3340 lo_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]));
3341 hi_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]) + 1);
3342 }
3343 else if (CONSTANT_P (operands[num]))
3344 {
3345 split_double (operands[num], &lo_half[num], &hi_half[num]);
3346 }
3347 else if (offsettable_memref_p (operands[num]))
3348 {
3349 lo_half[num] = operands[num];
3350 hi_half[num] = adj_offsettable_operand (operands[num], 4);
3351 }
3352 else
3353 abort();
3354 }
3355 }
3356 \f
3357 /* Return 1 if this is a valid binary operation on a 387.
3358 OP is the expression matched, and MODE is its mode. */
3359
3360 int
3361 binary_387_op (op, mode)
3362 register rtx op;
3363 enum machine_mode mode;
3364 {
3365 if (mode != VOIDmode && mode != GET_MODE (op))
3366 return 0;
3367
3368 switch (GET_CODE (op))
3369 {
3370 case PLUS:
3371 case MINUS:
3372 case MULT:
3373 case DIV:
3374 return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
3375
3376 default:
3377 return 0;
3378 }
3379 }
3380
3381 \f
3382 /* Return 1 if this is a valid shift or rotate operation on a 386.
3383 OP is the expression matched, and MODE is its mode. */
3384
3385 int
3386 shift_op (op, mode)
3387 register rtx op;
3388 enum machine_mode mode;
3389 {
3390 rtx operand = XEXP (op, 0);
3391
3392 if (mode != VOIDmode && mode != GET_MODE (op))
3393 return 0;
3394
3395 if (GET_MODE (operand) != GET_MODE (op)
3396 || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
3397 return 0;
3398
3399 return (GET_CODE (op) == ASHIFT
3400 || GET_CODE (op) == ASHIFTRT
3401 || GET_CODE (op) == LSHIFTRT
3402 || GET_CODE (op) == ROTATE
3403 || GET_CODE (op) == ROTATERT);
3404 }
3405
3406 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3407 MODE is not used. */
3408
3409 int
3410 VOIDmode_compare_op (op, mode)
3411 register rtx op;
3412 enum machine_mode mode;
3413 {
3414 return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
3415 }
3416 \f
3417 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3418 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3419 is the expression of the binary operation. The output may either be
3420 emitted here, or returned to the caller, like all output_* functions.
3421
3422 There is no guarantee that the operands are the same mode, as they
3423 might be within FLOAT or FLOAT_EXTEND expressions. */
3424
char *
output_387_binary_op (insn, operands)
     rtx insn;
     rtx *operands;
{
  rtx temp;
  char *base_op;
  /* Opcode string is built incrementally in this static buffer and
     returned to the caller (standard output_* convention).  */
  static char buf[100];

  /* First pick the base mnemonic: the "fi" forms take an integer
     memory operand, the plain forms take float operands.  */
  switch (GET_CODE (operands[3]))
    {
    case PLUS:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fiadd";
      else
	base_op = "fadd";
      break;

    case MINUS:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fisub";
      else
	base_op = "fsub";
      break;

    case MULT:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fimul";
      else
	base_op = "fmul";
      break;

    case DIV:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fidiv";
      else
	base_op = "fdiv";
      break;

    default:
      abort ();
    }

  strcpy (buf, base_op);

  /* Now append the operand syntax.  Commutative ops (PLUS, MULT) may
     swap operands freely; MINUS and DIV instead use the reversed
     "r" forms when the operand order requires it.  */
  switch (GET_CODE (operands[3]))
    {
    case MULT:
    case PLUS:
      /* Commutative: canonicalize so that when the destination register
	 appears as an operand it is operands[1].  */
      if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
	{
	  temp = operands[2];
	  operands[2] = operands[1];
	  operands[1] = temp;
	}

      if (GET_CODE (operands[2]) == MEM)
	return strcat (buf, AS1 (%z2,%2));

      /* An integer register operand must first be spilled so the 387
	 can load it; output_op_from_reg emits that sequence.  */
      if (NON_STACK_REG_P (operands[1]))
	{
	  output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
	  RET;
	}
      else if (NON_STACK_REG_P (operands[2]))
	{
	  output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
	  RET;
	}

      /* If operand 2 dies here, use the popping form.  */
      if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
	return strcat (buf, AS2 (p,%2,%0));

      if (STACK_TOP_P (operands[0]))
	return strcat (buf, AS2C (%y2,%0));
      else
	return strcat (buf, AS2C (%2,%0));

    case MINUS:
    case DIV:
      /* Non-commutative: a memory operand in position 1 forces the
	 reversed ("r") form of the instruction.  */
      if (GET_CODE (operands[1]) == MEM)
	return strcat (buf, AS1 (r%z1,%1));

      if (GET_CODE (operands[2]) == MEM)
	return strcat (buf, AS1 (%z2,%2));

      if (NON_STACK_REG_P (operands[1]))
	{
	  output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
	  RET;
	}
      else if (NON_STACK_REG_P (operands[2]))
	{
	  output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
	  RET;
	}

      /* From here on both sources must be 387 stack registers.  */
      if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
	abort ();

      /* Prefer popping forms when either source dies in this insn.  */
      if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
	return strcat (buf, AS2 (rp,%2,%0));

      if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
	return strcat (buf, AS2 (p,%1,%0));

      if (STACK_TOP_P (operands[0]))
	{
	  if (STACK_TOP_P (operands[1]))
	    return strcat (buf, AS2C (%y2,%0));
	  else
	    return strcat (buf, AS2 (r,%y1,%0));
	}
      else if (STACK_TOP_P (operands[1]))
	return strcat (buf, AS2C (%1,%0));
      else
	return strcat (buf, AS2 (r,%2,%0));

    default:
      abort ();
    }
}
3551 \f
3552 /* Output code for INSN to convert a float to a signed int. OPERANDS
3553 are the insn operands. The output may be SFmode or DFmode and the
3554 input operand may be SImode or DImode. As a special case, make sure
3555 that the 387 stack top dies if the output mode is DImode, because the
3556 hardware requires this. */
3557
char *
output_fix_trunc (insn, operands)
     rtx insn;
     rtx *operands;
{
  /* Whether the 387 stack top is dead after this insn; needed to pick
     fistp vs fist, and mandatory for DImode stores.  */
  int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
  rtx xops[2];

  if (! STACK_TOP_P (operands[1]) ||
      (GET_MODE (operands[0]) == DImode && ! stack_top_dies))
    abort ();

  xops[0] = GEN_INT (12);
  xops[1] = operands[4];

  /* Save the current control word, then install one whose rounding
     control bits are set (value 12 written into the high byte of the
     scratch word -- presumably selects round-toward-zero; TODO confirm
     against the %h1 substitution), so fist truncates.  */
  output_asm_insn (AS1 (fnstc%W2,%2), operands);
  output_asm_insn (AS2 (mov%L2,%2,%4), operands);
  output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
  output_asm_insn (AS2 (mov%L4,%4,%3), operands);
  output_asm_insn (AS1 (fldc%W3,%3), operands);

  /* Emit the store: via memory for an integer register destination,
     directly for a MEM destination.  */
  if (NON_STACK_REG_P (operands[0]))
    output_to_reg (operands[0], stack_top_dies);
  else if (GET_CODE (operands[0]) == MEM)
    {
      if (stack_top_dies)
	output_asm_insn (AS1 (fistp%z0,%0), operands);
      else
	output_asm_insn (AS1 (fist%z0,%0), operands);
    }
  else
    abort ();

  /* Finally restore the original control word.  */
  return AS1 (fldc%W2,%2);
}
3593 \f
3594 /* Output code for INSN to compare OPERANDS. The two operands might
3595 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3596 expression. If the compare is in mode CCFPEQmode, use an opcode that
3597 will not fault if a qNaN is present. */
3598
char *
output_float_compare (insn, operands)
     rtx insn;
     rtx *operands;
{
  int stack_top_dies;
  rtx body = XVECEXP (PATTERN (insn), 0, 0);
  /* CCFPEQmode requests a compare that will not fault on a qNaN.  */
  int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
  rtx tmp;

  /* On CPUs with cmove we can use fcomi, which writes EFLAGS
     directly instead of the 387 status word.  */
  if (TARGET_CMOVE && STACK_REG_P (operands[1]))
    {
      cc_status.flags |= CC_FCOMI;
      cc_prev_status.flags &= ~CC_TEST_AX;
    }

  /* The 387 can only compare against the stack top; swap operands if
     necessary and remember that the condition is reversed.  */
  if (! STACK_TOP_P (operands[0]))
    {
      tmp = operands[0];
      operands[0] = operands[1];
      operands[1] = tmp;
      cc_status.flags |= CC_REVERSED;
    }

  if (! STACK_TOP_P (operands[0]))
    abort ();

  stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;

  if (STACK_REG_P (operands[1])
      && stack_top_dies
      && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
      && REGNO (operands[1]) != FIRST_STACK_REG)
    {
      /* If both the top of the 387 stack dies, and the other operand
	 is also a stack register that dies, then this must be a
	 `fcompp' float compare */

      if (unordered_compare)
	output_asm_insn ("fucompp", operands);
      else
	output_asm_insn ("fcompp", operands);
    }
  else
    {
      static char buf[100];

      /* Decide if this is the integer or float compare opcode, or the
	 unordered float compare. */

      if (unordered_compare)
	strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fucomi" : "fucom");
      else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
	strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fcomi" : "fcom");
      else
	strcpy (buf, "ficom");

      /* Modify the opcode if the 387 stack is to be popped. */

      if (stack_top_dies)
	strcat (buf, "p");

      if (NON_STACK_REG_P (operands[1]))
	output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
      else if (cc_status.flags & CC_FCOMI)
	{
	  rtx xops[3];

	  xops[0] = operands[0];
	  xops[1] = operands[1];
	  xops[2] = operands[0];

	  /* fcomi sets EFLAGS itself -- no fnstsw/sahf sequence is
	     needed, so return immediately.  */
	  output_asm_insn (strcat (buf, AS2 (%z1,%y1,%2)), xops);
	  RET;
	}
      else
	output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
    }

  /* Now retrieve the condition code. */

  return output_fp_cc0_set (insn);
}
3682 \f
3683 /* Output opcodes to transfer the results of FP compare or test INSN
3684 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
3685 result of the compare or test is unordered, no comparison operator
3686 succeeds except NE. Return an output template, if any. */
3687
char *
output_fp_cc0_set (insn)
     rtx insn;
{
  rtx xops[3];
  rtx unordered_label;
  rtx next;
  enum rtx_code code;

  /* Store the 387 status word into %ax (hard register 0).  */
  xops[0] = gen_rtx (REG, HImode, 0);
  output_asm_insn (AS1 (fnsts%W0,%0), xops);

  if (! TARGET_IEEE_FP)
    {
      /* Fast path: if the next cc0 user tests a signed condition and
	 the compare was not reversed, the jump patterns can test %eax
	 directly; otherwise fall back to sahf.  */
      if (!(cc_status.flags & CC_REVERSED))
	{
	  next = next_cc0_user (insn);

	  if (GET_CODE (next) == JUMP_INSN
	      && GET_CODE (PATTERN (next)) == SET
	      && SET_DEST (PATTERN (next)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
	    {
	      code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
	    }
	  else if (GET_CODE (PATTERN (next)) == SET)
	    {
	      code = GET_CODE (SET_SRC (PATTERN (next)));
	    }
	  else
	    {
	      return "sahf";
	    }
	  if (code == GT || code == LT || code == EQ || code == NE
	      || code == LE || code == GE)
	    { /* We will test eax directly */
	      cc_status.flags |= CC_TEST_AX;
	      RET;
	    }
	}
      return "sahf";
    }

  /* IEEE mode: emit explicit mask/compare sequences on %ah so that an
     unordered result fails every comparison except NE.  First find the
     comparison code the next cc0 user wants.  */
  next = next_cc0_user (insn);
  if (next == NULL_RTX)
    abort ();

  if (GET_CODE (next) == JUMP_INSN
      && GET_CODE (PATTERN (next)) == SET
      && SET_DEST (PATTERN (next)) == pc_rtx
      && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
    {
      code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
    }
  else if (GET_CODE (PATTERN (next)) == SET)
    {
      if (GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
	code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
      else code = GET_CODE (SET_SRC (PATTERN (next)));
    }
  else if (GET_CODE (PATTERN (next)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (next), 0, 0)) == SET)
    {
      if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0))) == IF_THEN_ELSE)
	code = GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next), 0, 0)), 0));
      else code = GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0)));
    }
  else
    abort ();

  /* %ah now holds the status-word bits; 0x45 = 0x01|0x04|0x40 masks
     the C0, C2 and C3 condition bits of the 387 status word.  */
  xops[0] = gen_rtx (REG, QImode, 0);

  switch (code)
    {
    case GT:
      xops[1] = GEN_INT (0x45);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      /* je label */
      break;

    case LT:
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x01);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* je label */
      break;

    case GE:
      xops[1] = GEN_INT (0x05);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      /* je label */
      break;

    case LE:
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS1 (dec%B0,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* jb label */
      break;

    case EQ:
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* je label */
      break;

    case NE:
      xops[1] = GEN_INT (0x44);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
      /* jne label */
      break;

    case GTU:
    case LTU:
    case GEU:
    case LEU:
      /* Unsigned comparisons are never generated for float compares.  */
    default:
      abort ();
    }
  RET;
}
3816 \f
/* Number of scratch stack slots available per function and per mode
   (see assign_386_stack_local).  */
#define MAX_386_STACK_LOCALS 2

/* Cached stack-slot MEMs for the current function, indexed by machine
   mode and slot number; cleared by clear_386_stack_locals.  */
static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];

/* Define the structure for the machine field in struct function. */
struct machine_function
{
  /* Copy of the global array above, saved/restored across nested
     function compilation by save/restore_386_machine_status.  */
  rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
};
3826
3827 /* Functions to save and restore i386_stack_locals.
3828 These will be called, via pointer variables,
3829 from push_function_context and pop_function_context. */
3830
void
save_386_machine_status (p)
     struct function *p;
{
  /* The struct's only member is an array identical to the global, so
     `sizeof i386_stack_locals' is also the full struct size.  xmalloc
     aborts on failure, so the result need not be checked here.  */
  p->machine = (struct machine_function *) xmalloc (sizeof i386_stack_locals);
  bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
	 sizeof i386_stack_locals);
}
3839
void
restore_386_machine_status (p)
     struct function *p;
{
  /* Copy the saved slot array back into the globals and release the
     save area allocated by save_386_machine_status.  */
  bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
	 sizeof i386_stack_locals);
  free (p->machine);
}
3848
3849 /* Clear stack slot assignments remembered from previous functions.
3850 This is called from INIT_EXPANDERS once before RTL is emitted for each
3851 function. */
3852
3853 void
3854 clear_386_stack_locals ()
3855 {
3856 enum machine_mode mode;
3857 int n;
3858
3859 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
3860 mode = (enum machine_mode) ((int) mode + 1))
3861 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
3862 i386_stack_locals[(int) mode][n] = NULL_RTX;
3863
3864 /* Arrange to save and restore i386_stack_locals around nested functions. */
3865 save_machine_status = save_386_machine_status;
3866 restore_machine_status = restore_386_machine_status;
3867 }
3868
3869 /* Return a MEM corresponding to a stack slot with mode MODE.
3870 Allocate a new slot if necessary.
3871
3872 The RTL for a function can have several slots available: N is
3873 which slot to use. */
3874
3875 rtx
3876 assign_386_stack_local (mode, n)
3877 enum machine_mode mode;
3878 int n;
3879 {
3880 if (n < 0 || n >= MAX_386_STACK_LOCALS)
3881 abort ();
3882
3883 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
3884 i386_stack_locals[(int) mode][n]
3885 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
3886
3887 return i386_stack_locals[(int) mode][n];
3888 }
3889
3890 \f
3891 int is_mul(op,mode)
3892 register rtx op;
3893 enum machine_mode mode;
3894 {
3895 return (GET_CODE (op) == MULT);
3896 }
3897
3898 int is_div(op,mode)
3899 register rtx op;
3900 enum machine_mode mode;
3901 {
3902 return (GET_CODE (op) == DIV);
3903 }
3904
3905 \f
3906 #ifdef NOTYET
3907 /* Create a new copy of an rtx.
3908 Recursively copies the operands of the rtx,
3909 except for those few rtx codes that are sharable.
3910 Doesn't share CONST */
3911
rtx
copy_all_rtx (orig)
     register rtx orig;
{
  register rtx copy;
  register int i, j;
  register RTX_CODE code;
  register char *format_ptr;

  code = GET_CODE (orig);

  /* Leaf rtxs that may be shared are returned unchanged.  */
  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values. */
      return orig;

#if 0
    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF. If it contains
	 a LABEL_REF, it isn't sharable. */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
	return orig;
      break;
#endif
      /* A MEM with a constant address is not sharable. The problem is that
	 the constant address may need to be reloaded. If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded. */
    }

  /* Allocate the copy and duplicate mode and flag bits.  */
  copy = rtx_alloc (code);
  PUT_MODE (copy, GET_MODE (orig));
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;
  copy->integrated = orig->integrated;
  /* intel1 */
  copy->is_spill_rtx = orig->is_spill_rtx;

  /* Copy each operand according to its rtx format character.
     NOTE(review): the 'e' and 'E' cases recurse through copy_rtx, not
     copy_all_rtx, so subexpressions may still be shared -- this looks
     inconsistent with the header comment; confirm intent.  */
  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  XEXP (copy, i) = XEXP (orig, i);
	  if (XEXP (orig, i) != NULL)
	    XEXP (copy, i) = copy_rtx (XEXP (orig, i));
	  break;

	case '0':
	case 'u':
	  XEXP (copy, i) = XEXP (orig, i);
	  break;

	case 'E':
	case 'V':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	case 'S':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	default:
	  abort ();
	}
    }
  return copy;
}
4009
4010 \f
4011 /* try to rewrite a memory address to make it valid */
void
rewrite_address (mem_rtx)
     rtx mem_rtx;
{
  rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
  int scale = 1;
  int offset_adjust = 0;
  int was_only_offset = 0;
  rtx mem_addr = XEXP (mem_rtx, 0);
  /* Obstack checkpoint so partially-built rtxs can be discarded on
     failure.  NOTE(review): storage is not freed on the success paths,
     and `in_struct' below is computed but never used -- confirm.  */
  char *storage = (char *) oballoc (0);
  int in_struct = 0;
  int is_spill_rtx = 0;

  in_struct = MEM_IN_STRUCT_P (mem_rtx);
  is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);

  /* First try simple reassociation: (a + (b + c)) -> ((a + b) + c).  */
  if (GET_CODE (mem_addr) == PLUS &&
      GET_CODE (XEXP (mem_addr, 1)) == PLUS &&
      GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
    {				/* this part is utilized by the combiner */
      ret_rtx =
	gen_rtx (PLUS, GET_MODE (mem_addr),
		 gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
			  XEXP (mem_addr, 0),
			  XEXP (XEXP (mem_addr, 1), 0)),
		 XEXP (XEXP (mem_addr, 1), 1));
      if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
	{
	  XEXP (mem_rtx, 0) = ret_rtx;
	  RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
	  return;
	}
      obfree (storage);
    }

  /* this part is utilized by loop.c */
  /* If the address contains PLUS (reg,const) and this pattern is invalid
     in this case - try to rewrite the address to make it valid intel1
     */
  storage = (char *) oballoc (0);
  index_rtx = base_rtx = offset_rtx = NULL;
  /* find the base index and offset elements of the memory address */
  if (GET_CODE (mem_addr) == PLUS)
    {
      if (GET_CODE (XEXP (mem_addr, 0)) == REG)
	{
	  if (GET_CODE (XEXP (mem_addr, 1)) == REG)
	    {
	      base_rtx = XEXP (mem_addr, 1);
	      index_rtx = XEXP (mem_addr, 0);
	    }
	  else
	    {
	      base_rtx = XEXP (mem_addr, 0);
	      offset_rtx = XEXP (mem_addr, 1);
	    }
	}
      else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
	{
	  index_rtx = XEXP (mem_addr, 0);
	  if (GET_CODE (XEXP (mem_addr, 1)) == REG)
	    {
	      base_rtx = XEXP (mem_addr, 1);
	    }
	  else
	    {
	      offset_rtx = XEXP (mem_addr, 1);
	    }
	}
      else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
	{
	  /* intel1 */
	  /* Match (((reg * const) + const) + reg) + symbol and fold the
	     inner constant into offset_adjust.  */
	  if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS &&
	      GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT &&
	      GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0)) == REG &&
	      GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1)) == CONST_INT &&
	      GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1)) == CONST_INT &&
	      GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG &&
	      GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF)
	    {
	      index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
	      offset_rtx = XEXP (mem_addr, 1);
	      base_rtx = XEXP (XEXP (mem_addr, 0), 1);
	      offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
	    }
	  else
	    {
	      offset_rtx = XEXP (mem_addr, 1);
	      index_rtx = XEXP (XEXP (mem_addr, 0), 0);
	      base_rtx = XEXP (XEXP (mem_addr, 0), 1);
	    }
	}
      else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT)
	{
	  was_only_offset = 1;
	  index_rtx = NULL;
	  base_rtx = NULL;
	  offset_rtx = XEXP (mem_addr, 1);
	  offset_adjust = INTVAL (XEXP (mem_addr, 0));
	  if (offset_adjust == 0)
	    {
	      /* Adding zero: just drop the constant term.  */
	      XEXP (mem_rtx, 0) = offset_rtx;
	      RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
	      return;
	    }
	}
      else
	{
	  obfree (storage);
	  return;
	}
    }
  else if (GET_CODE (mem_addr) == MULT)
    {
      index_rtx = mem_addr;
    }
  else
    {
      obfree (storage);
      return;
    }
  /* Extract the scale factor from a (mult index const) term.  */
  if (index_rtx && GET_CODE (index_rtx) == MULT)
    {
      if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
	{
	  obfree (storage);
	  return;
	}
      scale_rtx = XEXP (index_rtx, 1);
      scale = INTVAL (scale_rtx);
      index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
    }
  /* now find which of the elements are invalid and try to fix them */
  if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
    {
      /* A constant "index" with no base folds entirely into the offset.  */
      offset_adjust = INTVAL (index_rtx) * scale;
      if (offset_rtx && GET_CODE (offset_rtx) == CONST &&
	  GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
	{
	  if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
	      GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
	    {
	      offset_rtx = copy_all_rtx (offset_rtx);
	      XEXP (XEXP (offset_rtx, 0), 1) =
		gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
	      if (!CONSTANT_P (offset_rtx))
		{
		  obfree (storage);
		  return;
		}
	    }
	}
      else if (offset_rtx && GET_CODE (offset_rtx) == SYMBOL_REF)
	{
	  offset_rtx =
	    gen_rtx (CONST, GET_MODE (offset_rtx),
		     gen_rtx (PLUS, GET_MODE (offset_rtx),
			      offset_rtx,
			      gen_rtx (CONST_INT, 0, offset_adjust)));
	  if (!CONSTANT_P (offset_rtx))
	    {
	      obfree (storage);
	      return;
	    }
	}
      else if (offset_rtx && GET_CODE (offset_rtx) == CONST_INT)
	{
	  offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
	}
      else if (!offset_rtx)
	{
	  offset_rtx = gen_rtx (CONST_INT, 0, 0);
	}
      RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
      XEXP (mem_rtx, 0) = offset_rtx;
      return;
    }
  /* Fold (reg + const) base/index terms into offset_adjust.  */
  if (base_rtx && GET_CODE (base_rtx) == PLUS &&
      GET_CODE (XEXP (base_rtx, 0)) == REG &&
      GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
    {
      offset_adjust += INTVAL (XEXP (base_rtx, 1));
      base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
    }
  else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
    {
      offset_adjust += INTVAL (base_rtx);
      base_rtx = NULL;
    }
  if (index_rtx && GET_CODE (index_rtx) == PLUS &&
      GET_CODE (XEXP (index_rtx, 0)) == REG &&
      GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
    {
      offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
      index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
    }
  /* Give up unless the remaining base and index are legitimate.  */
  if (index_rtx)
    {
      if (!LEGITIMATE_INDEX_P (index_rtx)
	  && !(index_rtx == stack_pointer_rtx && scale == 1 && base_rtx == NULL))
	{
	  obfree (storage);
	  return;
	}
    }
  if (base_rtx)
    {
      if (!LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
	{
	  obfree (storage);
	  return;
	}
    }
  if (offset_adjust != 0)
    {
      /* Merge offset_adjust into whatever offset term exists.  */
      if (offset_rtx)
	{
	  if (GET_CODE (offset_rtx) == CONST &&
	      GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
	    {
	      if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
		  GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
		{
		  offset_rtx = copy_all_rtx (offset_rtx);
		  XEXP (XEXP (offset_rtx, 0), 1) =
		    gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
		  if (!CONSTANT_P (offset_rtx))
		    {
		      obfree (storage);
		      return;
		    }
		}
	    }
	  else if (GET_CODE (offset_rtx) == SYMBOL_REF)
	    {
	      offset_rtx =
		gen_rtx (CONST, GET_MODE (offset_rtx),
			 gen_rtx (PLUS, GET_MODE (offset_rtx),
				  offset_rtx,
				  gen_rtx (CONST_INT, 0, offset_adjust)));
	      if (!CONSTANT_P (offset_rtx))
		{
		  obfree (storage);
		  return;
		}
	    }
	  else if (GET_CODE (offset_rtx) == CONST_INT)
	    {
	      offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
	    }
	  else
	    {
	      obfree (storage);
	      return;
	    }
	}
      else
	{
	  offset_rtx = gen_rtx (CONST_INT, 0, offset_adjust);
	}
      /* Rebuild the address from the canonicalized base/index/scale/
	 offset pieces, omitting zero offsets and unit scales.  */
      if (index_rtx)
	{
	  if (base_rtx)
	    {
	      if (scale != 1)
		{
		  if (GET_CODE (offset_rtx) == CONST_INT &&
		      INTVAL (offset_rtx) == 0)
		    {
		      ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
					 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
						  scale_rtx),
					 base_rtx);
		    }
		  else
		    {
		      ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
					 gen_rtx (PLUS, GET_MODE (base_rtx),
						  gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
							   scale_rtx),
						  base_rtx),
					 offset_rtx);
		    }
		}
	      else
		{
		  if (GET_CODE (offset_rtx) == CONST_INT &&
		      INTVAL (offset_rtx) == 0)
		    {
		      ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, base_rtx);
		    }
		  else
		    {
		      ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
					 gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx,
						  base_rtx),
					 offset_rtx);
		    }
		}
	    }
	  else
	    {
	      if (scale != 1)
		{
		  if (GET_CODE (offset_rtx) == CONST_INT &&
		      INTVAL (offset_rtx) == 0)
		    {
		      ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx), index_rtx, scale_rtx);
		    }
		  else
		    {
		      ret_rtx =
			gen_rtx (PLUS, GET_MODE (offset_rtx),
				 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
					  scale_rtx),
				 offset_rtx);
		    }
		}
	      else
		{
		  if (GET_CODE (offset_rtx) == CONST_INT &&
		      INTVAL (offset_rtx) == 0)
		    {
		      ret_rtx = index_rtx;
		    }
		  else
		    {
		      ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, offset_rtx);
		    }
		}
	    }
	}
      else
	{
	  if (base_rtx)
	    {
	      if (GET_CODE (offset_rtx) == CONST_INT &&
		  INTVAL (offset_rtx) == 0)
		{
		  ret_rtx = base_rtx;
		}
	      else
		{
		  ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx, offset_rtx);
		}
	    }
	  else if (was_only_offset)
	    {
	      ret_rtx = offset_rtx;
	    }
	  else
	    {
	      obfree (storage);
	      return;
	    }
	}
      XEXP (mem_rtx, 0) = ret_rtx;
      RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
      return;
    }
  else
    {
      obfree (storage);
      return;
    }
}
4378 #endif /* NOTYET */
4379
4380 \f
4381 /* return 1 if the first insn to set cc before insn also sets the register
4382 reg_rtx - otherwise return 0 */
int
last_to_set_cc (reg_rtx, insn)
     rtx reg_rtx, insn;
{
  rtx prev_insn = PREV_INSN (insn);

  /* Scan backwards through simple SET insns.  Stop (returning 0) at
     the first insn that is not a NOTE or a single SET, or whose
     source might itself set the condition codes.  */
  while (prev_insn)
    {
      if (GET_CODE (prev_insn) == NOTE)
	;			/* Notes are transparent; keep scanning.  */

      else if (GET_CODE (prev_insn) == INSN)
	{
	  if (GET_CODE (PATTERN (prev_insn)) != SET)
	    return (0);

	  if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
	    {
	      /* Found the insn that sets REG_RTX: succeed only if its
		 source also sets the condition codes.  */
	      if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
		return (1);

	      return (0);
	    }

	  /* An intervening insn that might clobber cc ends the scan.  */
	  else if (!doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
	    return (0);
	}

      else
	return (0);

      prev_insn = PREV_INSN (prev_insn);
    }

  return (0);
}
4419
4420 \f
4421 int
4422 doesnt_set_condition_code (pat)
4423 rtx pat;
4424 {
4425 switch (GET_CODE (pat))
4426 {
4427 case MEM:
4428 case REG:
4429 return (1);
4430
4431 default:
4432 return (0);
4433
4434 }
4435 }
4436
4437 \f
4438 int
4439 sets_condition_code (pat)
4440 rtx pat;
4441 {
4442 switch (GET_CODE (pat))
4443 {
4444 case PLUS:
4445 case MINUS:
4446 case AND:
4447 case IOR:
4448 case XOR:
4449 case NOT:
4450 case NEG:
4451 case MULT:
4452 case DIV:
4453 case MOD:
4454 case UDIV:
4455 case UMOD:
4456 return (1);
4457
4458 default:
4459 return (0);
4460
4461 }
4462 }
4463
4464 \f
4465 int
4466 str_immediate_operand (op, mode)
4467 register rtx op;
4468 enum machine_mode mode;
4469 {
4470 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
4471 {
4472 return (1);
4473 }
4474 return (0);
4475 }
4476
4477 \f
4478 int
4479 is_fp_insn (insn)
4480 rtx insn;
4481 {
4482 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4483 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4484 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4485 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
4486 {
4487 return (1);
4488 }
4489
4490 return (0);
4491 }
4492
4493 /*
4494 Return 1 if the mode of the SET_DEST of insn is floating point
4495 and it is not an fld or a move from memory to memory.
4496 Otherwise return 0 */
4497 int
4498 is_fp_dest (insn)
4499 rtx insn;
4500 {
4501 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4502 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4503 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4504 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4505 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
4506 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
4507 && GET_CODE (SET_SRC (insn)) != MEM)
4508 {
4509 return (1);
4510 }
4511
4512 return (0);
4513 }
4514
4515 /*
4516 Return 1 if the mode of the SET_DEST floating point and is memory
4517 and the source is a register.
4518 */
4519 int
4520 is_fp_store (insn)
4521 rtx insn;
4522 {
4523 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4524 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4525 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4526 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4527 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
4528 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
4529 {
4530 return (1);
4531 }
4532
4533 return (0);
4534 }
4535
4536 \f
4537 /*
4538 Return 1 if dep_insn sets a register which insn uses as a base
4539 or index to reference memory.
4540 otherwise return 0 */
4541
4542 int
4543 agi_dependent (insn, dep_insn)
4544 rtx insn, dep_insn;
4545 {
4546 if (GET_CODE (dep_insn) == INSN
4547 && GET_CODE (PATTERN (dep_insn)) == SET
4548 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
4549 {
4550 return (reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn));
4551 }
4552
4553 if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
4554 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
4555 && push_operand (SET_DEST (PATTERN (dep_insn)),
4556 GET_MODE (SET_DEST (PATTERN (dep_insn)))))
4557 {
4558 return (reg_mentioned_in_mem (stack_pointer_rtx, insn));
4559 }
4560
4561 return (0);
4562 }
4563
4564 \f
4565 /*
4566 Return 1 if reg is used in rtl as a base or index for a memory ref
4567 otherwise return 0. */
4568
4569 int
4570 reg_mentioned_in_mem (reg, rtl)
4571 rtx reg, rtl;
4572 {
4573 register char *fmt;
4574 register int i;
4575 register enum rtx_code code;
4576
4577 if (rtl == NULL)
4578 return (0);
4579
4580 code = GET_CODE (rtl);
4581
4582 switch (code)
4583 {
4584 case HIGH:
4585 case CONST_INT:
4586 case CONST:
4587 case CONST_DOUBLE:
4588 case SYMBOL_REF:
4589 case LABEL_REF:
4590 case PC:
4591 case CC0:
4592 case SUBREG:
4593 return (0);
4594
4595
4596 }
4597
4598 if (code == MEM && reg_mentioned_p (reg, rtl))
4599 return (1);
4600
4601 fmt = GET_RTX_FORMAT (code);
4602 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4603 {
4604 if (fmt[i] == 'E')
4605 {
4606 register int j;
4607 for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
4608 {
4609 if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
4610 return 1;
4611 }
4612 }
4613
4614 else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
4615 return 1;
4616 }
4617
4618 return (0);
4619 }
4620 \f
/* Output the appropriate insns for doing strlen if not just doing
   repnz; scasb

   operands[0] = result, initialized with the start address
   operands[1] = alignment of the address.
   operands[2] = scratch register, initialized with the start address
		 when not aligned, otherwise undefined

   This is just the body.  It needs the initialisations mentioned above
   and some address computing at the end.  These things are done in
   i386.md.  */

char *
output_strlen_unroll (operands)
     rtx operands[];
{
  rtx xops[18];

  xops[0] = operands[0];		/* Result */
  /* operands[1];			 * Alignment */
  xops[1] = operands[2];		/* Scratch */
  xops[2] = GEN_INT (0);
  xops[3] = GEN_INT (2);
  xops[4] = GEN_INT (3);
  xops[5] = GEN_INT (4);
  /* xops[6] = gen_label_rtx ();	 * label when aligned to 3-byte */
  /* xops[7] = gen_label_rtx ();	 * label when aligned to 2-byte */
  xops[8] = gen_label_rtx ();		/* label of main loop */
  if(TARGET_USE_Q_REG && QI_REG_P (xops[1]))
    xops[9] = gen_label_rtx ();		/* pentium optimisation */
  xops[10] = gen_label_rtx ();		/* end label 2 */
  xops[11] = gen_label_rtx ();		/* end label 1 */
  xops[12] = gen_label_rtx ();		/* end label */
  /* xops[13]				 * Temporary used */
  /* Masks used to test each byte of a 32-bit word for zero.  */
  xops[14] = GEN_INT (0xff);
  xops[15] = GEN_INT (0xff00);
  xops[16] = GEN_INT (0xff0000);
  xops[17] = GEN_INT (0xff000000);

  /* Loop to check 1..3 bytes for null to get an aligned pointer.  */

  /* Is there a known alignment and is it less than 4?  */
  if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
    {
      /* Is there a known alignment and is it not 2?  */
      if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
	{
	  xops[6] = gen_label_rtx (); /* label when aligned to 3-byte */
	  xops[7] = gen_label_rtx (); /* label when aligned to 2-byte */

	  /* Leave just the 3 lower bits.  If this is a q-register,
	     then the high part is used later; therefore use andl
	     rather than andb.  */
	  output_asm_insn (AS2 (and%L1,%4,%1), xops);
	  /* Is aligned to 4-byte address when zero.  */
	  output_asm_insn (AS1 (je,%l8), xops);
	  /* Side-effect: even parity when %eax == 3.  */
	  output_asm_insn (AS1 (jp,%6), xops);

	  /* Is it aligned to 2 bytes?  */
	  /* NOTE(review): both arms of this if/else emit the identical
	     insn, making the QI_REG_P test redundant — confirm whether
	     the q-reg arm was meant to emit a different (byte) compare.  */
	  if (QI_REG_P (xops[1]))
	    output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
	  else
	    output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
	  output_asm_insn (AS1 (je,%7), xops);
	}
      else
	{
	  /* Since the alignment is 2, we have to check 2 or 0 bytes.  */

	  /* Check if is aligned to 4-byte.  */
	  output_asm_insn (AS2 (and%L1,%3,%1), xops);
	  /* Is aligned to 4-byte address when zero.  */
	  output_asm_insn (AS1 (je,%l8), xops);
	}

      xops[13] = gen_rtx (MEM, QImode, xops[0]);

      /* Now, compare the bytes; comparing with the high part of a
	 q-reg gives shorter code.  */
      if (QI_REG_P (xops[1]))
	{
	  /* Compare the first n unaligned bytes on a byte-per-byte
	     basis.  */
	  output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
	  /* When zero we reached the end.  */
	  output_asm_insn (AS1 (je,%l12), xops);
	  /* Increment the address.  */
	  output_asm_insn (AS1 (inc%L0,%0), xops);

	  /* Not needed with an alignment of 2.  */
	  if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
	    {
	      ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
	      output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
	      output_asm_insn (AS1 (je,%l12), xops);
	      output_asm_insn (AS1 (inc%L0,%0), xops);

	      ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
	    }
	  output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
	}
      else
	{
	  /* No q-register available: compare each byte against the
	     immediate zero instead.  */
	  output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
	  output_asm_insn (AS1 (je,%l12), xops);
	  output_asm_insn (AS1 (inc%L0,%0), xops);

	  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
	  output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
	  output_asm_insn (AS1 (je,%l12), xops);
	  output_asm_insn (AS1 (inc%L0,%0), xops);

	  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
	  output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
	}
      output_asm_insn (AS1 (je,%l12), xops);
      output_asm_insn (AS1 (inc%L0,%0), xops);
    }

  /* Generate loop to check 4 bytes at a time.  */
  /* IMHO it is not a good idea to align this loop.  It gives only
     huge programs, but does not help to speed up.  */
  /* ASM_OUTPUT_LOOP_ALIGN (asm_out_file); */
  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));

  xops[13] = gen_rtx (MEM, SImode, xops[0]);
  output_asm_insn (AS2 (mov%L1,%13,%1), xops);

  if (QI_REG_P (xops[1]))
    {
      /* On i586 it is faster to combine the hi- and lo- part as
	 a kind of lookahead.  If anding both yields zero, then one
	 of both *could* be zero, otherwise none of both is zero;
	 this saves one instruction, on i486 this is slower
	 tested with P-90, i486DX2-66, AMD486DX2-66  */
      if(TARGET_PENTIUM)
	{
	  output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
	  output_asm_insn (AS1 (jne,%l9), xops);
	}

      /* Check first byte.  */
      output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
      output_asm_insn (AS1 (je,%l12), xops);

      /* Check second byte.  */
      output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
      output_asm_insn (AS1 (je,%l11), xops);

      if(TARGET_PENTIUM)
	ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[9]));
    }
  else
    {
      /* Check first byte.  */
      output_asm_insn (AS2 (test%L1,%14,%1), xops);
      output_asm_insn (AS1 (je,%l12), xops);

      /* Check second byte.  */
      output_asm_insn (AS2 (test%L1,%15,%1), xops);
      output_asm_insn (AS1 (je,%l11), xops);
    }

  /* Check third byte.  */
  output_asm_insn (AS2 (test%L1,%16,%1), xops);
  output_asm_insn (AS1 (je,%l10), xops);

  /* Check fourth byte and increment address.  */
  output_asm_insn (AS2 (add%L0,%5,%0), xops);
  output_asm_insn (AS2 (test%L1,%17,%1), xops);
  output_asm_insn (AS1 (jne,%l8), xops);

  /* Now generate fixups when the compare stops within a 4-byte word.  */
  output_asm_insn (AS2 (sub%L0,%4,%0), xops);

  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
  output_asm_insn (AS1 (inc%L0,%0), xops);

  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
  output_asm_insn (AS1 (inc%L0,%0), xops);

  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));

  RET;
}